This is a guide on how to use the AB Testing endpoint in Python. In this example, we imagine a user who wants to experiment with a new system prompt that adds more information about a specific type of end-user to their chatbot.

The information needed to get the variants is:

  • The user_id: The unique identifier for the user.
  • The experiment_id: The unique identifier for the experiment. This id can be retrieved from the Nebuly dashboard in the experiment details.
  • The Nebuly API key associated with the project you set up the experiment in.
from typing import Dict, List, Union

import requests

import nebuly
import openai

nebuly.init("your_nebuly_api_key")

# Set up the OpenAI client.
# Placeholder key — replace with your real OpenAI API key before running.
openai.api_key = "your_openai_api_key"


def cast_type(value: str, type: str) -> Union[str, int, float, bool]:
    """Cast a raw string config-param value to the requested Python type.

    Args:
        value: The raw string value returned by the variants endpoint.
        type: The target type name — one of "string", "int", "float", "bool".

    Returns:
        The value converted to the requested Python type.

    Raises:
        ValueError: If ``type`` is not one of the supported type names.
    """
    # NOTE: the parameter name shadows the builtin ``type``; it is kept
    # unchanged so existing keyword-argument callers keep working.
    if type == "string":
        return value
    if type == "int":
        return int(value)
    if type == "float":
        return float(value)
    if type == "bool":
        # Case-insensitive: "True", "TRUE", and "true" all map to True;
        # anything else maps to False.
        return value.lower() == "true"
    raise ValueError(f"Unknown type: {type}")


def generate_response(user_id: str, messages: List[Dict[str, str]]) -> str:
    """Generate a chat completion for *user_id*, applying any active A/B variant.

    Fetches the variant(s) assigned to the user from the Nebuly variants
    endpoint, optionally appends a variant system prompt to the message list,
    forwards any variant config params as extra keyword arguments to the
    OpenAI call, and returns the assistant's reply text.

    Args:
        user_id: The unique identifier for the end-user.
        messages: The chat history as a list of ``{"role": ..., "content": ...}``
            dicts (without the base system message, which is prepended here).

    Returns:
        The assistant's response text.

    Raises:
        requests.HTTPError: If the variants request fails.
    """
    # Get the variant for the user.
    variant_response = requests.post(
        "https://backend.nebuly.com/api/external/variants",
        json={
            "user": user_id,
            # Placeholder — replace with the experiment id from the
            # Nebuly dashboard (experiment details page).
            "feature_flags": ["{experiment_id}"],
        },
        # Placeholder — replace with your real Nebuly API key.
        headers={"Authorization": "Bearer your_nebuly_api_key"},
    )
    # Fail fast with a clear error on auth/validation problems instead of
    # crashing later on an unexpected JSON body.
    variant_response.raise_for_status()

    # Prepend the base system prompt.
    system_message = {"role": "system", "content": "You are a helpful assistant."}
    messages = [system_message] + messages

    # Apply each variant: "prompt" variants add an extra system message,
    # and every config param becomes an extra keyword argument for the
    # completion call (cast to its declared type).
    extra_params: Dict[str, Union[str, int, float, bool]] = {}
    for variant in variant_response.json()["variants"]:
        if variant["kind"] == "prompt":
            messages.append({"role": "system", "content": variant["prompt"]})
        for config_param in variant["config_params"]:
            extra_params[config_param["name"]] = cast_type(
                config_param["value"], config_param["type"]
            )

    # Generate the response using OpenAI.
    response = openai.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=messages,
        # nebuly params — placeholder, replace with your experiment id.
        feature_flags=["{experiment_id}"],
        # params needed by the variant
        **extra_params,
    )

    # openai v1 returns model objects, not dicts: use attribute access.
    return response.choices[0].message.content