This guide shows how to use the A/B Testing endpoint from Ruby. In this example, we imagine a user who wants to experiment with a new system prompt that adds more information about a specific type of end user to their chatbot.

To retrieve the variants you need the following information:

  • The user_id: The unique identifier for the user.
  • The experiment_id: The unique identifier for the experiment. This id can be found in the experiment details on the Nebuly dashboard.
  • The Nebuly API key associated with the project in which you set up the experiment.
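
The example below walks through the full flow in plain Ruby: fetch the variant assigned to the user, build the prompt, call the OpenAI chat completions API, and send the resulting interaction back to Nebuly. For readability, the snippet assumes the API keys and the experiment id are available as environment variables; the variable names are illustrative, so adapt them to how you manage secrets.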
require 'net/http'
require 'json'
require 'time' # for Time#iso8601

# Illustrative configuration: adapt the names and key handling to your setup.
NEBULY_API_KEY = ENV.fetch("NEBULY_API_KEY")
OPENAI_API_KEY = ENV.fetch("OPENAI_API_KEY")
EXPERIMENT_ID  = ENV.fetch("EXPERIMENT_ID") # shown in the experiment details on the Nebuly dashboard

# Convert a config param value (delivered as a string) to its declared type.
def cast_type(value, type)
  case type
  when "string"
    value
  when "int"
    value.to_i
  when "float"
    value.to_f
  when "bool"
    value.downcase == "true"
  else
    raise ArgumentError, "Unknown type: #{type}"
  end
end
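
# A quick illustration of the casting (the values here are made up):
#   cast_type("0.7", "float")  #=> 0.7
#   cast_type("true", "bool")  #=> true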

def generate_response(user_id, messages)
  # Get the variant for the user
  uri = URI("https://backend.nebuly.com/api/external/variants")
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl = true
  
  request = Net::HTTP::Post.new(uri)
  request["Authorization"] = "Bearer #{nebuly_api_key}"
  request["Content-Type"] = "application/json"
  request.body = {
    user: user_id,
    feature_flags: ["{experiment_id}"]
  }.to_json

  response = http.request(request)
  variants_response = JSON.parse(response.body)

  # Prepare the messages
  system_message = { role: "system", content: "You are a helpful assistant." }
  messages = [system_message] + messages

  # Apply the variants and adjust the messages if needed
  variant_list = variants_response["variants"]
  extra_params = {}
  variant_list.each do |variant|
    if variant["kind"] == "prompt"
      messages << { role: "system", content: variant["prompt"] }
    end
    variant["config_params"].each do |config_param|
      extra_params[config_param["name"]] = cast_type(config_param["value"], config_param["type"])
    end
  end
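  # extra_params now holds any model parameters defined by the variant
  # (for example a temperature override); they are merged into the OpenAI
  # request below.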

  time_start = Time.now.utc.iso8601
  # Generate the response using OpenAI API directly
  uri = URI("https://api.openai.com/v1/chat/completions")
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl = true

  request = Net::HTTP::Post.new(uri)
  request["Authorization"] = "Bearer #{openai_api_key}"
  request["Content-Type"] = "application/json"
  model = "gpt-3.5-turbo"
  request.body = {
    model: model,
    messages: messages
  }.merge(extra_params).to_json

  response = http.request(request)
  result = JSON.parse(response.body)
  time_end = Time.now.utc.iso8601

  output = result.dig("choices", 0, "message", "content")

  # Send the interaction to Nebuly for tracking
  uri = URI("https://backend.nebuly.com/event-ingestion/api/v2/events/interactions")
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl = true

  request = Net::HTTP::Post.new(uri)
  request["Authorization"] = "Bearer #{NEBULY_API_KEY}"
  request["Content-Type"] = "application/json"
  request.body = {
    interaction: {
      messages: messages,
      output: output,
      time_start: time_start,
      time_end: time_end,
      end_user: user_id,
      model: model,
      tags: { version: "v1.0.0" },
      feature_flags: [EXPERIMENT_ID]
    },
    anonymize: true
  }.to_json

  response = http.request(request)
  puts JSON.parse(response.body)

  output
end
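
You can then call the helper like this (the user id and message content below are purely illustrative):

messages = [
  { role: "user", content: "What payment methods do you support?" }
]
puts generate_response("user_123", messages)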