Skip to main content

1. Configure Freeplay Client

You’ll need to set the following environment variables:
  1. FREEPLAY_API_KEY
  2. OPENAI_API_KEY
  3. FREEPLAY_PROJECT_ID

2. Fetch Prompt

Fetch your prompt from the Freeplay server

3. Call your LLM

You will interact with your LLM directly, but you can derive the model configuration and messages from the formatted prompt object

4. Record the LLM Interaction

Record your LLM response back to Freeplay

Examples

import os
import time

# NOTE: removed a duplicate CallInfo import from the original example.
from freeplay import Freeplay, RecordPayload, CallInfo, ResponseInfo, SessionInfo
from openai import OpenAI

# Create a Freeplay client object.
fp_client = Freeplay(
    freeplay_api_key=os.getenv("FREEPLAY_API_KEY"),
    api_base="https://app.freeplay.ai/api"
)
# Configure the OpenAI client.
# Was `userdata.get(...)` (a Colab-only helper, undefined here); use the same
# os.getenv pattern as the other credentials.
openai_client = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY"),
)

project_id = os.getenv("FREEPLAY_PROJECT_ID")

## PROMPT FETCH ##
# Set the prompt template variables.
prompt_vars = {"keyA": "valueA"}
# Fetch a formatted prompt from the Freeplay server.
formatted_prompt = fp_client.prompts.get_formatted(
    project_id=project_id,
    template_name="template_name",
    environment="latest",
    variables=prompt_vars,
)

## LLM CALL ##
# Make an LLM call to your provider of choice, timing it so the latency
# can be recorded back to Freeplay.
start = time.time()
# Was `openaiClient` (undefined); the client is named `openai_client` above.
chat_response = openai_client.chat.completions.create(
    model=formatted_prompt.prompt_info.model,
    messages=formatted_prompt.llm_prompt,
    **formatted_prompt.prompt_info.model_parameters
)
end = time.time()

# Append the assistant response to the prompt's message history.
all_messages = formatted_prompt.all_messages(
    {'role': chat_response.choices[0].message.role,
     'content': chat_response.choices[0].message.content}
)

## RECORD ##
# Create a session to group this interaction.
session = fp_client.sessions.create()

# Build the record payload.
payload = RecordPayload(
    project_id=project_id,  # was missing the trailing comma (SyntaxError)
    all_messages=all_messages,
    inputs=prompt_vars,
    session_info=session,
    prompt_version_info=formatted_prompt.prompt_info,
    call_info=CallInfo.from_prompt_info(formatted_prompt.prompt_info, start_time=start, end_time=end),
    response_version_info=ResponseInfo(
        # `is_complete` marks whether the model stopped naturally (vs. length cutoff, etc.).
        is_complete=chat_response.choices[0].finish_reason == 'stop'
    )
)
# Record the LLM interaction back to Freeplay.
fp_client.recordings.create(payload)