
1. Create the Freeplay Client
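
Instantiate the client with your Freeplay API key and the base URL for your Freeplay instance. A minimal sketch, assuming both values are available as the FREEPLAY_API_KEY and FREEPLAY_URL environment variables used in the full example below:

from freeplay import Freeplay
import os

fp_client = Freeplay(
    freeplay_api_key=os.getenv("FREEPLAY_API_KEY"),
    api_base=os.getenv("FREEPLAY_URL"),
)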

2. Fetch the prompt from Freeplay

Freeplay handles converting the prompt messages into the right format for you, so the fetched prompt can be passed straight to the model call.
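
For example, fetch the formatted prompt for a template in a given environment and fill in its variables. A minimal sketch, assuming the fp_client from step 1; the project ID here is a placeholder, and the template and variable names match the full example below:

prompt_vars = {"question": "What is the capital of France?"}
formatted_prompt = fp_client.prompts.get_formatted(
    project_id="your-project-id",
    template_name="BasicTriviaBot",
    environment="dev",
    variables=prompt_vars,
)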

3. Call the LLM via LiteLLM

Route the LLM call through LiteLLM, using the provider and model configured on the prompt in Freeplay.
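
A minimal sketch, assuming the formatted_prompt from step 2: LiteLLM takes the provider and model as a single "provider/model" string and accepts the messages exactly as Freeplay formatted them.

from litellm import completion

response = completion(
    model=f"{formatted_prompt.prompt_info.provider}/{formatted_prompt.prompt_info.model}",
    messages=formatted_prompt.llm_prompt,
)
answer = response.choices[0].message.content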

4. Record to Freeplay

Record the interaction back to Freeplay
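
Create a session and record the completed interaction: the full message history, the input variables, the prompt version, timing, and response status. A minimal sketch, assuming fp_client, project_id, formatted_prompt, prompt_vars, response, and the start/end timestamps from the previous steps:

from freeplay import RecordPayload, ResponseInfo, CallInfo

session = fp_client.sessions.create()
fp_client.recordings.create(
    RecordPayload(
        project_id=project_id,
        all_messages=formatted_prompt.all_messages(response.choices[0].message),
        inputs=prompt_vars,
        session_info=session,
        prompt_version_info=formatted_prompt.prompt_info,
        call_info=CallInfo.from_prompt_info(formatted_prompt.prompt_info, start, end),
        response_info=ResponseInfo(is_complete=True),
    )
)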

Examples

from freeplay import Freeplay, RecordPayload, ResponseInfo, CallInfo
from dotenv import load_dotenv
from litellm import completion
import os
import time

load_dotenv("../.env")

fp_client = Freeplay(
    freeplay_api_key=os.getenv("FREEPLAY_API_KEY"),
    api_base=os.getenv("FREEPLAY_URL"),
)

user_question = "What is the capital of France?"
project_id = "5688ebaf-7f22-4d5d-b9bb-bc715c8faabb"

# Fetch the formatted prompt from Freeplay
prompt_vars = {"question": user_question}
formatted_prompt = fp_client.prompts.get_formatted(
    project_id=project_id,
    template_name="BasicTriviaBot",
    environment="dev",
    variables=prompt_vars,
)

start = time.time()

# Route the LLM call through LiteLLM, using the provider/model configured in Freeplay
response = completion(
    model=f"{formatted_prompt.prompt_info.provider}/{formatted_prompt.prompt_info.model}",
    messages=formatted_prompt.llm_prompt,
)
msg = response.choices[0].message
answer = msg.content

end = time.time()

# Record the interaction back to Freeplay
session = fp_client.sessions.create()
fp_client.recordings.create(
    RecordPayload(
        project_id=project_id,
        all_messages=formatted_prompt.all_messages(msg),
        inputs=prompt_vars,
        session_info=session,
        prompt_version_info=formatted_prompt.prompt_info,
        call_info=CallInfo.from_prompt_info(formatted_prompt.prompt_info, start, end),
        response_info=ResponseInfo(is_complete=True),
    )
)

print(f"Question: {user_question}\nAnswer: {answer}")