Route LLM calls through LiteLLM while recording to Freeplay.

Documentation index: fetch the complete documentation index at https://docs.freeplay.ai/llms.txt and use it to discover all available pages before exploring further.
from freeplay import Freeplay, RecordPayload, CallInfo, SessionInfo
from openai import OpenAI
from anthropic import Anthropic
from dotenv import load_dotenv
import os
import time
from litellm import completion
# Load configuration from the repo-level .env file.
load_dotenv("../.env")

# Read Freeplay credentials from the environment and construct the client.
_fp_api_key = os.getenv("FREEPLAY_API_KEY")
_fp_api_base = os.getenv("FREEPLAY_URL")
fp_client = Freeplay(freeplay_api_key=_fp_api_key, api_base=_fp_api_base)
# The end-user question the trivia bot should answer.
user_question = "What is the capital of France?"

# Bind the template variables and fetch the formatted "BasicTriviaBot"
# prompt from the "dev" environment in Freeplay.
prompt_vars = dict(question=user_question)
formatted_prompt = fp_client.prompts.get_formatted(
    project_id="5688ebaf-7f22-4d5d-b9bb-bc715c8faabb",
    template_name="BasicTriviaBot",
    environment="dev",
    variables=prompt_vars,
)
# Time the LLM round-trip so latency can be attached to the recording.
start = time.time()

# LiteLLM routes on a "provider/model" string; both pieces come from the
# prompt's metadata in Freeplay.
_litellm_model = f"{formatted_prompt.prompt_info.provider}/{formatted_prompt.prompt_info.model}"
response = completion(model=_litellm_model, messages=formatted_prompt.llm_prompt)

# Pull the assistant message and its text content out of the response.
msg = response.choices[0].message
answer = msg.content
end = time.time()
# Record the completed interaction to Freeplay so it appears in session traces.
# BUG FIX: `project_id` was previously an undefined name (NameError at runtime);
# bind it to the same project id used for the prompt fetch above.
project_id = "5688ebaf-7f22-4d5d-b9bb-bc715c8faabb"

# Every recording must be attached to a session.
session = fp_client.sessions.create()
fp_client.recordings.create(
    RecordPayload(
        project_id=project_id,
        # Full conversation: the formatted prompt messages plus the new reply.
        all_messages=formatted_prompt.all_messages(msg),
        inputs=prompt_vars,
        session_info=session,
        prompt_version_info=formatted_prompt.prompt_info,
        # Attach model/provider metadata and the measured latency window.
        call_info=CallInfo.from_prompt_info(formatted_prompt.prompt_info, start, end),
    )
)

print(f'Question {user_question} \n Answer {answer}')