1. Create client
2. Define a Call and Record Helper
3. Pass through Trace Info to Record
   Pass trace info through on the record call to Freeplay.
4. Loop over questions and record to traces
5. Create a Trace Object
   Create a Trace Object, including a user display input message.
6. Close and Record the Trace
   Record the Trace with the final display output to close the trace.

Examples
import os
import time
from typing import Optional

from anthropic import Anthropic, NotGiven
from freeplay import Freeplay, RecordPayload, ResponseInfo, CallInfo, SessionInfo, TraceInfo

# 1. Create the Freeplay and Anthropic clients
fpclient = Freeplay(
    freeplay_api_key=os.environ['FREEPLAY_API_KEY'],
    api_base=f"{os.environ['FREEPLAY_API_URL']}/api"
)
project_id = os.environ['FREEPLAY_PROJECT_ID']

client = Anthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY")
)
# 2. Define a call-and-record helper
def call_and_record(
    project_id: str,
    template_name: str,
    env: str,
    input_variables: dict,
    session_info: SessionInfo,
    trace_info: Optional[TraceInfo] = None
) -> dict:
    # Fetch and format the prompt template from Freeplay
    formatted_prompt = fpclient.prompts.get_formatted(
        project_id=project_id,
        template_name=template_name,
        environment=env,
        variables=input_variables
    )
    print(f"Ready for LLM: {formatted_prompt.llm_prompt}")

    # Call Anthropic and time the request
    start = time.time()
    completion = client.messages.create(
        system=formatted_prompt.system_content or NotGiven(),
        messages=formatted_prompt.llm_prompt,
        model=formatted_prompt.prompt_info.model,
        **formatted_prompt.prompt_info.model_parameters
    )
    end = time.time()

    llm_response = completion.content[0].text
    print("Completion: %s" % llm_response)

    # Append the assistant response to the prompt messages
    all_messages = formatted_prompt.all_messages(
        new_message={'role': 'assistant', 'content': llm_response}
    )

    call_info = CallInfo.from_prompt_info(formatted_prompt.prompt_info, start, end)
    response_info = ResponseInfo(
        # Treat a natural end of turn or a stop sequence as a complete response
        is_complete=completion.stop_reason in ('end_turn', 'stop_sequence')
    )

    # 3. Pass trace info through on the record call to Freeplay
    record_response = fpclient.recordings.create(
        RecordPayload(
            project_id=project_id,
            all_messages=all_messages,
            session_info=session_info,
            inputs=input_variables,
            prompt_version_info=formatted_prompt.prompt_info,
            call_info=call_info,
            response_info=response_info,
            trace_info=trace_info,
        )
    )
    return {'completion_id': record_response.completion_id, 'llm_response': llm_response}
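
# Aside (not part of the original walkthrough): trace_info defaults to None, so the
# same helper also records a plain, untraced completion -- just omit trace_info.
# Shown here against the same hypothetical 'my-anthropic-prompt' template.
untraced_session = fpclient.sessions.create()
untraced_result = call_and_record(
    project_id=project_id,
    template_name='my-anthropic-prompt',
    env='latest',
    input_variables={'question': 'what is sand?'},
    session_info=untraced_session.session_info,
)
print(f"Untraced completion id: {untraced_result['completion_id']}")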
# 4. Send 3 questions to the model, each encapsulated into its own trace
user_questions = ["answer life's most existential questions", "what is sand?", "how tall are lions?"]
session = fpclient.sessions.create()

for question in user_questions:
    # 5. Create a trace object, including the user's display input message
    trace_info = session.create_trace(input=question)

    bot_response = call_and_record(
        project_id=project_id,
        template_name='my-anthropic-prompt',
        env='latest',
        input_variables={'question': question},
        session_info=session.session_info,
        trace_info=trace_info
    )

    categorization_result = call_and_record(
        project_id=project_id,
        template_name='question-classifier',
        env='latest',
        input_variables={'question': question},
        session_info=session.session_info,
        trace_info=trace_info
    )

    # 6. Close the trace by recording its final display output
    trace_info.record_output(project_id, bot_response['llm_response'])
    print(f"Trace info id: {trace_info.trace_id}")

