
1. Set Up

  • Make sure you have your Azure Endpoint configured in Freeplay. See Using Freeplay -> Model Management for more details.
  • Load your environment variables.
  • Configure the Freeplay client, as shown in the sketch below.
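
A minimal sketch of this step (the environment variable names are illustrative; the full example below follows the same pattern):

from freeplay import Freeplay
from dotenv import load_dotenv
import os

# load credentials and the Freeplay base URL from a .env file
load_dotenv("../.env")

# instantiate the Freeplay client
fpClient = Freeplay(
    freeplay_api_key=os.getenv("FREEPLAY_KEY"),
    api_base=os.getenv("FREEPLAY_URL"),
)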

2. Fetch your Prompt

Fetch and format your Prompt Template from Freeplay.
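
For example, assuming a prompt template named "complement" in your project that takes feature and celebrity variables (the same template used in the full example below):

# fetch the template from the "latest" environment and fill in its variables
prompt_vars = {"feature": "eyes", "celebrity": "Oprah Winfrey"}
formatted_prompt = fpClient.prompts.get_formatted(
    project_id=project_id,
    template_name="complement",
    environment="latest",
    variables=prompt_vars,
)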

3. Configure Azure Client

All the details you need to configure your Azure OpenAI client can be found in the provider_info section of your FormattedPrompt object.
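
Because provider_info is returned as keyword arguments for the client, it can be unpacked straight into the Azure OpenAI constructor. A minimal sketch, continuing from the steps above:

azureClient = openai.AzureOpenAI(
    api_key=azure_api_key,
    api_version=API_VERSION_STRING,
    # endpoint and deployment details come from the prompt's provider_info
    **formatted_prompt.prompt_info.provider_info,
)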

4. Make your LLM Call

Make your call directly to Azure OpenAI, keying all details (model, messages, and model parameters) off of your FormattedPrompt object.
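
A minimal sketch, continuing from the previous step. Timing the call is optional but lets you pass latency along when you record to Freeplay:

import time

start = time.time()
completion = azureClient.chat.completions.create(
    model=formatted_prompt.prompt_info.model,
    messages=formatted_prompt.llm_prompt,
    **formatted_prompt.prompt_info.model_parameters,
)
end = time.time()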

5. Record to Freeplay

Update your messages with the model's response and record the interaction back to Freeplay.
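
A minimal sketch: append the assistant response to the formatted prompt's messages and create a session. The complete RecordPayload, including call and response metadata, is shown in the example below:

# append the assistant's reply to the prompt messages
all_messages = formatted_prompt.all_messages(
    {"role": completion.choices[0].message.role,
     "content": completion.choices[0].message.content}
)

# create a session to associate the recording with
session = fpClient.sessions.create()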

Examples

from freeplay import Freeplay, RecordPayload, ResponseInfo, CallInfo, SessionInfo
import openai
from dotenv import load_dotenv
import os
import time

load_dotenv("../.env")
project_id = os.getenv("FREEPLAY_PROJECT_ID")
freeplay_key = os.getenv("FREEPLAY_KEY")
freeplay_api_base = os.getenv("FREEPLAY_URL")
azure_api_key = os.getenv("AZURE_API_KEY")
API_VERSION_STRING = '2024-02-15-preview'

# instantiate freeplay client
fpClient = Freeplay(
    freeplay_api_key=freeplay_key,
    api_base=freeplay_api_base,
)

# get the formatted prompt
prompt_vars = {"feature": "eyes", "celebrity": "Oprah Winfrey"}
formatted_prompt = fpClient.prompts.get_formatted(project_id=project_id,
                                                  template_name="complement",
                                                  environment="latest",
                                                  variables=prompt_vars)

# configure the azure openai client
azureClient = openai.AzureOpenAI(
    api_key=azure_api_key,
    api_version=API_VERSION_STRING,
    # provider_info from the prompt template includes the endpoint, deployment ID, and model
    **formatted_prompt.prompt_info.provider_info
)

# make your llm call
s = time.time()
completion = azureClient.chat.completions.create(
    model=formatted_prompt.prompt_info.model,
    messages=formatted_prompt.llm_prompt, # Note: casting may be required for formatting: cast(List[ChatCompletionMessageParam], formatted_prompt.llm_prompt)
    **formatted_prompt.prompt_info.model_parameters
)
e = time.time()

# Record to freeplay
# update your messages
all_messages = formatted_prompt.all_messages(
    {'role': completion.choices[0].message.role,
     'content': completion.choices[0].message.content}
)

# create a session
session = fpClient.sessions.create()

# record your llm call to freeplay
payload = RecordPayload(
    project_id=project_id,
    all_messages=all_messages,
    inputs=prompt_vars,
    session_info=session,
    prompt_version_info=formatted_prompt.prompt_info,
    call_info=CallInfo.from_prompt_info(formatted_prompt.prompt_info, start_time=s, end_time=e),
    response_info=ResponseInfo(
        is_complete=completion.choices[0].finish_reason == 'stop'
    )
)
completion_info = fpClient.recordings.create(payload)