Documentation Index
Fetch the complete documentation index at: https://docs.freeplay.ai/llms.txt
Use this file to discover all available pages before exploring further.
This guide shows how to handle streaming LLM responses while simultaneously recording the completions to Freeplay for observability.
Documentation Index
Fetch the complete documentation index at: https://docs.freeplay.ai/llms.txt
Use this file to discover all available pages before exploring further.
"""Stream an LLM completion through Freeplay while it records the call for observability.

Requires four environment variables (raises KeyError if any is missing):
    FREEPLAY_API_KEY       -- Freeplay API key
    OPENAI_API_KEY         -- OpenAI API key, passed through to the provider
    FREEPLAY_CUSTOMER_NAME -- subdomain used to build the per-customer API base URL
    FREEPLAY_PROJECT_ID    -- Freeplay project containing the "album_bot" template
"""
import os

from freeplay import Freeplay
from freeplay.provider_config import ProviderConfig, OpenAIConfig

# Fail fast with KeyError if any required configuration is absent.
FREEPLAY_API_KEY = os.environ["FREEPLAY_API_KEY"]
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
FREEPLAY_CUSTOMER_NAME = os.environ["FREEPLAY_CUSTOMER_NAME"]
FREEPLAY_PROJECT_ID = os.environ["FREEPLAY_PROJECT_ID"]

# The client talks to the customer-specific Freeplay API host and forwards
# provider credentials so Freeplay can proxy the underlying OpenAI call.
fp_client = Freeplay(
    provider_config=ProviderConfig(openai=OpenAIConfig(OPENAI_API_KEY)),
    freeplay_api_key=FREEPLAY_API_KEY,
    api_base=f"https://{FREEPLAY_CUSTOMER_NAME}.freeplay.ai/api",
)

# Request a streamed completion for the "album_bot" prompt template,
# filling in its template variables. Freeplay records the completion
# server-side as the stream is consumed.
completion_stream = fp_client.get_completion_stream(
    project_id=FREEPLAY_PROJECT_ID,
    template_name="album_bot",
    variables={"pop_star": "Bruno Mars"},
)

# Print each streamed chunk as it arrives.
# NOTE(review): .strip() removes leading/trailing whitespace from every
# chunk, which can drop meaningful spaces between tokens when chunks are
# concatenated — confirm this is intended for your output format.
for chunk in completion_stream:
    print(f"Chunk: {chunk.text.strip()}")