"""Lucidic AI example: streaming OpenAI chat completions are tracked automatically."""
import lucidicai as lai
from openai import OpenAI

# Register the OpenAI provider so Lucidic instruments its client calls.
lai.init(providers=["openai"])
client = OpenAI()

# Streaming responses are also tracked automatically
stream = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "Tell me a story"}],
    stream=True,
)

# Print each streamed token as it arrives; delta.content is None on
# role/finish chunks, so guard before printing.
for chunk in stream:
    if chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")
"""Lucidic AI example: group LLM calls into a named step within a session."""
import lucidicai as lai
from openai import OpenAI

lai.init(
    session_name="chatbot_run",
    providers=["openai"],  # API key and agent ID from env vars
)
client = OpenAI()

lai.create_step(state="Research", goal="Generate research questions")

# All LLM calls are automatically tracked as events within this step
response = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "What are key areas in AI safety research?"}],
)
response = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "What are the main challenges in each area?"}],
)

# Close the step so subsequent calls are no longer attributed to it.
lai.end_step()