# Setup assumed/required for this snippet: the trace_event decorator from the
# tracely package, an OpenAI client, and the time module. Tracing itself is
# expected to be initialized earlier (e.g., via tracely's init_tracing()).
import time

from openai import OpenAI
from tracely import trace_event

client = OpenAI()  # reads OPENAI_API_KEY from the environment

question_list = [
    "What is Evidently Python library?",
    "What is LLM observability?",
    "How is MLOps different from LLMOps?"
]
@trace_event()  # records each call to this function as a trace event
def pseudo_assistant(question):
    system_prompt = "You are a helpful assistant. Please answer the following question concisely."
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": question},
    ]
    completion = client.chat.completions.create(model="gpt-4o-mini", messages=messages)
    return completion.choices[0].message.content
# Iterate over the list of questions and pass each to the assistant
for question in question_list:
    response = pseudo_assistant(question=question)
    time.sleep(1)
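# Each traced call now appears as a separate event at the tracing destination
# configured during setup (for example, an Evidently Cloud tracing dataset).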