Technology•March 19, 2024
The Astra Assistants API Now Supports Streaming: Because Who Wants to Wait?
from openai import OpenAI
from streaming_assistants import patch

# Wrap the stock OpenAI client so calls are routed through Astra Assistants.
client = patch(OpenAI())
# ... (snippet truncated in the original article)
# Create a run for the thread and stream the assistant's reply as it is generated.
# Fix: dropped the pointless `f` prefix on a format string with no placeholders.
print("creating run")
with client.beta.threads.runs.create_and_stream(
    thread_id=thread.id,
    assistant_id=assistant.id,
) as stream:
    # text_deltas yields only the incremental text chunks of the response.
    for text in stream.text_deltas:
        print(text, end="", flush=True)
# Terminate the streamed output with a newline.
print()
# Create a run and iterate the raw event stream (not just the text deltas).
# Fix: the original loop bound `event` but printed the undefined name `text`,
# which raises NameError on the first event — print `event` instead.
print("creating run")
with client.beta.threads.runs.create_and_stream(
    thread_id=thread.id,
    assistant_id=assistant.id,
) as stream:
    for event in stream:
        print(event, end="", flush=True)
# Terminate the streamed output with a newline.
print()
class EventHandler(AssistantEventHandler):
    """Stream handler that answers tool calls and streams the follow-up reply.

    When the assistant requests a tool call, `on_tool_call_done` submits a
    (hard-coded, example) tool output back to the run and streams the
    assistant's continuation using a fresh handler instance.
    """

    def __init__(self):
        super().__init__()

    @override
    def on_exception(self, exception: Exception):
        # Log and re-raise so streaming errors are never silently swallowed.
        logger.error(exception)
        raise exception

    @override
    def on_tool_call_done(self, toolCall: ToolCall):
        logger.debug(toolCall)
        # Example output only — in a real application, dispatch to the actual
        # function named by the tool call and return its result here.
        tool_outputs = [
            {"tool_call_id": toolCall.id, "output": "75 degrees F and sunny"},
        ]
        # Submit the outputs and stream the assistant's continuation; a new
        # EventHandler handles any nested tool calls in that continuation.
        with client.beta.threads.runs.submit_tool_outputs_stream(
            thread_id=self.current_run.thread_id,
            run_id=self.current_run.id,
            tool_outputs=tool_outputs,
            event_handler=EventHandler(),
        ) as stream:
            for text in stream.text_deltas:
                print(text, end="", flush=True)
        print()


# Kick off the run with the handler; block until the stream is fully consumed.
with client.beta.threads.runs.create_and_stream(
    thread_id=thread.id,
    assistant_id=assistant.id,
    event_handler=EventHandler(),
) as stream:
    stream.until_done()