## Streaming
Streaming allows you to receive the response from the model chunk by chunk, which is useful for real-time applications.
**JavaScript (Node SDK):**

```javascript
const stream = await client.chat.completions.create({
  model: 'gpt-4',
  messages: [{ role: 'user', content: 'Write a long poem.' }],
  stream: true,
});

for await (const chunk of stream) {
  process.stdout.write(chunk.choices[0]?.delta?.content || '');
}
```

**Python:**

```python
stream = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "Write a long poem."}],
    stream=True,
)

for chunk in stream:
    if chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="")
```