import { ResponseTextConfig, type ParsedResponse, type ResponseCreateParamsBase, type ResponseStreamEvent } from "../../resources/responses/responses.js";
import * as Core from "../../core.js";
import OpenAI from "../../index.js";
import { type BaseEvents, EventStream } from "../EventStream.js";
import { type ResponseFunctionCallArgumentsDeltaEvent, type ResponseTextDeltaEvent } from "./EventTypes.js";
import { ParseableToolsParams } from "../ResponsesParser.js";
export type ResponseStreamParams = ResponseCreateAndStreamParams | ResponseStreamByIdParams;
export type ResponseCreateAndStreamParams = Omit<ResponseCreateParamsBase, 'stream'> & {
    stream?: true;
};
export type ResponseStreamByIdParams = {
    /**
     * The ID of the response to stream.
     */
    response_id: string;
    /**
     * If provided, the stream will start after the event with the given sequence number.
     */
    starting_after?: number;
    /**
     * Configuration options for a text response from the model. Can be plain text or
     * structured JSON data. Learn more:
     *
     * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
     * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs)
     */
    text?: ResponseTextConfig;
    /**
     * An array of tools the model may call while generating a response. When continuing a stream, provide
     * the same tools as the original request.
     */
    tools?: ParseableToolsParams;
};
type ResponseEvents = BaseEvents & Omit<{
    [K in ResponseStreamEvent['type']]: (event: Extract<ResponseStreamEvent, {
        type: K;
    }>) => void;
}, 'response.output_text.delta' | 'response.function_call_arguments.delta'> & {
    event: (event: ResponseStreamEvent) => void;
    'response.output_text.delta': (event: ResponseTextDeltaEvent) => void;
    'response.function_call_arguments.delta': (event: ResponseFunctionCallArgumentsDeltaEvent) => void;
};
export type ResponseStreamingParams = Omit<ResponseCreateParamsBase, 'stream'> & {
    stream?: true;
};
export declare class ResponseStream<ParsedT = null> extends EventStream<ResponseEvents> implements AsyncIterable<ResponseStreamEvent> {
    #private;
    constructor(params: ResponseStreamingParams | null);
    static createResponse<ParsedT>(client: OpenAI, params: ResponseStreamParams, options?: Core.RequestOptions): ResponseStream<ParsedT>;
    protected _createOrRetrieveResponse(client: OpenAI, params: ResponseStreamParams, options?: Core.RequestOptions): Promise<ParsedResponse<ParsedT>>;
    [Symbol.asyncIterator](this: ResponseStream<ParsedT>): AsyncIterator<ResponseStreamEvent>;
    /**
     * @returns a promise that resolves with the final Response, or rejects
     * if an error occurred or the stream ended prematurely without producing a Response.
     */
    finalResponse(): Promise<ParsedResponse<ParsedT>>;
}
export {};
//# sourceMappingURL=ResponseStream.d.ts.map
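/*
 * Usage sketch (illustrative only, not part of the generated declarations).
 * It assumes the SDK's `client.responses.stream(...)` helper, which returns a
 * `ResponseStream`; the event name, handler payload, and `finalResponse()` come
 * from the `ResponseEvents` and class declarations above. Model and prompt are
 * placeholder values.
 *
 *   import OpenAI from "openai";
 *
 *   const client = new OpenAI();
 *
 *   async function main() {
 *     const stream = client.responses.stream({
 *       model: "gpt-4o",
 *       input: "Write a one-sentence summary of streaming APIs.",
 *     });
 *
 *     // Typed via ResponseEvents: this handler receives a ResponseTextDeltaEvent.
 *     stream.on("response.output_text.delta", (event) => {
 *       process.stdout.write(event.delta);
 *     });
 *
 *     // Resolves with the final ParsedResponse once the stream completes.
 *     const response = await stream.finalResponse();
 *     console.log("\n", response.id);
 *   }
 *
 *   main();
 */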