Commit

add openAI namespace for nested beta api exports and do top level exports otherwise (#5)

* add openAI namespace for nested beta api exports and do top level exports otherwise

* bump version
depombo authored Aug 28, 2024
1 parent 18deedd commit f5bb9ad
Showing 2 changed files with 68 additions and 73 deletions.
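
To illustrate the export layout this commit introduces, here is a minimal consumer-side sketch: top-level types are imported straight off the package, while nested Beta API types are reached through the merged OpenAI namespace. The type names are taken from the diff below; the import style and the alias names are assumptions.

import OpenAI, { ChatCompletion, Model } from 'openai-react-native';

// Top-level re-exports come straight off the package entry point.
type Completion = ChatCompletion;
type AvailableModel = Model;

// Nested Beta API types hang off the OpenAI namespace merged with the class.
type Thread = OpenAI.Beta.Thread;
type Run = OpenAI.Beta.Threads.Run;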
package.json (2 changes: 1 addition & 1 deletion)
@@ -1,6 +1,6 @@
{
"name": "openai-react-native",
"version": "0.3.2",
"version": "0.3.3",
"description": "OpenAI React Native API Client without polyfills",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
src/index.ts (139 changes: 67 additions & 72 deletions)
@@ -5,58 +5,28 @@ import {
type ClientOptions as ClientOptionsNode,
} from 'openai';

export type onStreamError = (error: any) => void;
export type onStreamOpen = () => void;
export type onStreamDone = () => void;
export type onStreamOptionalEvents = {
onError?: onStreamError;
onOpen?: onStreamOpen;
onDone?: onStreamDone;
// export types from this library
export type onError = (error: any) => void;
export type onOpen = () => void;
export type onDone = () => void;
export type onEvents = {
onError?: onError;
onOpen?: onOpen;
onDone?: onDone;
};
export interface ClientOptions extends ClientOptionsNode {
apiKey: string;
baseURL: string;
}
// export types from OpenAINode
export namespace Chat {
export namespace Completions {
export import ChatCompletion = OpenAINode.Chat.Completions.ChatCompletion;
export type onStreamData = (
data: OpenAINode.Chat.Completions.ChatCompletion
) => void; // specific to this library
}
}
export import ChatCompletionCreateParamsNonStreaming = OpenAINode.ChatCompletionCreateParamsNonStreaming;

export namespace Moderation {
export import Categories = OpenAINode.Moderation.Categories;
export interface Moderation extends OpenAINode.Moderation {}
}
export type onChatCompletionData = (data: ChatCompletion) => void;
export type onThreadRunData = (data: OpenAI.Beta.Threads.Run) => void;
// export top level types from OpenAINode
export import Moderation = OpenAINode.Moderation;
export import ModerationCreateResponse = OpenAINode.ModerationCreateResponse;
export import ModerationCreateParams = OpenAINode.ModerationCreateParams;
export import Model = OpenAINode.Model;
export namespace Beta {
export import ThreadCreateParams = OpenAINode.Beta.ThreadCreateParams;
export import Thread = OpenAINode.Beta.Thread;
export import ThreadUpdateParams = OpenAINode.Beta.ThreadUpdateParams;
export import ThreadCreateAndRunParamsNonStreaming = OpenAINode.Beta.ThreadCreateAndRunParamsNonStreaming;
export import ThreadDeleted = OpenAINode.Beta.ThreadDeleted;
export import Assistant = OpenAINode.Beta.Assistant;
export namespace Threads {
export import Run = OpenAINode.Beta.Threads.Run;
export namespace Runs {
export type RunCreateParamsNonStreaming =
OpenAINode.Beta.Threads.Runs.RunCreateParamsNonStreaming;
export type onStreamData = (data: OpenAINode.Beta.Threads.Run) => void; // specific to this library
}
export import Message = OpenAINode.Beta.Threads.Message;
export namespace Messages {
export import MessageCreateParams = OpenAINode.Beta.Threads.Messages.MessageCreateParams;
export import MessageListParams = OpenAINode.Beta.Threads.Messages.MessageListParams;
export import MessageDeleted = OpenAINode.Beta.Threads.Messages.MessageDeleted;
}
}
}
export import ChatCompletionCreateParamsNonStreaming = OpenAINode.ChatCompletionCreateParamsNonStreaming;
export import ChatCompletion = OpenAINode.ChatCompletion;
export import FileObject = OpenAINode.FileObject;
export import FileContent = OpenAINode.FileContent;
export import FileDeleted = OpenAINode.FileDeleted;
@@ -85,48 +55,49 @@ export class OpenAI {

public beta = {
assistants: {
list: async (): Promise<Beta.Assistant[]> =>
list: async (): Promise<OpenAI.Beta.Assistant[]> =>
(await this.client.beta.assistants.list()).data,
},
threads: {
create: async (body?: Beta.ThreadCreateParams): Promise<Beta.Thread> =>
this.client.beta.threads.create(body),
retrieve: async (threadId: string): Promise<Beta.Thread> =>
create: async (
body?: OpenAI.Beta.ThreadCreateParams
): Promise<OpenAI.Beta.Thread> => this.client.beta.threads.create(body),
retrieve: async (threadId: string): Promise<OpenAI.Beta.Thread> =>
this.client.beta.threads.retrieve(threadId),
update: async (
threadId: string,
body: Beta.ThreadUpdateParams
): Promise<Beta.Thread> =>
body: OpenAI.Beta.ThreadUpdateParams
): Promise<OpenAI.Beta.Thread> =>
this.client.beta.threads.update(threadId, body),
del: async (threadId: string): Promise<Beta.ThreadDeleted> =>
del: async (threadId: string): Promise<OpenAI.Beta.ThreadDeleted> =>
this.client.beta.threads.del(threadId),
createAndRunPoll: async (
body: Beta.ThreadCreateAndRunParamsNonStreaming
): Promise<Beta.Threads.Run> =>
body: OpenAI.Beta.ThreadCreateAndRunParamsNonStreaming
): Promise<OpenAI.Beta.Threads.Run> =>
this.client.beta.threads.createAndRunPoll(body),
messages: {
list: async (
threadId: string,
query?: Beta.Threads.Messages.MessageListParams
): Promise<Beta.Threads.Message[]> =>
query?: OpenAI.Beta.Threads.Messages.MessageListParams
): Promise<OpenAI.Beta.Threads.Message[]> =>
(await this.client.beta.threads.messages.list(threadId, query)).data,
del: async (
threadId: string,
messageId: string
): Promise<Beta.Threads.Messages.MessageDeleted> =>
): Promise<OpenAI.Beta.Threads.Messages.MessageDeleted> =>
await this.client.beta.threads.messages.del(threadId, messageId),
create: async (
threadId: string,
body: Beta.Threads.Messages.MessageCreateParams
): Promise<Beta.Threads.Message> =>
body: OpenAI.Beta.Threads.Messages.MessageCreateParams
): Promise<OpenAI.Beta.Threads.Message> =>
await this.client.beta.threads.messages.create(threadId, body),
},
runs: {
stream: (
threadId: string,
body: Beta.Threads.Runs.RunCreateParamsNonStreaming,
onData: Chat.Completions.onStreamData,
callbacks: onStreamOptionalEvents
body: OpenAI.Beta.Threads.Runs.RunCreateParamsNonStreaming,
onData: onThreadRunData,
callbacks: onEvents
): void =>
this._stream(
`${this.baseURL}/threads/${threadId}/runs`,
@@ -152,19 +123,18 @@ export class OpenAI {
*/
create: async (
body: ChatCompletionCreateParamsNonStreaming
): Promise<Chat.Completions.ChatCompletion> =>
this.client.chat.completions.create(body),
): Promise<ChatCompletion> => this.client.chat.completions.create(body),
/**
* Create a chat completion stream using the OpenAI API.
* @param {ChatCompletionCreateParamsNonStreaming} params - Parameters for the OpenAI chat completion API since streaming is assumed.
* @param {Chat.Completions.onStreamData} onData - Callback to handle incoming messages.
* @param {onStreamOptionalEvents} callbacks - Object containing optional callback functions.
* @param {onChatCompletionData} onData - Callback to handle incoming messages.
* @param {onEvents} callbacks - Object containing optional callback functions.
* @returns {void}
*/
stream: (
params: ChatCompletionCreateParamsNonStreaming,
onData: Chat.Completions.onStreamData,
callbacks: onStreamOptionalEvents
onData: onChatCompletionData,
callbacks: onEvents
): void =>
this._stream(
`${this.baseURL}/chat/completions`,
@@ -217,8 +187,8 @@ export class OpenAI {
* Connect to a given OpenAI API endpoint and start streaming.
* @param {string} url - The API endpoint to connect to.
* @param {OpenAIParams} params - The parameters to send with the API request.
* @param {ChatCompletionCallback | RunCallback} onData - Callback to handle incoming data.
* @param {onStreamOptionalEvents} callbacks - Object containing callback functions.
* @param {onChatCompletionData | onThreadRunData} onData - Callback to handle incoming data.
* @param {onEvents} callbacks - Object containing callback functions.
* @param {onError} [callbacks.onError] - Callback to handle errors.
* @param {onOpen} [callbacks.onOpen] - Callback to handle when the connection opens.
* @param {onDone} [callbacks.onDone] - Callback to handle when the stream ends.
@@ -228,9 +198,9 @@
url: string,
params:
| ChatCompletionCreateParamsNonStreaming
| Beta.Threads.Runs.RunCreateParamsNonStreaming,
onData: Chat.Completions.onStreamData | Beta.Threads.Runs.onStreamData,
callbacks: onStreamOptionalEvents
| OpenAI.Beta.Threads.Runs.RunCreateParamsNonStreaming,
onData: onChatCompletionData | onThreadRunData,
callbacks: onEvents
) {
const { onError, onOpen, onDone } = callbacks;
const requestBody = { ...params, stream: true };
@@ -272,4 +242,29 @@ export class OpenAI {
}
}

// export nested types from OpenAINode Beta API
export namespace OpenAI {
export namespace Beta {
export import ThreadCreateParams = OpenAINode.Beta.ThreadCreateParams;
export import Thread = OpenAINode.Beta.Thread;
export import ThreadUpdateParams = OpenAINode.Beta.ThreadUpdateParams;
export import ThreadCreateAndRunParamsNonStreaming = OpenAINode.Beta.ThreadCreateAndRunParamsNonStreaming;
export import ThreadDeleted = OpenAINode.Beta.ThreadDeleted;
export import Assistant = OpenAINode.Beta.Assistant;
export namespace Threads {
export import Run = OpenAINode.Beta.Threads.Run;
export namespace Runs {
export type RunCreateParamsNonStreaming =
OpenAINode.Beta.Threads.Runs.RunCreateParamsNonStreaming;
}
export import Message = OpenAINode.Beta.Threads.Message;
export namespace Messages {
export import MessageCreateParams = OpenAINode.Beta.Threads.Messages.MessageCreateParams;
export import MessageListParams = OpenAINode.Beta.Threads.Messages.MessageListParams;
export import MessageDeleted = OpenAINode.Beta.Threads.Messages.MessageDeleted;
}
}
}
}

export default OpenAI;
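
For context, a hedged usage sketch of the streamed chat completion API shown above. The client options follow the exported ClientOptions interface, and the stream call shape follows the stream signature in this diff; the client.chat.completions nesting, the constructor taking ClientOptions, the model name, the message contents, and the callback bodies are illustrative assumptions rather than confirmed details of this commit.

import OpenAI from 'openai-react-native';

const client = new OpenAI({
  apiKey: 'sk-...', // assumption: supply your own key
  baseURL: 'https://api.openai.com/v1',
});

client.chat.completions.stream(
  { model: 'gpt-4o-mini', messages: [{ role: 'user', content: 'Hello!' }] },
  (completion) => {
    // onData: invoked for each ChatCompletion payload as it streams in
    console.log(completion);
  },
  {
    onOpen: () => console.log('stream opened'),
    onDone: () => console.log('stream finished'),
    onError: (error) => console.error('stream error', error),
  }
);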
