fix: max tokens
himself65 committed May 8, 2024
1 parent 1a45b44 commit f1906bf
Showing 1 changed file with 14 additions and 4 deletions.
packages/core/src/ChatHistory.ts (14 additions, 4 deletions)
@@ -65,6 +65,13 @@ export class SimpleChatHistory extends ChatHistory {
   }
 }
 
+export type SummaryChatHistoryOptions = {
+  /**
+   * Some LLM instances (like Ollama) might not have maxTokens metadata, so you should set it manually.
+   */
+  maxTokens: number;
+};
+
 export class SummaryChatHistory extends ChatHistory {
   /**
    * Tokenizer function that converts text to tokens,
@@ -77,20 +84,23 @@ export class SummaryChatHistory extends ChatHistory {
   summaryPrompt: SummaryPrompt;
   llm: LLM;
   private messagesBefore: number;
+  readonly #maxTokens: number;
 
-  constructor(init?: Partial<SummaryChatHistory>) {
+  constructor(
+    init?: Partial<SummaryChatHistory> & Partial<SummaryChatHistoryOptions>,
+  ) {
     super();
     this.messages = init?.messages ?? [];
     this.messagesBefore = this.messages.length;
     this.summaryPrompt = init?.summaryPrompt ?? defaultSummaryPrompt;
     this.llm = init?.llm ?? new OpenAI();
-    if (!this.llm.metadata.maxTokens) {
+    if (!this.llm.metadata.maxTokens && !init?.maxTokens) {
       throw new Error(
         "LLM maxTokens is not set. Needed so the summarizer can respect the context window size of the LLM.",
       );
     }
-    this.tokensToSummarize =
-      this.llm.metadata.contextWindow - this.llm.metadata.maxTokens;
+    this.#maxTokens = (this.llm.metadata.maxTokens ?? init?.maxTokens)!;
+    this.tokensToSummarize = this.llm.metadata.contextWindow - this.#maxTokens;
     if (this.tokensToSummarize < this.llm.metadata.contextWindow * 0.25) {
       throw new Error(
         "The number of tokens that triggers the summarize process is less than 25% of the context window. Try lowering maxTokens or use a model with a larger context window.",
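The new SummaryChatHistoryOptions.maxTokens exists for LLMs such as Ollama whose metadata does not report maxTokens. A minimal usage sketch follows (the Ollama import and its constructor options are assumptions based on the package's exports at the time, not part of this diff):

  import { Ollama, SummaryChatHistory } from "llamaindex";

  // Ollama does not expose maxTokens in its metadata, so pass it
  // explicitly; without it the constructor throws.
  const history = new SummaryChatHistory({
    llm: new Ollama({ model: "llama3" }),
    maxTokens: 512, // tokens reserved for the model's reply
  });

Summarization then triggers once the stored messages exceed contextWindow - maxTokens tokens, and the constructor still rejects configurations where that threshold falls below 25% of the context window.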
