Skip to content

Commit

Permalink
feat: upgrade oai and remove completeNextWords
Browse files Browse the repository at this point in the history
  • Loading branch information
transmissions11 committed Jun 11, 2024
1 parent f4eda81 commit 642fd0b
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 136 deletions.
73 changes: 29 additions & 44 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
"framer-motion": "^9.0.4",
"highlightjs-solidity": "^2.0.6",
"mixpanel-browser": "^2.46.0",
"openai-streams": "^4.2.0",
"openai-streams": "^6.2.0",
"re-resizable": "^6.9.9",
"react": "^18.2.0",
"react-beforeunload": "^2.5.3",
Expand Down
89 changes: 4 additions & 85 deletions src/components/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -527,87 +527,6 @@ function App() {
if (MIXPANEL_TOKEN) mixpanel.track("Submitted Prompt"); // KPI
};

// Requests a raw text completion that continues the selected node's lineage,
// streaming generated tokens into the selected node as they arrive.
// Fire-and-forget: returns immediately; streaming happens in a background IIFE.
const completeNextWords = () => {
  // Snapshot current state first — presumably so the streamed edit is undoable;
  // TODO(review): confirm takeSnapshot semantics against its definition.
  takeSnapshot();

  const temp = settings.temp;

  // lineage[0] is treated as the selected node itself (leaf-first ordering).
  // NOTE(review): ordering assumed from this usage — verify selectedNodeLineage.
  const lineage = selectedNodeLineage;
  const selectedNodeId = lineage[0].id;

  const streamId = generateStreamId();

  // Set the node's streamId so it will accept the incoming text.
  setNodes((nodes) => setFluxNodeStreamId(nodes, { id: selectedNodeId, streamId }));

  (async () => {
    // TODO: Stop sequences for user/assistant/etc?
    // TODO: Select between instruction and auto raw base models?
    const stream = await OpenAI(
      "completions",
      {
        // TODO: Allow customizing.
        model: "text-davinci-003",
        temperature: temp,
        prompt: promptFromLineage(lineage, settings),
        max_tokens: 250,
        stop: ["\n\n", "assistant:", "user:"],
      },
      // "raw" mode: chunks arrive as undecoded bytes of JSON, decoded below.
      { apiKey: apiKey!, mode: "raw" }
    );

    const DECODER = new TextDecoder();

    // Used both to stop consumption mid-stream (stale streamId) and to record
    // why the loop ended, via signal.reason.
    const abortController = new AbortController();

    for await (const chunk of yieldStream(stream, abortController)) {
      if (abortController.signal.aborted) break;

      try {
        // Each chunk is expected to be a UTF-8 encoded JSON completion delta.
        const decoded = JSON.parse(DECODER.decode(chunk));

        if (decoded.choices === undefined)
          throw new Error(
            "No choices in response. Decoded response: " + JSON.stringify(decoded)
          );

        const choice: CreateCompletionResponseChoicesInner = decoded.choices[0];

        setNodes((newerNodes) => {
          try {
            return appendTextToFluxNodeAsGPT(newerNodes, {
              id: selectedNodeId,
              text: choice.text ?? UNDEFINED_RESPONSE_STRING,
              streamId, // This will cause a throw if the streamId has changed.
            });
          } catch (e: any) {
            // If the stream id does not match,
            // it is stale and we should abort.
            abortController.abort(e.message);

            return newerNodes;
          }
        });
      } catch (err) {
        // Malformed/undecodable chunks are logged and skipped; streaming continues.
        console.error(err);
      }
    }

    // If the stream wasn't aborted or was aborted due to a cancelation.
    // (A stale-streamId abort deliberately skips this reset: a newer stream
    // now owns the node's streamId and must not be clobbered.)
    if (
      !abortController.signal.aborted ||
      abortController.signal.reason === STREAM_CANCELED_ERROR_MESSAGE
    ) {
      // Reset the stream id.
      setNodes((nodes) =>
        setFluxNodeStreamId(nodes, { id: selectedNodeId, streamId: undefined })
      );
    }
  })().catch((err) => console.error(err));

  // Analytics: fires when the stream is kicked off, not when it finishes.
  if (MIXPANEL_TOKEN) mixpanel.track("Completed next words");
};

/*//////////////////////////////////////////////////////////////
SELECTED NODE LOGIC
//////////////////////////////////////////////////////////////*/
Expand Down Expand Up @@ -895,7 +814,9 @@ function App() {

if (!modelList.includes(settings.model)) {
const oldModel = settings.model;
const newModel = modelList.includes(DEFAULT_SETTINGS.model) ? DEFAULT_SETTINGS.model : modelList[0];
const newModel = modelList.includes(DEFAULT_SETTINGS.model)
? DEFAULT_SETTINGS.model
: modelList[0];

setSettings((settings) => ({ ...settings, model: newModel }));

Expand All @@ -911,7 +832,7 @@ function App() {
}, [apiKey]);

const isAnythingSaving = isSavingReactFlow || isSavingSettings;
const isAnythingLoading = isAnythingSaving || (availableModels === null);
const isAnythingLoading = isAnythingSaving || availableModels === null;

useBeforeunload((event: BeforeUnloadEvent) => {
// Prevent leaving the page before saving.
Expand Down Expand Up @@ -1028,7 +949,6 @@ function App() {
useHotkeys(`${modifierKey}+right`, moveToRightSibling, HOTKEY_CONFIG);
useHotkeys(`${modifierKey}+return`, () => submitPrompt(false), HOTKEY_CONFIG);
useHotkeys(`${modifierKey}+shift+return`, () => submitPrompt(true), HOTKEY_CONFIG);
useHotkeys(`${modifierKey}+k`, completeNextWords, HOTKEY_CONFIG);
useHotkeys(`${modifierKey}+backspace`, deleteSelectedNodes, HOTKEY_CONFIG);
useHotkeys(`${modifierKey}+shift+c`, copyMessagesToClipboard, HOTKEY_CONFIG);

Expand Down Expand Up @@ -1105,7 +1025,6 @@ function App() {
deleteSelectedNodes={deleteSelectedNodes}
submitPrompt={() => submitPrompt(false)}
regenerate={() => submitPrompt(true)}
completeNextWords={completeNextWords}
undo={undo}
redo={redo}
onClear={onClear}
Expand Down
6 changes: 0 additions & 6 deletions src/components/utils/NavigationBar.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ export function NavigationBar({
newConnectedToSelectedNode,
submitPrompt,
regenerate,
completeNextWords,
undo,
redo,
onClear,
Expand All @@ -45,7 +44,6 @@ export function NavigationBar({
newConnectedToSelectedNode: (nodeType: FluxNodeType) => void;
submitPrompt: () => void;
regenerate: () => void;
completeNextWords: () => void;
deleteSelectedNodes: () => void;
undo: () => void;
redo: () => void;
Expand Down Expand Up @@ -159,10 +157,6 @@ export function NavigationBar({
<MenuItem command={`⇧${modifierKeyText}⏎`} onClick={regenerate}>
Regenerate GPT responses
</MenuItem>

<MenuItem command={`${modifierKeyText}K`} onClick={completeNextWords}>
Complete next words
</MenuItem>
</MenuGroup>
</MenuList>
</Menu>
Expand Down

0 comments on commit 642fd0b

Please sign in to comment.