diff --git a/src/components/App.tsx b/src/components/App.tsx
index 125db37..8b856c8 100644
--- a/src/components/App.tsx
+++ b/src/components/App.tsx
@@ -370,7 +370,11 @@ function App() {
           temperature: temp,
           messages: messagesFromLineage(parentNodeLineage, settings),
         },
-        { apiKey: apiKey!, mode: "raw" }
+        {
+          apiKey: apiKey!,
+          mode: "raw",
+          apiBase: "https://api.hyperbolic.xyz/v1",
+        }
       );
 
       const DECODER = new TextDecoder();
diff --git a/src/components/utils/APIKeyInput.tsx b/src/components/utils/APIKeyInput.tsx
index 160805e..fd94930 100644
--- a/src/components/utils/APIKeyInput.tsx
+++ b/src/components/utils/APIKeyInput.tsx
@@ -12,10 +12,10 @@ export function APIKeyInput({
   return (
-          Flux by
+          Hyperbolic Flux
+          {" "}
+          by
diff --git a/src/utils/apikey.ts b/src/utils/apikey.ts
index 8f65d43..bbd366e 100644
--- a/src/utils/apikey.ts
+++ b/src/utils/apikey.ts
@@ -1,3 +1,3 @@
 export function isValidAPIKey(apiKey: string | null) {
-  return (apiKey?.length === 51 && apiKey.startsWith("sk-")) || (apiKey?.length === 56 && apiKey.startsWith("sk-proj-"));
+  return (apiKey?.length ?? 0) >= 50; // No idea what Hyperbolic's key spec is, but this is a safe bet.
 }
diff --git a/src/utils/constants.ts b/src/utils/constants.ts
index e67ea56..c5ed85e 100644
--- a/src/utils/constants.ts
+++ b/src/utils/constants.ts
@@ -19,7 +19,7 @@ export const DEFAULT_SETTINGS: Settings = {
   temp: 1,
   n: 3,
   autoZoom: true,
-  model: "gpt-4o",
+  model: "Qwen/Qwen2-72B-Instruct",
   defaultPreamble: `You are a helpful assistant.`,
 };
 
diff --git a/src/utils/fluxNode.ts b/src/utils/fluxNode.ts
index 02aa010..e96afa3 100644
--- a/src/utils/fluxNode.ts
+++ b/src/utils/fluxNode.ts
@@ -376,7 +376,7 @@ export function displayNameFromFluxNodeType(
     case FluxNodeType.User:
       return "User";
     case FluxNodeType.GPT:
-      return isGPT4 === undefined ? "GPT" : isGPT4 ? "GPT-4" : "GPT-3.5";
+      return "Model";
     case FluxNodeType.TweakedGPT:
       return displayNameFromFluxNodeType(FluxNodeType.GPT, isGPT4) + " (edited)";
     case FluxNodeType.System:
diff --git a/src/utils/models.ts b/src/utils/models.ts
index 434fc0d..2f614da 100644
--- a/src/utils/models.ts
+++ b/src/utils/models.ts
@@ -1,28 +1,24 @@
 export function getAvailableModels(apiKey: string): Promise<string[]> {
-  return new Promise(async (resolve, reject) => {
-    try {
-      const response = await fetch("https://api.openai.com/v1/models", {
-        method: "GET",
-        headers: {
-          Authorization: `Bearer ${apiKey}`,
-        },
-      })
-      const data = await response.json();
-      resolve(data.data.map((model: any) => model.id).sort());
-    } catch (err) {
-      reject(err);
-    }
-  });
-};
+  return new Promise(async (resolve, reject) => {
+    const models = [
+      "Qwen/Qwen2-72B-Instruct",
+      // "meta-llama/Meta-Llama-3-70B-Instruct", // N completions is broken: fragments or no response at all.
+      // "mistralai/Mixtral-8x22B-Instruct-v0.1", // Keep getting 'network error'.
+      "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+    ];
+
+    resolve(models);
+  });
+}
 
 export function getAvailableChatModels(apiKey: string): Promise<string[]> {
-  return new Promise((resolve, reject) => {
-    getAvailableModels(apiKey)
-      .then((models) => {
-        resolve(models.filter((model) => model.startsWith("gpt-")));
-      })
-      .catch((err) => {
-        reject(err);
-      });
-  });
-};
+  return new Promise((resolve, reject) => {
+    getAvailableModels(apiKey)
+      .then((models) => {
+        resolve(models);
+      })
+      .catch((err) => {
+        reject(err);
+      });
+  });
+}