diff --git a/README.md b/README.md
index 77de896..2676dce 100644
--- a/README.md
+++ b/README.md
@@ -21,7 +21,7 @@ deno add @mumulhl/duckduckgo-ai-chat
 ```javascript
 import { initChat } from "@mumulhl/duckduckgo-ai-chat";
 
-// Initialize, optional models are gpt-4o-mini, claude-3-haiku-20240307, meta-llama/Llama-3-70b-chat-hf, mistralai/Mixtral-8x7B-Instruct-v0.1
+// Initialize, optional models are gpt-4o-mini, claude-3-haiku, llama, mixtral
 const chat = await initChat("gpt-4o-mini");
 
 // Fetch the full reply in one go
diff --git a/README_CN.md b/README_CN.md
index b2cfd1e..e084acb 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -21,7 +21,7 @@ deno add @mumulhl/duckduckgo-ai-chat
 ```javascript
 import { initChat } from "@mumulhl/duckduckgo-ai-chat";
 
-// 初始化,可选模型有 gpt-4o-mini, claude-3-haiku-20240307, meta-llama/Llama-3-70b-chat-hf, mistralai/Mixtral-8x7B-Instruct-v0.1
+// 初始化,可选模型有 gpt-4o-mini, claude-3-haiku, llama, mixtral
 const chat = await initChat("gpt-4o-mini");
 
 // 一次性获取完整的回复
diff --git a/index.ts b/index.ts
index 8eeeef1..3407b9d 100644
--- a/index.ts
+++ b/index.ts
@@ -7,9 +7,15 @@ const STATUS_HEADERS = { "x-vqd-accept": "1" };
 type Model =
   | "gpt-4o-mini"
   | "claude-3-haiku-20240307"
-  | "meta-llama/Llama-3-70b-chat-hf"
+  | "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo"
   | "mistralai/Mixtral-8x7B-Instruct-v0.1";
 
+type ModelAlias =
+  | "gpt-4o-mini"
+  | "claude-3-haiku"
+  | "llama"
+  | "mixtral";
+
 type Messages = { content: string; role: "user" | "assistant" }[];
 
 type ChatPayload = {
@@ -17,6 +23,13 @@ type ChatPayload = {
   messages: Messages;
 };
 
+const _model: { [property: string]: Model } = {
+  "gpt-4o-mini": "gpt-4o-mini",
+  "claude-3-haiku": "claude-3-haiku-20240307",
+  "llama": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
+  "mixtral": "mistralai/Mixtral-8x7B-Instruct-v0.1",
+};
+
 class Chat {
   oldVqd: string;
   newVqd: string;
@@ -132,7 +145,7 @@ class Chat {
  * @param model The model used by chat.
  * @returns A Chat instance.
  */
-async function initChat(model: Model): Promise<Chat> {
+async function initChat(model: ModelAlias): Promise<Chat> {
   const status = await fetch(STATUS_URL, { headers: STATUS_HEADERS });
   const vqd = status.headers.get("x-vqd-4");
   if (!vqd) {
@@ -140,8 +153,8 @@ async function initChat(model: Model): Promise<Chat> {
       `${status.status}: Failed to initialize chat. ${status.statusText}`,
     );
   }
-  return new Chat(vqd, model);
+  return new Chat(vqd, _model[model]);
 }
 
 export { initChat };
-export type { Chat, Model };
+export type { Chat, ModelAlias };
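
For reviewers trying the branch, here is a minimal sketch of the new alias API as changed by this diff. It exercises only `initChat` and the alias strings introduced above; it assumes a Deno or ESM top-level-await context, and omits any reply-fetching calls on the returned `Chat` instance because those are outside this diff.

```typescript
import { initChat } from "@mumulhl/duckduckgo-ai-chat";

// The short alias is all the caller sees; initChat looks it up in the
// internal _model map, so "llama" resolves to
// "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" before the Chat is built.
const llamaChat = await initChat("llama");

// Aliases replace the full identifiers in the public signature, so the
// old spelling "claude-3-haiku-20240307" no longer type-checks;
// callers pass "claude-3-haiku" instead.
const haikuChat = await initChat("claude-3-haiku");
```

Keeping `Model` internal and exporting only `ModelAlias` means an upstream model rename, like the Llama 3 to Llama 3.1 bump in this diff, only touches the `_model` map rather than every caller.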