From 675902f5a8dd3ba02355e67f4863a1a88b0bd01e Mon Sep 17 00:00:00 2001
From: Johnson
Date: Fri, 15 Mar 2024 22:47:21 +0800
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20feat(ollama):=20improve=20connectio?=
 =?UTF-8?q?n=20check=20method=20and=20provide=20selector=20for=20user=20to?=
 =?UTF-8?q?=20control=20model=20options=20(#1397)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* 🐛 fix(ollama): change checker with ollama's tags api
* ✨ feat(ollama): add error card to pull model
* 🚚 chore: move files
* 💄 style: update llava logo
* 🐛 fix: add ollama service unavailable error type
* 🐛 fix: ollama checker shows passed even when an error message exists
* ✨ feat(ollama): add download monitor to show speed and remaining time (ETA)
* 🚨 ci: fix lint
* 💄 style: improve download style
* 🌐 style: add i18n

---------

Co-authored-by: shijianyue
Co-authored-by: arvinxx
---
 locales/ar/error.json | 8 +
 locales/ar/setting.json | 10 +-
 locales/de-DE/error.json | 8 +
 locales/de-DE/setting.json | 10 +-
 locales/en-US/error.json | 8 +
 locales/en-US/setting.json | 10 +-
 locales/es-ES/error.json | 8 +
 locales/es-ES/setting.json | 10 +-
 locales/fr-FR/error.json | 8 +
 locales/fr-FR/setting.json | 10 +-
 locales/it-IT/error.json | 8 +
 locales/it-IT/setting.json | 10 +-
 locales/ja-JP/error.json | 8 +
 locales/ja-JP/setting.json | 10 +-
 locales/ko-KR/error.json | 8 +
 locales/ko-KR/setting.json | 10 +-
 locales/nl-NL/error.json | 8 +
 locales/nl-NL/setting.json | 10 +-
 locales/pl-PL/error.json | 8 +
 locales/pl-PL/setting.json | 10 +-
 locales/pt-BR/error.json | 8 +
 locales/pt-BR/setting.json | 10 +-
 locales/ru-RU/error.json | 8 +
 locales/ru-RU/setting.json | 10 +-
 locales/tr-TR/error.json | 8 +
 locales/tr-TR/setting.json | 10 +-
 locales/vi-VN/error.json | 8 +
 locales/vi-VN/setting.json | 10 +-
 locales/zh-CN/error.json | 8 +
 locales/zh-CN/setting.json | 10 +-
 locales/zh-TW/error.json | 8 +
 locales/zh-TW/setting.json | 10 +-
 package.json | 1 +
 src/app/api/config/route.ts | 5 +-
 src/app/api/errorResponse.ts | 3 +-
 src/app/settings/llm/Ollama/Checker.tsx | 73 +++++++++
 src/app/settings/llm/Ollama/index.tsx | 6 +-
 src/app/settings/llm/components/Checker.tsx | 40 ++---
 src/components/ModelIcon/index.tsx | 2 +
 src/components/ModelTag/ModelIcon.tsx | 2 +
 src/config/modelProviders/ollama.ts | 14 ++
 src/config/server/provider.ts | 2 +
 .../Error/InvalidOllamaModel/index.tsx | 138 ++++++++++++++++++
 .../InvalidOllamaModel/useDownloadMonitor.ts | 48 ++++++
 .../Conversation/Error/OllamaBizError.tsx | 34 +++++
 src/features/Conversation/Error/index.tsx | 5 +
 src/features/Conversation/Error/style.tsx | 4 +-
 src/locales/default/error.ts | 8 +
 src/locales/default/setting.ts | 10 +-
 src/services/__tests__/ollama.test.ts | 26 ++++
 src/services/ollama.ts | 64 ++++++++
 .../settings/selectors/modelProvider.ts | 21 +--
 .../middleware/createHyperStorage/index.ts | 3 +-
 .../createHyperStorage/indexedDB.ts | 2 +-
 .../createHyperStorage/localStorage.ts | 2 +-
 .../createHyperStorage/urlStorage.ts | 2 +-
 src/types/fetch.ts | 1 +
 tsconfig.json | 2 +-
 58 files changed, 747 insertions(+), 59 deletions(-)
 create mode 100644 src/app/settings/llm/Ollama/Checker.tsx
 create mode 100644 src/features/Conversation/Error/InvalidOllamaModel/index.tsx
 create mode 100644 src/features/Conversation/Error/InvalidOllamaModel/useDownloadMonitor.ts
 create mode 100644 src/features/Conversation/Error/OllamaBizError.tsx
 create mode 100644 src/services/__tests__/ollama.test.ts
 create mode 100644 src/services/ollama.ts

diff --git a/locales/ar/error.json
b/locales/ar/error.json index df525f182429b..a599d6672f2ab 100644 --- a/locales/ar/error.json +++ b/locales/ar/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "حدث خطأ في خدمة جانب القمر، يرجى التحقق من المعلومات أدناه أو إعادة المحاولة", "NoOpenAIAPIKey": "مفتاح API الخاص بـ OpenAI فارغ، يرجى إضافة مفتاح API الخاص بـ OpenAI", "OllamaBizError": "خطأ في طلب خدمة Ollama، يرجى التحقق من المعلومات التالية أو إعادة المحاولة", + "OllamaServiceUnavailable": "خدمة Ollama غير متوفرة، يرجى التحقق مما إذا كانت قد تم تشغيلها بشكل صحيح", "OpenAIBizError": "حدث خطأ في طلب خدمة OpenAI، يرجى التحقق من المعلومات أدناه وإعادة المحاولة", "PerplexityBizError": "خطأ في طلب خدمة Perplexity AI، يرجى التحقق من المعلومات التالية أو إعادة المحاولة", "PluginApiNotFound": "عذرًا، لا يوجد API للإضافة في وصف الإضافة، يرجى التحقق من تطابق طريقة الطلب الخاصة بك مع API الوصف", @@ -114,6 +115,13 @@ }, "closeMessage": "إغلاق الرسالة", "confirm": "تأكيد وإعادة المحاولة", + "model": { + "Ollama": { + "confirm": "تحميل", + "description": "أدخل علامة نموذج Ollama الخاصة بك لاستكمال الجلسة", + "title": "تحميل نموذج Ollama المحدد" + } + }, "oauth": { "description": "فتح المسؤول توثيق تسجيل الدخول الموحد، انقر فوق الزر أدناه لتسجيل الدخول وفتح التطبيق", "success": "تم تسجيل الدخول بنجاح", diff --git a/locales/ar/setting.json b/locales/ar/setting.json index c6b1e9340a2b8..10d7ce6fbe186 100644 --- a/locales/ar/setting.json +++ b/locales/ar/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "يتم <1>التخطيط لتوفير المزيد من النماذج، ترقبوا المزيد ✨" }, + "ollama": { + "download": { + "desc": "جارٍ تنزيل النموذج، يرجى عدم إغلاق هذه الصفحة. سيتم استئناف التنزيل من حيث توقف في حالة إعادة التنزيل", + "remainingTime": "الوقت المتبقي", + "speed": "سرعة التنزيل", + "title": "جارٍ تنزيل النموذج {{model}}" + } + }, "plugin": { "addTooltip": "إضافة البرنامج المساعد", "clearDeprecated": "مسح البرامج المساعدة الغير صالحة", @@ -428,4 +436,4 @@ }, "title": "أدوات الامتداد" } -} \ No newline at end of file +} diff --git a/locales/de-DE/error.json b/locales/de-DE/error.json index efe6a0d32e3c3..9acc70accfc7d 100644 --- a/locales/de-DE/error.json +++ b/locales/de-DE/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "Fehler beim Abrufen des Dark Side of the Moon-Services. Bitte überprüfen Sie die folgenden Informationen oder versuchen Sie es erneut.", "NoOpenAIAPIKey": "Der OpenAI-API-Schlüssel ist leer. Bitte fügen Sie einen benutzerdefinierten OpenAI-API-Schlüssel hinzu", "OllamaBizError": "Fehler bei der Anforderung des Ollama-Dienstes. Bitte überprüfen Sie die folgenden Informationen oder versuchen Sie es erneut.", + "OllamaServiceUnavailable": "Ollama-Dienst nicht verfügbar. Bitte überprüfen Sie, ob er ordnungsgemäß gestartet wurde.", "OpenAIBizError": "Fehler bei der OpenAI-Serviceanfrage. Bitte überprüfen Sie die folgenden Informationen oder versuchen Sie es erneut", "PerplexityBizError": "Fehler bei der Anforderung des Perplexity AI-Dienstes. Bitte überprüfen Sie die folgenden Informationen oder versuchen Sie es erneut.", "PluginApiNotFound": "Entschuldigung, das API des Plugins im Plugin-Manifest existiert nicht. 
Bitte überprüfen Sie, ob Ihre Anfragemethode mit dem Plugin-Manifest-API übereinstimmt", @@ -114,6 +115,13 @@ }, "closeMessage": "Hinweis schließen", "confirm": "Bestätigen und erneut versuchen", + "model": { + "Ollama": { + "confirm": "Herunterladen", + "description": "Geben Sie Ihre Ollama-Modellbezeichnung ein, um fortzufahren", + "title": "Bestimmtes Ollama-Modell herunterladen" + } + }, "oauth": { "description": "Der Administrator hat die einheitliche Anmeldeauthentifizierung aktiviert. Klicken Sie unten auf die Schaltfläche, um sich anzumelden und die App zu entsperren.", "success": "Anmeldung erfolgreich", diff --git a/locales/de-DE/setting.json b/locales/de-DE/setting.json index cf3a5039c6b29..91453e7a8a5d9 100644 --- a/locales/de-DE/setting.json +++ b/locales/de-DE/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Weitere Modelle werden <1>geplant, bitte freuen Sie sich auf weitere Updates ✨" }, + "ollama": { + "download": { + "desc": "Ollama lädt dieses Modell herunter. Bitte schließen Sie diese Seite nicht. Der Download wird an der abgebrochenen Stelle fortgesetzt, wenn Sie ihn erneut starten.", + "remainingTime": "Verbleibende Zeit", + "speed": "Download-Geschwindigkeit", + "title": "Modell {{model}} wird heruntergeladen" + } + }, "plugin": { "addTooltip": "Benutzerdefiniertes Plugin", "clearDeprecated": "Entfernen Sie ungültige Plugins", @@ -428,4 +436,4 @@ }, "title": "Erweiterungswerkzeuge" } -} \ No newline at end of file +} diff --git a/locales/en-US/error.json b/locales/en-US/error.json index 6d52efa3d8db2..05c9e3d037571 100644 --- a/locales/en-US/error.json +++ b/locales/en-US/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "There was an error with the Moonshot service, please troubleshoot or retry based on the following information.", "NoOpenAIAPIKey": "OpenAI API Key is empty, please add a custom OpenAI API Key", "OllamaBizError": "Error requesting Ollama service, please troubleshoot or retry based on the following information", + "OllamaServiceUnavailable": "Ollama service not detected, please check if it is running properly", "OpenAIBizError": "Error requesting OpenAI service. Please troubleshoot or retry based on the following information.", "PerplexityBizError": "Error requesting Perplexity AI service. Please troubleshoot or retry based on the following information.", "PluginApiNotFound": "Sorry, the API does not exist in the plugin's manifest. Please check if your request method matches the plugin manifest API", @@ -114,6 +115,13 @@ }, "closeMessage": "Close message", "confirm": "Confirm and Retry", + "model": { + "Ollama": { + "confirm": "Download", + "description": "Enter your Ollama model label to proceed with the conversation", + "title": "Download specified Ollama model" + } + }, "oauth": { "description": "The administrator has enabled unified login authentication. Click the button below to log in and unlock the application.", "success": "Login successful", diff --git a/locales/en-US/setting.json b/locales/en-US/setting.json index c6d395521bcff..fb5d9e177927d 100644 --- a/locales/en-US/setting.json +++ b/locales/en-US/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "More models are <1>planned to be added, stay tuned ✨" }, + "ollama": { + "download": { + "desc": "Ollama is currently downloading the model. Please try not to close this page. 
It will resume from where it left off if you restart the download.", + "remainingTime": "Remaining Time", + "speed": "Download Speed", + "title": "Downloading model {{model}}" + } + }, "plugin": { "addTooltip": "Custom Plugin", "clearDeprecated": "Remove Deprecated Plugins", @@ -428,4 +436,4 @@ }, "title": "Extension Tools" } -} \ No newline at end of file +} diff --git a/locales/es-ES/error.json b/locales/es-ES/error.json index d9f0d8b2e29c1..164fc59fbb8eb 100644 --- a/locales/es-ES/error.json +++ b/locales/es-ES/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "Se produjo un error al solicitar el servicio de Moonshot en el lado oscuro de la luna. Por favor, revise la siguiente información o inténtelo de nuevo.", "NoOpenAIAPIKey": "La clave de API de OpenAI está vacía. Agregue una clave de API de OpenAI personalizada", "OllamaBizError": "Error al solicitar el servicio de Ollama, por favor verifica la siguiente información o inténtalo de nuevo", + "OllamaServiceUnavailable": "Servicio Ollama no disponible: Ollama no detectado. Por favor, verifica si está iniciado correctamente.", "OpenAIBizError": "Error al solicitar el servicio OpenAI. Depure o reintente según la siguiente información", "PerplexityBizError": "Error comercial al solicitar el servicio de IA de Perplexity. Por favor, revisa la siguiente información o inténtalo de nuevo", "PluginApiNotFound": "Lo sentimos, el API especificado no existe en el manifiesto del complemento. Verifique si su método de solicitud coincide con el API del manifiesto del complemento", @@ -114,6 +115,13 @@ }, "closeMessage": "Cerrar mensaje", "confirm": "Confirmar y volver a intentar", + "model": { + "Ollama": { + "confirm": "Descargar", + "description": "Ingresa las etiquetas de tu modelo Ollama para continuar la sesión", + "title": "Descargar el modelo Ollama especificado" + } + }, "oauth": { "description": "El administrador ha habilitado la autenticación de inicio de sesión única. Haz clic en el botón a continuación para iniciar sesión y desbloquear la aplicación.", "success": "Inicio de sesión exitoso", diff --git a/locales/es-ES/setting.json b/locales/es-ES/setting.json index 23688d6b48c69..5b9e0f69df8da 100644 --- a/locales/es-ES/setting.json +++ b/locales/es-ES/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Más modelos están en <1>planificación para su incorporación, ¡estén atentos! ✨" }, + "ollama": { + "download": { + "desc": "Ollama está descargando este modelo. Por favor, no cierres esta página. La descarga se reanudará desde donde se detuvo si se reinicia.", + "remainingTime": "Tiempo restante", + "speed": "Velocidad de descarga", + "title": "Descargando el modelo {{model}}" + } + }, "plugin": { "addTooltip": "Agregar complemento personalizado", "clearDeprecated": "Eliminar complementos obsoletos", @@ -428,4 +436,4 @@ }, "title": "Herramientas de extensión" } -} \ No newline at end of file +} diff --git a/locales/fr-FR/error.json b/locales/fr-FR/error.json index 11c6beb4903d6..c30bb1db65aea 100644 --- a/locales/fr-FR/error.json +++ b/locales/fr-FR/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "Erreur de service Moonshot : une erreur s'est produite lors de la demande du service Côté Obscur de la Lune. Veuillez vérifier les informations suivantes ou réessayer.", "NoOpenAIAPIKey": "La clé API OpenAI est vide. 
Veuillez ajouter une clé API OpenAI personnalisée", "OllamaBizError": "Erreur commerciale lors de la demande de service Ollama, veuillez vérifier les informations ci-dessous ou réessayer", + "OllamaServiceUnavailable": "Service Ollama non disponible, veuillez vérifier s'il est démarré correctement", "OpenAIBizError": "Erreur de service OpenAI. Veuillez diagnostiquer ou réessayer en fonction des informations ci-dessous", "PerplexityBizError": "Erreur commerciale lors de la demande de service Perplexity AI. Veuillez vérifier les informations suivantes ou réessayer.", "PluginApiNotFound": "Désolé, l'API spécifiée n'existe pas dans le manifeste du plugin. Veuillez vérifier que votre méthode de requête correspond à l'API du manifeste du plugin", @@ -114,6 +115,13 @@ }, "closeMessage": "Fermer le message", "confirm": "Confirmer et réessayer", + "model": { + "Ollama": { + "confirm": "Télécharger", + "description": "Saisissez l'étiquette de votre modèle Ollama pour continuer la session", + "title": "Télécharger le modèle Ollama spécifié" + } + }, "oauth": { "description": "L'administrateur a activé l'authentification de connexion unique. Cliquez sur le bouton ci-dessous pour vous connecter et déverrouiller l'application.", "success": "Connexion réussie", diff --git a/locales/fr-FR/setting.json b/locales/fr-FR/setting.json index 87fc9206d94c6..b4650c6fa7a1e 100644 --- a/locales/fr-FR/setting.json +++ b/locales/fr-FR/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Plus de modèles sont en cours de <1>planification pour être ajoutés, restez à l'écoute ✨" }, + "ollama": { + "download": { + "desc": "Ollama est en train de télécharger ce modèle. Veuillez ne pas fermer cette page. Le téléchargement reprendra là où il s'est arrêté en cas de reprise.", + "remainingTime": "Temps restant", + "speed": "Vitesse de téléchargement", + "title": "Téléchargement du modèle {{model}} en cours" + } + }, "plugin": { "addTooltip": "Ajouter un plugin personnalisé", "clearDeprecated": "Effacer les plugins obsolètes", @@ -428,4 +436,4 @@ }, "title": "Outils supplémentaires" } -} \ No newline at end of file +} diff --git a/locales/it-IT/error.json b/locales/it-IT/error.json index 17f6a40315f64..5b488acaa1197 100644 --- a/locales/it-IT/error.json +++ b/locales/it-IT/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "Si è verificato un errore nel servizio Moonshot, si prega di controllare le informazioni seguenti o riprovare", "NoOpenAIAPIKey": "La chiave API OpenAI è vuota. Aggiungi una chiave API personalizzata OpenAI", "OllamaBizError": "Errore di servizio Ollama, controllare le informazioni seguenti o riprovare", + "OllamaServiceUnavailable": "Servizio Ollama non disponibile: controlla se è avviato correttamente", "OpenAIBizError": "Errore nella richiesta del servizio OpenAI. Segui le informazioni seguenti per individuare e riprovare", "PerplexityBizError": "Errore di business nella richiesta del servizio Perplexity AI, controlla le informazioni seguenti o riprova", "PluginApiNotFound": "Spiacenti, l'API specificata non esiste nel manifesto del plugin. 
Verifica che il metodo di richiesta corrisponda all'API del manifesto del plugin", @@ -114,6 +115,13 @@ }, "closeMessage": "Chiudi messaggio", "confirm": "Conferma e riprova", + "model": { + "Ollama": { + "confirm": "Scarica", + "description": "Inserisci l'etichetta del tuo modello Ollama per continuare la sessione", + "title": "Scarica il modello Ollama specificato" + } + }, "oauth": { "description": "L'amministratore ha abilitato l'autenticazione di accesso unificata. Fai clic sul pulsante sottostante per accedere e sbloccare l'applicazione.", "success": "Accesso riuscito", diff --git a/locales/it-IT/setting.json b/locales/it-IT/setting.json index c6fbca622e1dc..67f8ac7d2242e 100644 --- a/locales/it-IT/setting.json +++ b/locales/it-IT/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Altri modelli sono in fase di <1> pianificazione per l'integrazione , resta sintonizzato ✨" }, + "ollama": { + "download": { + "desc": "Ollama sta scaricando questo modello. Si prega di non chiudere questa pagina. Il download verrà ripreso dal punto in cui è stato interrotto in caso di riavvio.", + "remainingTime": "Tempo rimanente", + "speed": "Velocità di download", + "title": "Download del modello {{model}} in corso" + } + }, "plugin": { "addTooltip": "Aggiungi plugin personalizzato", "clearDeprecated": "Rimuovi plugin non validi", @@ -428,4 +436,4 @@ }, "title": "Strumenti aggiuntivi" } -} \ No newline at end of file +} diff --git a/locales/ja-JP/error.json b/locales/ja-JP/error.json index 4ba48b2a11688..b45af0c26b4c3 100644 --- a/locales/ja-JP/error.json +++ b/locales/ja-JP/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "月の裏側サービスのリクエストでエラーが発生しました。以下の情報を確認して再試行してください。", "NoOpenAIAPIKey": "OpenAI APIキーが空です。カスタムOpenAI APIキーを追加してください。", "OllamaBizError": "Ollamaサービスのリクエストでエラーが発生しました。以下の情報に基づいてトラブルシューティングを行うか、再度お試しください", + "OllamaServiceUnavailable": "Ollamaサービスが利用できません。正常に起動しているかどうかを確認してください", "OpenAIBizError": "OpenAIサービスのリクエストエラーが発生しました。以下の情報に基づいて問題を解決したり、再試行したりしてください", "PerplexityBizError": "Perplexity AIサービスのリクエストでエラーが発生しました。以下の情報に基づいてトラブルシューティングするか、再度お試しください", "PluginApiNotFound": "申し訳ありませんが、プラグインのマニフェストに指定されたAPIが見つかりませんでした。リクエストメソッドとプラグインのマニフェストのAPIが一致しているかどうかを確認してください", @@ -114,6 +115,13 @@ }, "closeMessage": "ヒントを閉じる", "confirm": "確認して再試行", + "model": { + "Ollama": { + "confirm": "ダウンロード", + "description": "Ollamaモデルのラベルを入力して、会話を続行してください", + "title": "指定のOllamaモデルをダウンロード" + } + }, "oauth": { "description": "管理者が統一ログイン認証を有効にしました。下のボタンをクリックしてログインすると、アプリがロック解除されます。", "success": "ログインに成功しました", diff --git a/locales/ja-JP/setting.json b/locales/ja-JP/setting.json index 81f69d8165564..549b8a909d26f 100644 --- a/locales/ja-JP/setting.json +++ b/locales/ja-JP/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "さらに多くのモデルが <1>計画されています。お楽しみに ✨" }, + "ollama": { + "download": { + "desc": "Ollama はモデルをダウンロード中です。ページを閉じないようにしてください。再度ダウンロードすると、中断した箇所から再開されます。", + "remainingTime": "残り時間", + "speed": "ダウンロード速度", + "title": "モデル {{model}} をダウンロード中" + } + }, "plugin": { "addTooltip": "カスタムプラグイン", "clearDeprecated": "無効なプラグインをクリア", @@ -428,4 +436,4 @@ }, "title": "拡張ツール" } -} \ No newline at end of file +} diff --git a/locales/ko-KR/error.json b/locales/ko-KR/error.json index 13ba63ba3b6e8..485da03f35d44 100644 --- a/locales/ko-KR/error.json +++ b/locales/ko-KR/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "요청한 문샷 비즈니스에 오류가 발생했습니다. 아래 정보를 확인하고 다시 시도해주세요.", "NoOpenAIAPIKey": "OpenAI API 키가 비어 있습니다. 사용자 정의 OpenAI API 키를 추가해주세요.", "OllamaBizError": "Ollama 서비스 요청 중 오류가 발생했습니다. 
아래 정보를 확인하고 다시 시도하십시오.", + "OllamaServiceUnavailable": "Ollama 서비스를 찾을 수 없습니다. 정상적으로 시작되었는지 확인하십시오.", "OpenAIBizError": "OpenAI 서비스 요청 중 오류가 발생했습니다. 아래 정보를 확인하고 문제를 해결하거나 다시 시도해주세요.", "PerplexityBizError": "Perplexity AI 서비스 요청 중 오류가 발생했습니다. 아래 정보를 확인하고 다시 시도하십시오.", "PluginApiNotFound": "죄송합니다. 플러그인 설명서에 해당 API가 없습니다. 요청 메서드와 플러그인 설명서 API가 일치하는지 확인해주세요.", @@ -114,6 +115,13 @@ }, "closeMessage": "알림 닫기", "confirm": "확인 및 다시 시도", + "model": { + "Ollama": { + "confirm": "다운로드", + "description": "Ollama 모델 태그를 입력하여 세션을 계속할 수 있습니다.", + "title": "지정된 Ollama 모델 다운로드" + } + }, "oauth": { "description": "관리자가 통합 로그인 인증을 활성화했습니다. 아래 버튼을 클릭하여 로그인하면 앱을 잠금 해제할 수 있습니다.", "success": "로그인 성공", diff --git a/locales/ko-KR/setting.json b/locales/ko-KR/setting.json index b99755ed81813..0a0aa5388e176 100644 --- a/locales/ko-KR/setting.json +++ b/locales/ko-KR/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "<1>계획에 따라 더 많은 모델이 추가될 예정이니 기대해 주세요 ✨" }, + "ollama": { + "download": { + "desc": "Ollama 모델을 다운로드 중입니다. 이 페이지를 닫지 말아주세요. 다시 다운로드하면 중단된 곳부터 계속됩니다.", + "remainingTime": "남은 시간", + "speed": "다운로드 속도", + "title": "{{model}} 모델 다운로드 중" + } + }, "plugin": { "addTooltip": "플러그인 추가", "clearDeprecated": "사용되지 않는 플러그인 제거", @@ -428,4 +436,4 @@ }, "title": "확장 도구" } -} \ No newline at end of file +} diff --git a/locales/nl-NL/error.json b/locales/nl-NL/error.json index 4b81111ff1efd..f60cc4a110911 100644 --- a/locales/nl-NL/error.json +++ b/locales/nl-NL/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "请求月球AI服务出错,请根据以下信息排查或重试", "NoOpenAIAPIKey": "OpenAI API-sleutel ontbreekt. Voeg een aangepaste OpenAI API-sleutel toe", "OllamaBizError": "Fout bij het aanroepen van de Ollama-service, controleer de onderstaande informatie en probeer opnieuw", + "OllamaServiceUnavailable": "Ollama 服务不可用,请检查是否已正常启动", "OpenAIBizError": "Fout bij het aanvragen van OpenAI-service. Controleer de onderstaande informatie voor probleemoplossing of probeer opnieuw", "PerplexityBizError": "Er is een fout opgetreden bij het aanvragen van de Perplexity AI-service. Controleer de onderstaande informatie en probeer het opnieuw.", "PluginApiNotFound": "Sorry, de API van de plug-inbeschrijvingslijst bestaat niet. Controleer of uw verzoeksmethode overeenkomt met de plug-inbeschrijvingslijst API", @@ -114,6 +115,13 @@ }, "closeMessage": "Sluit bericht", "confirm": "Bevestigen en opnieuw proberen", + "model": { + "Ollama": { + "confirm": "下载", + "description": "输入您的 Ollama 模型标签,完成后即可继续会话", + "title": "下载指定的 Ollama 模型" + } + }, "oauth": { "description": "De beheerder heeft een uniforme aanmeldingsverificatie ingeschakeld. Klik op de onderstaande knop om in te loggen en de app te ontgrendelen.", "success": "Succesvol ingelogd", diff --git a/locales/nl-NL/setting.json b/locales/nl-NL/setting.json index 72332d4b8fba5..ca6618bfcdba2 100644 --- a/locales/nl-NL/setting.json +++ b/locales/nl-NL/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Meer modellen worden <1>gepland om te worden toegevoegd, dus blijf op de hoogte ✨" }, + "ollama": { + "download": { + "desc": "Ollama is bezig met het downloaden van dit model. Gelieve deze pagina niet te sluiten. 
Het downloaden zal worden hervat vanaf het onderbroken punt als u opnieuw begint te downloaden.", + "remainingTime": "Resterende tijd", + "speed": "Downloadsnelheid", + "title": "Model {{model}} wordt gedownload" + } + }, "plugin": { "addTooltip": "Voeg aangepaste plug-in toe", "clearDeprecated": "Verwijder verouderde plug-ins", @@ -428,4 +436,4 @@ }, "title": "Uitbreidingsgereedschap" } -} \ No newline at end of file +} diff --git a/locales/pl-PL/error.json b/locales/pl-PL/error.json index 318b2a8cebad6..30d33f33cbec5 100644 --- a/locales/pl-PL/error.json +++ b/locales/pl-PL/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "请求月球AI服务出错,请根据以下信息排查或重试", "NoOpenAIAPIKey": "Klucz API OpenAI jest pusty. Proszę dodać niestandardowy klucz API OpenAI", "OllamaBizError": "Błąd usługi Ollama, sprawdź poniższe informacje lub spróbuj ponownie", + "OllamaServiceUnavailable": "Usługa Ollama jest niedostępna. Sprawdź, czy została poprawnie uruchomiona.", "OpenAIBizError": "Błąd żądania usługi OpenAI. Proszę sprawdź poniższe informacje i spróbuj ponownie", "PerplexityBizError": "Błąd biznesowy podczas żądania usługi Perplexity AI. Sprawdź poniższe informacje lub spróbuj ponownie.", "PluginApiNotFound": "Przepraszamy, w manifestach wtyczki nie istnieje to API. Proszę sprawdź, czy metoda żądania jest zgodna z API w manifestach wtyczki", @@ -114,6 +115,13 @@ }, "closeMessage": "Zamknij komunikat", "confirm": "Potwierdź i spróbuj ponownie", + "model": { + "Ollama": { + "confirm": "Pobierz", + "description": "Wprowadź etykietę modelu Ollama, aby kontynuować sesję.", + "title": "Pobierz określony model Ollama" + } + }, "oauth": { "description": "Administrator włączył jednolite uwierzytelnianie logowania. Kliknij poniższy przycisk, aby się zalogować i odblokować aplikację.", "success": "Zalogowano pomyślnie", diff --git a/locales/pl-PL/setting.json b/locales/pl-PL/setting.json index cc8eb25943576..baa2d4a71be99 100644 --- a/locales/pl-PL/setting.json +++ b/locales/pl-PL/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Więcej modeli jest obecnie w <1>planach dołączenia, prosimy o cierpliwość ✨" }, + "ollama": { + "download": { + "desc": "Ollama pobiera ten model. Prosimy nie zamykać tej strony. Gdy pobieranie zostanie wznowione, będzie kontynuowane od miejsca przerwania.", + "remainingTime": "Czas pozostały", + "speed": "Prędkość pobierania", + "title": "Pobieranie modelu {{model}}" + } + }, "plugin": { "addTooltip": "Dodaj niestandardowy dodatek", "clearDeprecated": "Usuń przestarzałe dodatki", @@ -428,4 +436,4 @@ }, "title": "Narzędzia rozszerzeń" } -} \ No newline at end of file +} diff --git a/locales/pt-BR/error.json b/locales/pt-BR/error.json index ee7780c330544..a5c73090c2145 100644 --- a/locales/pt-BR/error.json +++ b/locales/pt-BR/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "O serviço Moonshot na face oculta da lua encontrou um erro. Por favor, verifique as informações abaixo ou tente novamente.", "NoOpenAIAPIKey": "A chave de API do OpenAI está em branco. Adicione uma chave de API personalizada do OpenAI", "OllamaBizError": "Erro de negócio ao solicitar o serviço Ollama, verifique as informações a seguir ou tente novamente", + "OllamaServiceUnavailable": "O serviço Ollama não está disponível, verifique se está iniciado corretamente", "OpenAIBizError": "Erro ao solicitar o serviço OpenAI. 
Verifique ou tente novamente com base nas informações abaixo", "PerplexityBizError": "Erro de negócios ao solicitar o serviço de IA Perplexity, verifique as informações a seguir ou tente novamente", "PluginApiNotFound": "Desculpe, o API especificado não existe no manifesto do plugin. Verifique se o método de solicitação corresponde ao API do manifesto do plugin", @@ -114,6 +115,13 @@ }, "closeMessage": "Fechar mensagem", "confirm": "Confirmar e tentar novamente", + "model": { + "Ollama": { + "confirm": "Baixar", + "description": "Digite as tags do seu modelo Ollama para continuar a sessão", + "title": "Baixar o modelo Ollama especificado" + } + }, "oauth": { "description": "O administrador ativou a autenticação de login unificado. Clique no botão abaixo para fazer login e desbloquear o aplicativo.", "success": "Login bem-sucedido", diff --git a/locales/pt-BR/setting.json b/locales/pt-BR/setting.json index 411576ca60aa1..e567ba663ad00 100644 --- a/locales/pt-BR/setting.json +++ b/locales/pt-BR/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Mais modelos estão sendo <1>planejados para serem adicionados, aguarde ansiosamente ✨" }, + "ollama": { + "download": { + "desc": "Ollama está baixando este modelo. Por favor, evite fechar esta página. O download será retomado do ponto em que parou, caso seja reiniciado.", + "remainingTime": "Tempo restante", + "speed": "Velocidade de download", + "title": "Baixando modelo {{model}}" + } + }, "plugin": { "addTooltip": "Adicionar plug-in personalizado", "clearDeprecated": "Remover plug-ins inválidos", @@ -428,4 +436,4 @@ }, "title": "Ferramentas de Extensão" } -} \ No newline at end of file +} diff --git a/locales/ru-RU/error.json b/locales/ru-RU/error.json index e11c811cbb696..713d289aeb0f2 100644 --- a/locales/ru-RU/error.json +++ b/locales/ru-RU/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "请求月球暗面服务出错,请根据以下信息排查或重试", "NoOpenAIAPIKey": "Ключ OpenAI API пуст, пожалуйста, добавьте свой собственный ключ OpenAI API", "OllamaBizError": "Ошибка обращения к сервису Ollama, пожалуйста, проверьте следующую информацию или повторите попытку", + "OllamaServiceUnavailable": "Сервис Ollama недоступен. Пожалуйста, проверьте, запущен ли он корректно.", "OpenAIBizError": "Ошибка запроса службы OpenAI. Устраните неполадку или повторите попытку, основываясь на следующей информации.", "PerplexityBizError": "Ошибка обращения к сервису Perplexity AI. Пожалуйста, проверьте информацию ниже или повторите попытку", "PluginApiNotFound": "К сожалению, API не существует в манифесте плагина. Пожалуйста, проверьте, соответствует ли ваш метод запроса API манифеста плагина", @@ -114,6 +115,13 @@ }, "closeMessage": "Закрыть сообщение", "confirm": "Подтвердить и повторить попытку", + "model": { + "Ollama": { + "confirm": "Скачать", + "description": "Введите метку вашей модели Ollama, чтобы продолжить сеанс", + "title": "Скачать указанную модель Ollama" + } + }, "oauth": { "description": "Администратор включил единую систему аутентификации. Нажмите кнопку ниже, чтобы войти и разблокировать приложение.", "success": "Успешный вход", diff --git a/locales/ru-RU/setting.json b/locales/ru-RU/setting.json index aae52b122e832..1d94c234ccc56 100644 --- a/locales/ru-RU/setting.json +++ b/locales/ru-RU/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Больше моделей доступно в <1>плане подключения, ожидайте ✨" }, + "ollama": { + "download": { + "desc": "Ollama загружает эту модель. Пожалуйста, не закрывайте эту страницу. 
Загрузка будет возобновлена с того же места, если вы решите начать её заново.", + "remainingTime": "Оставшееся время", + "speed": "Скорость загрузки", + "title": "Загрузка модели {{model}}" + } + }, "plugin": { "addTooltip": "Добавить настраиваемый плагин", "clearDeprecated": "Удалить устаревшие плагины", @@ -428,4 +436,4 @@ }, "title": "Дополнительные инструменты" } -} \ No newline at end of file +} diff --git a/locales/tr-TR/error.json b/locales/tr-TR/error.json index b5c387e744701..1112552b95079 100644 --- a/locales/tr-TR/error.json +++ b/locales/tr-TR/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "Moonshot hizmetinde bir hata oluştu, lütfen aşağıdaki bilgilere göre sorunu giderin veya tekrar deneyin", "NoOpenAIAPIKey": "OpenAI API Anahtarı boş, lütfen özel bir OpenAI API Anahtarı ekleyin", "OllamaBizError": "Ollama servisine yapılan istekte hata oluştu, lütfen aşağıdaki bilgilere göre sorunu gidermeye çalışın veya tekrar deneyin", + "OllamaServiceUnavailable": "Ollama 服务不可用,请检查是否已正常启动", "OpenAIBizError": "OpenAI hizmeti talep ederken hata oluştu. Aşağıdaki bilgilere dayanarak sorun giderin veya tekrar deneyin.", "PerplexityBizError": "Perplexity AI hizmetine yapılan istekte hata oluştu, lütfen aşağıdaki bilgilere göre sorunu gidermeye çalışın veya tekrar deneyin", "PluginApiNotFound": "Üzgünüm, eklentinin bildiriminde API mevcut değil. Lütfen istek yönteminizin eklenti bildirim API'sı ile eşleşip eşleşmediğini kontrol edin", @@ -114,6 +115,13 @@ }, "closeMessage": "Mesajı kapat", "confirm": "Onayla ve Yeniden Dene", + "model": { + "Ollama": { + "confirm": "下载", + "description": "Ollama model etiketinizi girin ve devam etmek için tamamlayın", + "title": "Belirli Ollama modelini indir" + } + }, "oauth": { "description": "Yönetici, tek oturum açma kimlik doğrulamasını etkinleştirdi. Aşağıdaki düğmeye tıklayarak giriş yapabilir ve uygulamayı kilidini açabilirsiniz.", "success": "Giriş başarılı", diff --git a/locales/tr-TR/setting.json b/locales/tr-TR/setting.json index 6dfd2b2748e46..f6f9eac773760 100644 --- a/locales/tr-TR/setting.json +++ b/locales/tr-TR/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Daha fazla model eklenmesi planlanıyor ✨" }, + "ollama": { + "download": { + "desc": "Ollama bu modeli indiriyor, lütfen bu sayfayı kapatmamaya çalışın. 
Yeniden indirme durumunda kaldığı yerden devam edecektir.", + "remainingTime": "Kalan Zaman", + "speed": "İndirme Hızı", + "title": "{{model}} Modeli İndiriliyor" + } + }, "plugin": { "addTooltip": "Eklenti Ekle", "clearDeprecated": "Kullanım Dışı Eklentileri Kaldır", @@ -428,4 +436,4 @@ }, "title": "Uzantı Araçları" } -} \ No newline at end of file +} diff --git a/locales/vi-VN/error.json b/locales/vi-VN/error.json index f46d9225e60fd..d68b62e389cc3 100644 --- a/locales/vi-VN/error.json +++ b/locales/vi-VN/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "Yêu cầu dịch vụ Mặt Trăng Tối gặp sự cố, vui lòng kiểm tra thông tin dưới đây hoặc thử lại", "NoOpenAIAPIKey": "Khóa API OpenAI trống, vui lòng thêm Khóa API OpenAI tùy chỉnh", "OllamaBizError": "Yêu cầu dịch vụ Ollama gặp lỗi, vui lòng kiểm tra thông tin dưới đây hoặc thử lại", + "OllamaServiceUnavailable": "Dịch vụ Ollama không khả dụng, vui lòng kiểm tra xem nó đã được khởi động chưa", "OpenAIBizError": "Yêu cầu dịch vụ OpenAI gặp lỗi, vui lòng xác minh hoặc thử lại dựa trên thông tin dưới đây", "PerplexityBizError": "Yêu cầu dịch vụ AI Perplexity gặp lỗi, vui lòng kiểm tra thông tin dưới đây hoặc thử lại sau", "PluginApiNotFound": "Xin lỗi, không có API nào trong tệp mô tả plugin, vui lòng kiểm tra phương thức yêu cầu của bạn có khớp với API mô tả plugin không", @@ -114,6 +115,13 @@ }, "closeMessage": "Đóng thông báo", "confirm": "Xác nhận và thử lại", + "model": { + "Ollama": { + "confirm": "Tải xuống", + "description": "Nhập nhãn mô hình Ollama của bạn để tiếp tục cuộc trò chuyện", + "title": "Tải xuống mô hình Ollama cụ thể" + } + }, "oauth": { "description": "Quản trị viên đã mở tính năng xác thực đăng nhập thống nhất. Nhấn vào nút bên dưới để đăng nhập và mở khóa ứng dụng", "success": "Đăng nhập thành công", diff --git a/locales/vi-VN/setting.json b/locales/vi-VN/setting.json index 7a2c24a98673d..47a56f540795e 100644 --- a/locales/vi-VN/setting.json +++ b/locales/vi-VN/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "Có thêm mô hình đang <1>được lên kế hoạch tích hợp, hãy chờ đợi ✨" }, + "ollama": { + "download": { + "desc": "Ollama đang tải xuống mô hình này, vui lòng không đóng trang này. 
Quá trình tải xuống sẽ tiếp tục từ nơi đã bị gián đoạn khi tải lại", + "remainingTime": "Thời gian còn lại", + "speed": "Tốc độ tải xuống", + "title": "Đang tải xuống mô hình {{model}}" + } + }, "plugin": { "addTooltip": "Thêm tiện ích", "clearDeprecated": "Xóa tiện ích không còn hỗ trợ", @@ -428,4 +436,4 @@ }, "title": "Công cụ mở rộng" } -} \ No newline at end of file +} diff --git a/locales/zh-CN/error.json b/locales/zh-CN/error.json index 43382d291144b..d172c93f4d431 100644 --- a/locales/zh-CN/error.json +++ b/locales/zh-CN/error.json @@ -67,6 +67,7 @@ "AnthropicBizError": "请求 Anthropic AI 服务出错,请根据以下信息排查或重试", "InvalidOllamaArgs": "Ollama 配置不正确,请检查 Ollama 配置后重试", "OllamaBizError": "请求 Ollama 服务出错,请根据以下信息排查或重试", + "OllamaServiceUnavailable": "未检测到 Ollama 服务,请检查是否正常启动", "AgentRuntimeError": "Lobe 语言模型运行时执行出错,请根据以下信息排查或重试" }, "stt": { @@ -114,6 +115,13 @@ }, "closeMessage": "关闭提示", "confirm": "确认并重试", + "model": { + "Ollama": { + "confirm": "下载", + "description": "输入你的 Ollama 模型标签,完成即可继续会话", + "title": "下载指定的 Ollama 模型" + } + }, "oauth": { "description": "管理员已开启统一登录认证,点击下方按钮登录,即可解锁应用", "success": "登录成功", diff --git a/locales/zh-CN/setting.json b/locales/zh-CN/setting.json index 16815fe428eaf..500650a859f08 100644 --- a/locales/zh-CN/setting.json +++ b/locales/zh-CN/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "更多模型正在 <1>计划接入 中,敬请期待 ✨" }, + "ollama": { + "download": { + "desc": "Ollama 正在下载该模型,请尽量不要关闭本页面。重新下载时将会中断处继续", + "remainingTime": "剩余时间", + "speed": "下载速度", + "title": "正在下载模型 {{model}} " + } + }, "plugin": { "addTooltip": "自定义插件", "clearDeprecated": "移除无效插件", @@ -428,4 +436,4 @@ }, "title": "扩展插件" } -} \ No newline at end of file +} diff --git a/locales/zh-TW/error.json b/locales/zh-TW/error.json index 9777104e807ed..ccfa5e9109f20 100644 --- a/locales/zh-TW/error.json +++ b/locales/zh-TW/error.json @@ -53,6 +53,7 @@ "MoonshotBizError": "請求月球背面服務出錯,請根據以下信息排查或重試", "NoOpenAIAPIKey": "OpenAI API 金鑰為空,請添加自訂 OpenAI API 金鑰", "OllamaBizError": "請求 Ollama 服務出錯,請根據以下資訊排查或重試", + "OllamaServiceUnavailable": "未偵測到 Ollama 服務,請檢查是否正常啟動", "OpenAIBizError": "請求 OpenAI 服務出錯。請根據以下資訊進行排查或重試。", "PerplexityBizError": "請求 Perplexity AI 服務出錯,請根據以下信息排查或重試", "PluginApiNotFound": "抱歉,外掛描述檔案中不存在該 API。請檢查您的請求方法與外掛清單 API 是否相符", @@ -114,6 +115,13 @@ }, "closeMessage": "關閉提示", "confirm": "確認並重試", + "model": { + "Ollama": { + "confirm": "下載", + "description": "輸入你的 Ollama 模型標籤,完成即可繼續會話", + "title": "下載指定的 Ollama 模型" + } + }, "oauth": { "description": "管理員已開啟統一登錄認證,點擊下方按鈕登錄,即可解鎖應用", "success": "登錄成功", diff --git a/locales/zh-TW/setting.json b/locales/zh-TW/setting.json index 30932d9725af4..68f313b043837 100644 --- a/locales/zh-TW/setting.json +++ b/locales/zh-TW/setting.json @@ -193,6 +193,14 @@ }, "waitingForMore": "更多模型正在 <1>計劃接入 中,敬請期待 ✨" }, + "ollama": { + "download": { + "desc": "Ollama 正在下載該模型,請儘量不要關閉本頁面。重新下載時將會中斷處繼續", + "remainingTime": "剩餘時間", + "speed": "下載速度", + "title": "正在下載模型 {{model}}" + } + }, "plugin": { "addTooltip": "新增自訂外掛程式", "clearDeprecated": "清除已棄用的外掛", @@ -428,4 +436,4 @@ }, "title": "擴展工具" } -} \ No newline at end of file +} diff --git a/package.json b/package.json index 8bd567f82313c..8fa91bd85f681 100644 --- a/package.json +++ b/package.json @@ -120,6 +120,7 @@ "next-sitemap": "^4", "numeral": "^2", "nuqs": "^1", + "ollama": "^0.5.0", "openai": "^4.22", "polished": "^4", "posthog-js": "^1", diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts index a57acb03bedca..cc242fb70630d 100644 --- a/src/app/api/config/route.ts +++ b/src/app/api/config/route.ts @@ -10,6 
+10,7 @@ export const runtime = 'edge'; */ export const GET = async () => { const { + ENABLE_LANGFUSE, CUSTOM_MODELS, ENABLED_MOONSHOT, ENABLED_ZHIPU, @@ -21,7 +22,7 @@ export const GET = async () => { ENABLED_ANTHROPIC, ENABLED_MISTRAL, DEFAULT_AGENT_CONFIG, - ENABLE_LANGFUSE, + OLLAMA_CUSTOM_MODELS, } = getServerConfig(); const config: GlobalServerConfig = { @@ -37,7 +38,7 @@ export const GET = async () => { google: { enabled: ENABLED_GOOGLE }, mistral: { enabled: ENABLED_MISTRAL }, moonshot: { enabled: ENABLED_MOONSHOT }, - ollama: { enabled: ENABLE_OLLAMA }, + ollama: { customModelName: OLLAMA_CUSTOM_MODELS, enabled: ENABLE_OLLAMA }, perplexity: { enabled: ENABLED_PERPLEXITY }, zhipu: { enabled: ENABLED_ZHIPU }, }, diff --git a/src/app/api/errorResponse.ts b/src/app/api/errorResponse.ts index c035619731fd4..b58ea9315efe0 100644 --- a/src/app/api/errorResponse.ts +++ b/src/app/api/errorResponse.ts @@ -1,5 +1,5 @@ import { AgentRuntimeErrorType, ILobeAgentRuntimeErrorType } from '@/libs/agent-runtime'; -import { ErrorResponse, ErrorType } from '@/types/fetch'; +import { ChatErrorType, ErrorResponse, ErrorType } from '@/types/fetch'; const getStatus = (errorType: ILobeAgentRuntimeErrorType | ErrorType) => { // InvalidAccessCode / InvalidAzureAPIKey / InvalidOpenAIAPIKey / InvalidZhipuAPIKey .... @@ -37,6 +37,7 @@ const getStatus = (errorType: ILobeAgentRuntimeErrorType | ErrorType) => { case AgentRuntimeErrorType.MoonshotBizError: { return 476; } + case ChatErrorType.OllamaServiceUnavailable: case AgentRuntimeErrorType.OllamaBizError: { return 478; } diff --git a/src/app/settings/llm/Ollama/Checker.tsx b/src/app/settings/llm/Ollama/Checker.tsx new file mode 100644 index 0000000000000..5dade1c356438 --- /dev/null +++ b/src/app/settings/llm/Ollama/Checker.tsx @@ -0,0 +1,73 @@ +import { CheckCircleFilled } from '@ant-design/icons'; +import { Alert, Highlighter } from '@lobehub/ui'; +import { Button } from 'antd'; +import { useTheme } from 'antd-style'; +import { ListResponse } from 'ollama/browser'; +import { memo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Flexbox } from 'react-layout-kit'; +import useSWR from 'swr'; + +import { useIsMobile } from '@/hooks/useIsMobile'; +import { ollamaService } from '@/services/ollama'; + +const OllamaChecker = memo(() => { + const { t } = useTranslation('setting'); + + const theme = useTheme(); + + const { data, error, isLoading, mutate } = useSWR( + 'ollama.list', + ollamaService.getModels, + { + revalidateOnFocus: false, + revalidateOnMount: false, + revalidateOnReconnect: false, + }, + ); + + const checkConnection = () => { + mutate(); + }; + + const isMobile = useIsMobile(); + + return ( + + + {!error && data?.models && ( + + + {t('llm.checker.pass')} + + )} + + + {error && ( + + + + {JSON.stringify(error.body || error, null, 2)} + + + } + message={t(`response.${error.type}` as any, { ns: 'error' })} + showIcon + type={'error'} + /> + + )} + + ); +}); + +export default OllamaChecker; diff --git a/src/app/settings/llm/Ollama/index.tsx b/src/app/settings/llm/Ollama/index.tsx index 6703d7a21c158..b5e3c78853c95 100644 --- a/src/app/settings/llm/Ollama/index.tsx +++ b/src/app/settings/llm/Ollama/index.tsx @@ -4,9 +4,7 @@ import { useTheme } from 'antd-style'; import { memo } from 'react'; import { useTranslation } from 'react-i18next'; -import { ModelProvider } from '@/libs/agent-runtime'; - -import Checker from '../components/Checker'; +import Checker from './Checker'; import ProviderConfig from 
'../components/ProviderConfig'; import { LLMProviderBaseUrlKey, LLMProviderConfigKey } from '../const'; @@ -38,7 +36,7 @@ const OllamaProvider = memo(() => { name: [LLMProviderConfigKey, providerKey, 'customModelName'], }, { - children: , + children: , desc: t('llm.Ollama.checker.desc'), label: t('llm.checker.title'), minWidth: undefined, diff --git a/src/app/settings/llm/components/Checker.tsx b/src/app/settings/llm/components/Checker.tsx index bea64975934b7..02a7bfe0625b4 100644 --- a/src/app/settings/llm/components/Checker.tsx +++ b/src/app/settings/llm/components/Checker.tsx @@ -16,6 +16,28 @@ interface ConnectionCheckerProps { provider: string; } +const Error = memo<{ error: ChatMessageError }>(({ error }) => { + const { t } = useTranslation('error'); + + return ( + + + + {JSON.stringify(error.body || error, null, 2)} + + + } + message={t(`response.${error.type}` as any)} + showIcon + type={'error'} + /> + + ); +}); + const Checker = memo(({ model, provider }) => { const { t } = useTranslation('setting'); @@ -75,23 +97,7 @@ const Checker = memo(({ model, provider }) => { {t('llm.checker.button')} - {error && ( - - - - {JSON.stringify(error.body || error, null, 2)} - - - } - message={t(`response.${error.type}` as any, { ns: 'error' })} - showIcon - type={'error'} - /> - - )} + {error && } ); }); diff --git a/src/components/ModelIcon/index.tsx b/src/components/ModelIcon/index.tsx index cd281f612d277..d23f49fededde 100644 --- a/src/components/ModelIcon/index.tsx +++ b/src/components/ModelIcon/index.tsx @@ -5,6 +5,7 @@ import { Claude, Gemini, Gemma, + LLaVA, Meta, Minimax, Mistral, @@ -29,6 +30,7 @@ const ModelIcon = memo(({ model, size = 12 }) => { if (model.includes('claude')) return ; if (model.includes('titan')) return ; if (model.includes('llama')) return ; + if (model.includes('llava')) return ; if (model.includes('gemini')) return ; if (model.includes('gemma')) return ; if (model.includes('qwen')) return ; diff --git a/src/components/ModelTag/ModelIcon.tsx b/src/components/ModelTag/ModelIcon.tsx index 66668c3382852..307693c2964d2 100644 --- a/src/components/ModelTag/ModelIcon.tsx +++ b/src/components/ModelTag/ModelIcon.tsx @@ -5,6 +5,7 @@ import { Claude, Gemini, Gemma, + LLaVA, Meta, Minimax, Mistral, @@ -28,6 +29,7 @@ const ModelIcon = memo(({ model, size = 12 }) => { if (model.includes('claude')) return ; if (model.includes('titan')) return ; if (model.includes('llama')) return ; + if (model.includes('llava')) return ; if (model.includes('gemini')) return ; if (model.includes('gemma')) return ; if (model.includes('moonshot')) return ; diff --git a/src/config/modelProviders/ollama.ts b/src/config/modelProviders/ollama.ts index 094c345050028..fb037575d30b1 100644 --- a/src/config/modelProviders/ollama.ts +++ b/src/config/modelProviders/ollama.ts @@ -92,6 +92,20 @@ const Ollama: ModelProviderCard = { tokens: 4800, vision: false, }, + { + displayName: 'Mixtral 8x7B', + functionCall: false, + id: 'mixtral', + tokens: 32_000, + vision: false, + }, + { + displayName: 'Qwen Chat 4B', + functionCall: false, + id: 'qwen', + tokens: 32_768, + vision: false, + }, { displayName: 'Qwen Chat 7B', functionCall: false, diff --git a/src/config/server/provider.ts b/src/config/server/provider.ts index 55a702938b1e2..785125ed59f89 100644 --- a/src/config/server/provider.ts +++ b/src/config/server/provider.ts @@ -47,6 +47,7 @@ declare global { // Ollama Provider; OLLAMA_PROXY_URL?: string; + OLLAMA_CUSTOM_MODELS?: string; } } } @@ -116,5 +117,6 @@ export const getProviderConfig = () => { 
ENABLE_OLLAMA: !!process.env.OLLAMA_PROXY_URL, OLLAMA_PROXY_URL: process.env.OLLAMA_PROXY_URL || '', + OLLAMA_CUSTOM_MODELS: process.env.OLLAMA_CUSTOM_MODELS, }; }; diff --git a/src/features/Conversation/Error/InvalidOllamaModel/index.tsx b/src/features/Conversation/Error/InvalidOllamaModel/index.tsx new file mode 100644 index 0000000000000..014eddb515a9b --- /dev/null +++ b/src/features/Conversation/Error/InvalidOllamaModel/index.tsx @@ -0,0 +1,138 @@ +import { Ollama } from '@lobehub/icons'; +import { Button, Input, Progress } from 'antd'; +import { useTheme } from 'antd-style'; +import { memo, useMemo, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Center, Flexbox } from 'react-layout-kit'; +import useSWR from 'swr'; + +import { ollamaService } from '@/services/ollama'; +import { useChatStore } from '@/store/chat'; + +import { ErrorActionContainer, FormAction } from '../style'; +import { useDownloadMonitor } from './useDownloadMonitor'; + +interface OllamaModelFormProps { + id: string; + model: string; +} + +const OllamaModelForm = memo(({ id, model }) => { + const { t } = useTranslation('error'); + const { t: settingT } = useTranslation('setting'); + + const [modelToPull, setModelToPull] = useState(model); + const [completed, setCompleted] = useState(0); + const [total, setTotal] = useState(0); + const { remainingTime, downloadSpeed } = useDownloadMonitor(total, completed); + const percent = useMemo(() => { + return total ? Number(((completed / total) * 100).toFixed(0)) : 0; + }, [completed, total]); + + const [delAndRegenerateMessage, deleteMessage] = useChatStore((s) => [ + s.delAndRegenerateMessage, + s.deleteMessage, + ]); + const theme = useTheme(); + + const { mutate, isLoading: isDownloading } = useSWR( + [id, modelToPull], + async ([, model]) => { + const generator = await ollamaService.pullModel(model); + for await (const progress of generator) { + if (progress.completed) { + setCompleted(progress.completed); + setTotal(progress.total); + } + } + return null; + }, + { + onSuccess: () => { + delAndRegenerateMessage(id); + }, + revalidateOnFocus: false, + revalidateOnMount: false, + }, + ); + + return ( +
+ } + description={ + isDownloading ? settingT('ollama.download.desc') : t('unlock.model.Ollama.description') + } + title={ + isDownloading + ? settingT('ollama.download.title', { model: modelToPull }) + : t('unlock.model.Ollama.title') + } + > + {!isDownloading && ( + { + setModelToPull(e.target.value); + }} + value={modelToPull} + /> + )} + + {isDownloading && ( + + + + + {settingT('ollama.download.remainingTime')}: {remainingTime} + + + {settingT('ollama.download.speed')}: {downloadSpeed} + + + + )} + + + + +
+ ); +}); + +interface InvalidOllamaModelProps { + id: string; + model: string; +} + +const InvalidOllamaModel = memo(({ id, model }) => ( + + + +)); + +export default InvalidOllamaModel; diff --git a/src/features/Conversation/Error/InvalidOllamaModel/useDownloadMonitor.ts b/src/features/Conversation/Error/InvalidOllamaModel/useDownloadMonitor.ts new file mode 100644 index 0000000000000..8f9b55e71dbf2 --- /dev/null +++ b/src/features/Conversation/Error/InvalidOllamaModel/useDownloadMonitor.ts @@ -0,0 +1,48 @@ +import { useEffect, useMemo, useState } from 'react'; + +const formatSpeed = (speed: number): string => { + const kbPerSecond = speed / 1024; + if (kbPerSecond < 1024) { + return `${kbPerSecond.toFixed(1)} KB/s`; + } else { + const mbPerSecond = kbPerSecond / 1024; + return `${mbPerSecond.toFixed(1)} MB/s`; + } +}; + +const formatTime = (timeInSeconds: number): string => { + if (timeInSeconds < 60) { + return `${timeInSeconds.toFixed(1)} s`; + } else if (timeInSeconds < 3600) { + return `${(timeInSeconds / 60).toFixed(1)} min`; + } else { + return `${(timeInSeconds / 3600).toFixed(2)} h`; + } +}; + +export const useDownloadMonitor = (totalSize: number, completedSize: number) => { + const [startTime, setStartTime] = useState(Date.now()); + const [downloadSpeed, setDownloadSpeed] = useState('0 KB/s'); + const [remainingTime, setRemainingTime] = useState('-'); + + const isReady = useMemo(() => completedSize > 0, [completedSize]); + + useEffect(() => { + const currentTime = Date.now(); + // mark as start download + if (isReady) { + const elapsedTime = (currentTime - startTime) / 1000; // in seconds + const speed = completedSize / elapsedTime; // in bytes per second + + const remainingSize = totalSize - completedSize; + const time = remainingSize / speed; // in seconds + + setDownloadSpeed(formatSpeed(speed)); + setRemainingTime(formatTime(time)); + } else { + setStartTime(currentTime); + } + }, [isReady, completedSize]); + + return { downloadSpeed, remainingTime }; +}; diff --git a/src/features/Conversation/Error/OllamaBizError.tsx b/src/features/Conversation/Error/OllamaBizError.tsx new file mode 100644 index 0000000000000..ee1f13d76050a --- /dev/null +++ b/src/features/Conversation/Error/OllamaBizError.tsx @@ -0,0 +1,34 @@ +import { memo } from 'react'; + +import { ChatMessage } from '@/types/message'; + +import ErrorJsonViewer from './ErrorJsonViewer'; +import InvalidModel from './InvalidOllamaModel'; + +interface OllamaError { + code: string | null; + message: string; + param?: any; + type: string; +} + +interface OllamaErrorResponse { + error: OllamaError; +} + +const UNRESOLVED_MODEL_REGEXP = /model '([\w+,-_]+)' not found/; + +const OllamaBizError = memo(({ error, id }) => { + const errorBody: OllamaErrorResponse = (error as any)?.body; + + const errorMessage = errorBody.error?.message; + + const unresolvedModel = errorMessage?.match(UNRESOLVED_MODEL_REGEXP)?.[1]; + if (unresolvedModel) { + return ; + } + + return ; +}); + +export default OllamaBizError; diff --git a/src/features/Conversation/Error/index.tsx b/src/features/Conversation/Error/index.tsx index 8b83bcffe5bcf..246c68b7e5fef 100644 --- a/src/features/Conversation/Error/index.tsx +++ b/src/features/Conversation/Error/index.tsx @@ -9,6 +9,7 @@ import { ChatMessage, ChatMessageError } from '@/types/message'; import ErrorJsonViewer from './ErrorJsonViewer'; import InvalidAPIKey from './InvalidAPIKey'; import InvalidAccessCode from './InvalidAccessCode'; +import OllamaBizError from './OllamaBizError'; import OpenAiBizError 
from './OpenAiBizError'; import PluginSettings from './PluginSettings'; @@ -58,6 +59,10 @@ const ErrorMessageExtra = memo<{ data: ChatMessage }>(({ data }) => { return ; } + case AgentRuntimeErrorType.OllamaBizError: { + return ; + } + case ChatErrorType.InvalidAccessCode: { return ; } diff --git a/src/features/Conversation/Error/style.tsx b/src/features/Conversation/Error/style.tsx index 7a9dbeaa50feb..ee7d6d6d0355a 100644 --- a/src/features/Conversation/Error/style.tsx +++ b/src/features/Conversation/Error/style.tsx @@ -36,14 +36,14 @@ export const FormAction = memo<{ const { styles, theme } = useStyles(); return ( -
+
- {title} + {title} {description} {children}
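
Note (not part of the patch): the download card in InvalidOllamaModel derives speed and ETA from the pull-progress stream the same way useDownloadMonitor does above. A minimal standalone sketch of that loop follows; the host and the 'llava' model tag are placeholder assumptions, not values taken from the PR.

import { Ollama } from 'ollama/browser';

// Sketch: stream pull progress and derive download speed / remaining time
// from completed vs. total bytes, mirroring useDownloadMonitor above.
const monitorPull = async (model: string) => {
  const ollama = new Ollama({ host: 'http://127.0.0.1:11434' }); // assumed local default
  const startedAt = Date.now();

  const progressStream = await ollama.pull({ model, stream: true });
  for await (const progress of progressStream) {
    // status-only events carry no byte counts; skip them
    if (!progress.completed || !progress.total) continue;

    const elapsedSeconds = (Date.now() - startedAt) / 1000;
    const bytesPerSecond = progress.completed / elapsedSeconds;
    const remainingSeconds = (progress.total - progress.completed) / bytesPerSecond;

    console.log(
      `${((progress.completed / progress.total) * 100).toFixed(0)}%`,
      `${(bytesPerSecond / 1024 / 1024).toFixed(1)} MB/s`,
      `${remainingSeconds.toFixed(0)}s remaining`,
    );
  }
};

monitorPull('llava').catch(console.error); // 'llava' is only an example tag
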
diff --git a/src/locales/default/error.ts b/src/locales/default/error.ts index 5e08ac6b36a54..ba00343c34bb3 100644 --- a/src/locales/default/error.ts +++ b/src/locales/default/error.ts @@ -84,6 +84,7 @@ export default { InvalidOllamaArgs: 'Ollama 配置不正确,请检查 Ollama 配置后重试', OllamaBizError: '请求 Ollama 服务出错,请根据以下信息排查或重试', + OllamaServiceUnavailable: '未检测到 Ollama 服务,请检查是否正常启动', AgentRuntimeError: 'Lobe 语言模型运行时执行出错,请根据以下信息排查或重试', /* eslint-enable */ @@ -134,6 +135,13 @@ export default { }, closeMessage: '关闭提示', confirm: '确认并重试', + model: { + Ollama: { + confirm: '下载', + description: '输入你的 Ollama 模型标签,完成即可继续会话', + title: '下载指定的 Ollama 模型', + }, + }, oauth: { description: '管理员已开启统一登录认证,点击下方按钮登录,即可解锁应用', success: '登录成功', diff --git a/src/locales/default/setting.ts b/src/locales/default/setting.ts index ac9955eb75969..ade6cd400db80 100644 --- a/src/locales/default/setting.ts +++ b/src/locales/default/setting.ts @@ -195,6 +195,14 @@ export default { }, waitingForMore: '更多模型正在 <1>计划接入 中,敬请期待 ✨', }, + ollama: { + download: { + desc: 'Ollama 正在下载该模型,请尽量不要关闭本页面。重新下载时将会中断处继续', + remainingTime: '剩余时间', + speed: '下载速度', + title: '正在下载模型 {{model}} ', + }, + }, plugin: { addTooltip: '自定义插件', clearDeprecated: '移除无效插件', @@ -410,6 +418,7 @@ export default { placeholder: '请输入助手的标识符,需要是唯一的,比如 web-development', tooltips: '分享到助手市场', }, + tab: { about: '关于', agent: '默认助手', @@ -417,7 +426,6 @@ export default { llm: '语言模型', tts: '语音服务', }, - tools: { builtins: { groupName: '内置插件', diff --git a/src/services/__tests__/ollama.test.ts b/src/services/__tests__/ollama.test.ts new file mode 100644 index 0000000000000..0e46c046c7df8 --- /dev/null +++ b/src/services/__tests__/ollama.test.ts @@ -0,0 +1,26 @@ +import { Mock, describe, expect, it, vi } from 'vitest'; + +import { ollamaService } from '../ollama'; + +vi.stubGlobal('fetch', vi.fn()); + +describe('OllamaService', () => { + describe('list models', async () => { + it('should make a GET request with the correct payload', async () => { + (fetch as Mock).mockResolvedValueOnce(new Response(JSON.stringify({ models: [] }))); + + expect(await ollamaService.getModels()).toEqual({ models: [] }); + + expect(global.fetch).toHaveBeenCalled(); + }); + + it('should make a GET request with the error', async () => { + const mockResponse = new Response(null, { status: 503 }); + (fetch as Mock).mockResolvedValueOnce(mockResponse); + + await expect(ollamaService.getModels()).rejects.toThrow(); + + expect(global.fetch).toHaveBeenCalled(); + }); + }); +}); diff --git a/src/services/ollama.ts b/src/services/ollama.ts new file mode 100644 index 0000000000000..fa33666bb556a --- /dev/null +++ b/src/services/ollama.ts @@ -0,0 +1,64 @@ +import { ListResponse, Ollama as OllamaBrowser, ProgressResponse } from 'ollama/browser'; + +import { createErrorResponse } from '@/app/api/errorResponse'; +import { ModelProvider } from '@/libs/agent-runtime'; +import { useGlobalStore } from '@/store/global'; +import { modelProviderSelectors } from '@/store/global/selectors'; +import { ChatErrorType } from '@/types/fetch'; +import { getMessageError } from '@/utils/fetch'; + +const DEFAULT_BASE_URL = 'http://127.0.0.1:11434/v1'; + +class OllamaService { + getHost = (): string => { + const endpoint = modelProviderSelectors.ollamaProxyUrl(useGlobalStore.getState()); + const url = new URL(endpoint || DEFAULT_BASE_URL); + return url.host; + }; + + getOllamaClient = () => { + return new OllamaBrowser({ host: this.getHost() }); + }; + + pullModel = async (model: string): Promise> => { + let response: Response | 
AsyncGenerator; + try { + response = await this.getOllamaClient().pull({ insecure: true, model, stream: true }); + return response; + } catch { + response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, { + host: this.getHost(), + message: 'please check whether your ollama service is available', + provider: ModelProvider.Ollama, + }); + } + + if (!response.ok) { + const messageError = await getMessageError(response); + throw messageError; + } + return response.json(); + }; + + getModels = async (): Promise => { + let response: Response | ListResponse; + try { + const response = await this.getOllamaClient().list(); + return response; + } catch { + response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, { + host: this.getHost(), + message: 'please check whether your ollama service is available', + provider: ModelProvider.Ollama, + }); + } + + if (!response.ok) { + const messageError = await getMessageError(response); + throw messageError; + } + return response.json(); + }; +} + +export const ollamaService = new OllamaService(); diff --git a/src/store/global/slices/settings/selectors/modelProvider.ts b/src/store/global/slices/settings/selectors/modelProvider.ts index 86874532baae0..1980a8c0be436 100644 --- a/src/store/global/slices/settings/selectors/modelProvider.ts +++ b/src/store/global/slices/settings/selectors/modelProvider.ts @@ -48,9 +48,6 @@ const mistralAPIKey = (s: GlobalStore) => modelProvider(s).mistral.apiKey; const enableMoonshot = (s: GlobalStore) => modelProvider(s).moonshot.enabled; const moonshotAPIKey = (s: GlobalStore) => modelProvider(s).moonshot.apiKey; -const enableOllamaConfigInSettings = (s: GlobalStore) => - s.serverConfig.languageModel?.ollama?.enabled || false; - const enableOllama = (s: GlobalStore) => modelProvider(s).ollama.enabled; const ollamaProxyUrl = (s: GlobalStore) => modelProvider(s).ollama.endpoint; @@ -118,27 +115,32 @@ const processChatModels = ( }; const modelSelectList = (s: GlobalStore): ModelProviderCard[] => { - const string = [ + const openaiModelString = [ s.serverConfig.customModelName, currentSettings(s).languageModel.openAI.customModelName, ] .filter(Boolean) .join(','); - const modelConfig = parseModelString(string); + const openaiModelConfig = parseModelString(openaiModelString); - const chatModels = processChatModels(modelConfig); + const openaiChatModels = processChatModels(openaiModelConfig); - const ollamaModelConfig = parseModelString( + const ollamaModelString = [ + s.serverConfig.languageModel?.ollama?.customModelName, currentSettings(s).languageModel.ollama.customModelName, - ); + ] + .filter(Boolean) + .join(','); + + const ollamaModelConfig = parseModelString(ollamaModelString); const ollamaChatModels = processChatModels(ollamaModelConfig, OllamaProvider.chatModels); return [ { ...OpenAIProvider, - chatModels, + chatModels: openaiChatModels, }, // { ...azureModelList(s), enabled: enableAzure(s) }, { ...ZhiPuProvider, enabled: enableZhipu(s) }, @@ -216,7 +218,6 @@ export const modelProviderSelectors = { moonshotAPIKey, // Ollama - enableOllamaConfigInSettings, enableOllama, ollamaProxyUrl, diff --git a/src/store/middleware/createHyperStorage/index.ts b/src/store/middleware/createHyperStorage/index.ts index ded7bdb7bc8bb..b43dbcbb9855d 100644 --- a/src/store/middleware/createHyperStorage/index.ts +++ b/src/store/middleware/createHyperStorage/index.ts @@ -1,5 +1,4 @@ -import { PersistStorage } from 'zustand/middleware'; -import { StorageValue } from 'zustand/middleware/persist'; +import { PersistStorage, 
StorageValue } from 'zustand/middleware'; import { createIndexedDB } from './indexedDB'; import { createKeyMapper } from './keyMapper'; diff --git a/src/store/middleware/createHyperStorage/indexedDB.ts b/src/store/middleware/createHyperStorage/indexedDB.ts index 0ce36c489fbbf..9bbdc67bec41f 100644 --- a/src/store/middleware/createHyperStorage/indexedDB.ts +++ b/src/store/middleware/createHyperStorage/indexedDB.ts @@ -1,5 +1,5 @@ import { createStore, delMany, getMany, setMany } from 'idb-keyval'; -import { StorageValue } from 'zustand/middleware/persist'; +import { StorageValue } from 'zustand/middleware'; export const createIndexedDB = (dbName: string = 'indexedDB') => ({ getItem: async (name: string): Promise | undefined> => { diff --git a/src/store/middleware/createHyperStorage/localStorage.ts b/src/store/middleware/createHyperStorage/localStorage.ts index e230af3d49c6b..9b466b6a2a385 100644 --- a/src/store/middleware/createHyperStorage/localStorage.ts +++ b/src/store/middleware/createHyperStorage/localStorage.ts @@ -1,4 +1,4 @@ -import { StorageValue } from 'zustand/middleware/persist'; +import { StorageValue } from 'zustand/middleware'; export const createLocalStorage = () => ({ getItem: (name: string): StorageValue | undefined => { diff --git a/src/store/middleware/createHyperStorage/urlStorage.ts b/src/store/middleware/createHyperStorage/urlStorage.ts index 950eb8b0a172e..570ad72dcaf6e 100644 --- a/src/store/middleware/createHyperStorage/urlStorage.ts +++ b/src/store/middleware/createHyperStorage/urlStorage.ts @@ -1,5 +1,5 @@ import { isEmpty } from 'lodash-es'; -import { StorageValue } from 'zustand/middleware/persist'; +import { StorageValue } from 'zustand/middleware'; interface UrlSearchHelper { getUrlSearch: () => string; diff --git a/src/types/fetch.ts b/src/types/fetch.ts index eabd380f50651..059be2248e672 100644 --- a/src/types/fetch.ts +++ b/src/types/fetch.ts @@ -7,6 +7,7 @@ export const ChatErrorType = { InvalidAccessCode: 'InvalidAccessCode', // 密码无效 OpenAIBizError: 'OpenAIBizError', // OpenAI 返回的业务错误 NoOpenAIAPIKey: 'NoOpenAIAPIKey', + OllamaServiceUnavailable: 'OllamaServiceUnavailable', // 未启动/检测到 Ollama 服务 // ******* 客户端错误 ******* // BadRequest: 400, diff --git a/tsconfig.json b/tsconfig.json index 4d0ff329a02a5..e53c40e6d49d3 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -10,7 +10,7 @@ "noEmit": true, "esModuleInterop": true, "module": "esnext", - "moduleResolution": "node", + "moduleResolution": "bundler", "resolveJsonModule": true, "isolatedModules": true, "jsx": "preserve",
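
Usage note (outside the patch): the improved connection check ultimately queries Ollama's tags endpoint, since ollama/browser's list() wraps GET /api/tags. Below is a minimal sketch of the same availability probe with plain fetch; the base URL stands in for OLLAMA_PROXY_URL and is an assumption, not part of the PR.

// Sketch: probe Ollama availability the way the new checker does, by listing
// local models from /api/tags; a network failure corresponds to the new
// OllamaServiceUnavailable error type.
const checkOllamaService = async (baseUrl = 'http://127.0.0.1:11434') => {
  try {
    const res = await fetch(`${baseUrl}/api/tags`);
    if (!res.ok) return { available: false, models: [] as string[] };

    const data = (await res.json()) as { models: { name: string }[] };
    return { available: true, models: data.models.map((m) => m.name) };
  } catch {
    return { available: false, models: [] as string[] };
  }
};

checkOllamaService().then((result) => console.log(result));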