From 939f72d16cc7be83ecb321c77aa89e4e4cf23761 Mon Sep 17 00:00:00 2001
From: Ariya Hidayat
Date: Fri, 20 Sep 2024 22:29:48 -0700
Subject: [PATCH] Support Mistral La Plateforme

---
 .github/workflows/test-mistral.yml | 24 ++++++++++++++++++++++++
 README.md                          | 10 +++++++++-
 2 files changed, 33 insertions(+), 1 deletion(-)
 create mode 100644 .github/workflows/test-mistral.yml

diff --git a/.github/workflows/test-mistral.yml b/.github/workflows/test-mistral.yml
new file mode 100644
index 0000000..3707f8c
--- /dev/null
+++ b/.github/workflows/test-mistral.yml
@@ -0,0 +1,24 @@
+name: Test on Mistral
+
+on:
+  push:
+    branches: [main]
+  workflow_dispatch:
+
+jobs:
+  test:
+    runs-on: ubuntu-22.04
+    timeout-minutes: 5
+    steps:
+      - uses: actions/checkout@v4
+
+      - run: node --version
+
+      - run: echo 'Which planet in our solar system is the largest?' | ./ask-llm.js | tee output.txt | grep -i jupiter
+        timeout-minutes: 3
+        env:
+          LLM_API_BASE_URL: 'https://api.mistral.ai/v1'
+          LLM_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
+          LLM_CHAT_MODEL: 'open-mistral-7b'
+
+      - run: cat output.txt
diff --git a/README.md b/README.md
index 0e4e94e..e1f2746 100644
--- a/README.md
+++ b/README.md
@@ -69,13 +69,14 @@ export LLM_API_BASE_URL=http://localhost:3928/v1
 [![Test on Groq](https://github.com/ariya/ask-llm/actions/workflows/test-groq.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-groq.yml)
 [![Test on Hyperbolic](https://github.com/ariya/ask-llm/actions/workflows/test-hyperbolic.yml/badge.svg)](https://github.com/ariya//ask-llm/actions/workflows/test-hyperbolic.yml)
 [![Test on Lepton](https://github.com/ariya/ask-llm/actions/workflows/test-lepton.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-lepton.yml)
+[![Test on Mistral](https://github.com/ariya/ask-llm/actions/workflows/test-mistral.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-mistral.yml)
 [![Test on Novita](https://github.com/ariya/ask-llm/actions/workflows/test-novita.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-novita.yml)
 [![Test on Octo](https://github.com/ariya/ask-llm/actions/workflows/test-octo.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-octo.yml)
 [![Test on OpenAI](https://github.com/ariya/ask-llm/actions/workflows/test-openai.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-openai.yml)
 [![Test on OpenRouter](https://github.com/ariya/ask-llm/actions/workflows/test-openrouter.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-openrouter.yml)
 [![Test on Together](https://github.com/ariya/ask-llm/actions/workflows/test-together.yml/badge.svg)](https://github.com/ariya/ask-llm/actions/workflows/test-together.yml)
 
-Supported LLM services include [AI21](https://studio.ai21.com), [Deep Infra](https://deepinfra.com), [DeepSeek](https://platform.deepseek.com/), [Fireworks](https://fireworks.ai), [Groq](https://groq.com), [Hyperbolic](https://www.hyperbolic.xyz), [Lepton](https://lepton.ai), [Novita](https://novita.ai), [Octo](https://octo.ai), [OpenAI](https://platform.openai.com), [OpenRouter](https://openrouter.ai), and [Together](https://www.together.ai).
+Supported LLM services include [AI21](https://studio.ai21.com), [Deep Infra](https://deepinfra.com), [DeepSeek](https://platform.deepseek.com/), [Fireworks](https://fireworks.ai), [Groq](https://groq.com), [Hyperbolic](https://www.hyperbolic.xyz), [Lepton](https://lepton.ai), [Mistral](https://console.mistral.ai), [Novita](https://novita.ai), [Octo](https://octo.ai), [OpenAI](https://platform.openai.com), [OpenRouter](https://openrouter.ai), and [Together](https://www.together.ai).
 
 For configuration specifics, refer to the relevant section. The examples use Llama-3.1 8B (or GPT-4o Mini for OpenAI), but any LLM with at least 7B parameters should work just as well, such as Mistral 7B, Qwen-2 7B, or Gemma-2 9B.
 
@@ -128,6 +129,13 @@ export LLM_API_KEY="yourownapikey"
 export LLM_CHAT_MODEL="llama3-1-8b"
 ```
 
+* [Mistral](https://console.mistral.ai)
+```bash
+export LLM_API_BASE_URL=https://api.mistral.ai/v1
+export LLM_API_KEY="yourownapikey"
+export LLM_CHAT_MODEL="open-mistral-7b"
+```
+
 * [Novita](https://novita.ai)
 ```bash
 export LLM_API_BASE_URL=https://api.novita.ai/v3/openai
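
For a quick local sanity check of the new configuration outside CI, the README snippet and the workflow's test step can be combined into one shell session. This is a minimal sketch, assuming a valid key is available in the `MISTRAL_API_KEY` environment variable and that `ask-llm.js` is run from the repository root:

```bash
# Sketch mirroring the new test-mistral.yml job (assumes MISTRAL_API_KEY
# holds a valid Mistral key and ./ask-llm.js is executable in this directory).
export LLM_API_BASE_URL=https://api.mistral.ai/v1
export LLM_API_KEY="$MISTRAL_API_KEY"
export LLM_CHAT_MODEL="open-mistral-7b"

# Same prompt and check as the CI step: the reply should mention Jupiter.
echo 'Which planet in our solar system is the largest?' | ./ask-llm.js | tee output.txt | grep -i jupiter
```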