From cf038e099f9ad0a0f4177048874fbd230e2dfb99 Mon Sep 17 00:00:00 2001
From: 黄腾 <101850389+hangters@users.noreply.github.com>
Date: Tue, 27 Aug 2024 11:42:00 +0800
Subject: [PATCH] update groq llm (#2103)

### What problem does this PR solve?

Resolves #2076 by updating the Groq LLM list: adds `llama-3.1-70b-versatile` and `llama-3.1-8b-instant`, both chat models with a 128k (131072-token) context window.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

Co-authored-by: Zhedong Cen
---
 conf/llm_factories.json | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/conf/llm_factories.json b/conf/llm_factories.json
index 289dcffa1..0082a3f75 100644
--- a/conf/llm_factories.json
+++ b/conf/llm_factories.json
@@ -906,6 +906,18 @@
                     "max_tokens": 8192,
                     "model_type": "chat"
                 },
+                {
+                    "llm_name": "llama-3.1-70b-versatile",
+                    "tags": "LLM,CHAT,128k",
+                    "max_tokens": 131072,
+                    "model_type": "chat"
+                },
+                {
+                    "llm_name": "llama-3.1-8b-instant",
+                    "tags": "LLM,CHAT,128k",
+                    "max_tokens": 131072,
+                    "model_type": "chat"
+                },
                 {
                     "llm_name": "mixtral-8x7b-32768",
                     "tags": "LLM,CHAT,5k",
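
For anyone applying this change locally, a minimal sanity-check sketch follows. It is only an illustration under stated assumptions, not part of the patch: it assumes the edited file is at `conf/llm_factories.json`, that its top-level key is `factory_llm_infos`, and that the Groq provider entry is named `Groq`; adjust these names if the actual schema differs.

```python
# Minimal sketch: confirm the edited config still parses as JSON and that the
# two new Groq models are present.
# Assumptions (not confirmed by this patch): the file path, the top-level
# "factory_llm_infos" key, and the factory name "Groq".
import json

with open("conf/llm_factories.json", encoding="utf-8") as fp:
    factories = json.load(fp)["factory_llm_infos"]

groq = next(fac for fac in factories if fac["name"] == "Groq")
names = {model["llm_name"] for model in groq["llm"]}
assert {"llama-3.1-70b-versatile", "llama-3.1-8b-instant"} <= names
print(sorted(names))
```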