From e7a4cfac4da398ae0789d1def930bc53d5175e45 Mon Sep 17 00:00:00 2001
From: crazywoola <100913391+crazywoola@users.noreply.github.com>
Date: Thu, 12 Dec 2024 16:33:49 +0800
Subject: [PATCH] fix: name of llama-3.3-70b-specdec (#11596)

---
 .../groq/llm/llama-3.3-70b-specdec\t.yaml" | 25 -------------------
 1 file changed, 25 deletions(-)
 delete mode 100644 "api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-specdec\t.yaml"

diff --git "a/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-specdec\t.yaml" "b/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-specdec\t.yaml"
deleted file mode 100644
index ac6d529c34..0000000000
--- "a/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-specdec\t.yaml"
+++ /dev/null
@@ -1,25 +0,0 @@
-model: llama-3.3-70b-specdec
-label:
-  zh_Hans: Llama 3.3 70b Speculative Decoding (PREVIEW)
-  en_US: Llama 3.3 70b Speculative Decoding (PREVIEW)
-model_type: llm
-features:
-  - agent-thought
-model_properties:
-  mode: chat
-  context_size: 131072
-parameter_rules:
-  - name: temperature
-    use_template: temperature
-  - name: top_p
-    use_template: top_p
-  - name: max_tokens
-    use_template: max_tokens
-    default: 512
-    min: 1
-    max: 8192
-pricing:
-  input: '0.05'
-  output: '0.1'
-  unit: '0.000001'
-  currency: USD
\ No newline at end of file