add support for Anthropic (#2148)

### What problem does this PR solve?

Resolves #1853: add support for Anthropic chat models (Claude).

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

---------

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>
Co-authored-by: Kevin Hu <kevinhu.sh@gmail.com>
黄腾 2024-08-29 13:30:06 +08:00 committed by GitHub
parent 0abc01311b
commit 06abef66ef
7 changed files with 124 additions and 5 deletions
@@ -3240,6 +3240,56 @@
            "tags": "SPEECH2TEXT",
            "status": "1",
            "llm": []
        },
        {
            "name": "Anthropic",
            "logo": "",
            "tags": "LLM",
            "status": "1",
            "llm": [
                {
                    "llm_name": "claude-3-5-sonnet-20240620",
                    "tags": "LLM,CHAT,200k",
                    "max_tokens": 204800,
                    "model_type": "chat"
                },
                {
                    "llm_name": "claude-3-opus-20240229",
                    "tags": "LLM,CHAT,200k",
                    "max_tokens": 204800,
                    "model_type": "chat"
                },
                {
                    "llm_name": "claude-3-sonnet-20240229",
                    "tags": "LLM,CHAT,200k",
                    "max_tokens": 204800,
                    "model_type": "chat"
                },
                {
                    "llm_name": "claude-3-haiku-20240307",
                    "tags": "LLM,CHAT,200k",
                    "max_tokens": 204800,
                    "model_type": "chat"
                },
                {
                    "llm_name": "claude-2.1",
                    "tags": "LLM,CHAT,200k",
                    "max_tokens": 204800,
                    "model_type": "chat"
                },
                {
                    "llm_name": "claude-2.0",
                    "tags": "LLM,CHAT,100k",
                    "max_tokens": 102400,
                    "model_type": "chat"
                },
                {
                    "llm_name": "claude-instant-1.2",
                    "tags": "LLM,CHAT,100k",
                    "max_tokens": 102400,
                    "model_type": "chat"
                }
            ]
        }
    ]
}


@@ -104,7 +104,8 @@ ChatModel = {
    "Replicate": ReplicateChat,
    "Tencent Hunyuan": HunyuanChat,
    "XunFei Spark": SparkChat,
    "BaiduYiyan": BaiduYiyanChat
    "BaiduYiyan": BaiduYiyanChat,
    "Anthropic": AnthropicChat
}
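
For reference, here is a minimal sketch (not part of this commit) of how a registry lookup like the one above is typically consumed: the factory name is resolved against `ChatModel` and the matching wrapper class is instantiated. The import path, API key, and model name below are placeholders.

```python
# Illustrative sketch only: resolve the new "Anthropic" registry entry and
# instantiate the wrapper class it maps to. The import path assumes the
# registry lives in rag/llm/__init__.py; key and model name are placeholders.
from rag.llm import ChatModel

chat_cls = ChatModel["Anthropic"]          # -> AnthropicChat
chat_mdl = chat_cls(key="sk-ant-xxxx", model_name="claude-3-5-sonnet-20240620")

answer, used_tokens = chat_mdl.chat(
    system="You are a helpful assistant.",
    history=[{"role": "user", "content": "Hello!"}],
    gen_conf={"temperature": 0.7},
)
```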


@@ -1193,3 +1193,67 @@ class BaiduYiyanChat(Base):
            return ans + "\n**ERROR**: " + str(e), 0

        yield total_tokens


class AnthropicChat(Base):
    def __init__(self, key, model_name, base_url=None):
        import anthropic

        self.client = anthropic.Anthropic(api_key=key)
        self.model_name = model_name
        self.system = ""

    def chat(self, system, history, gen_conf):
        if system:
            self.system = system
        # Anthropic requires max_tokens to be set explicitly.
        if "max_tokens" not in gen_conf:
            gen_conf["max_tokens"] = 4096

        ans = ""
        try:
            response = self.client.messages.create(
                model=self.model_name,
                messages=history,
                system=self.system,
                stream=False,
                **gen_conf,
            )
            ans = response.content[0].text
            if response.stop_reason == "max_tokens":
                ans += (
                    "...\nFor the content length reason, it stopped, continue?"
                    if is_english([ans])
                    else "······\n由于长度的原因,回答被截断了,要继续吗?"
                )
            return (
                ans,
                response.usage.input_tokens + response.usage.output_tokens,
            )
        except Exception as e:
            return ans + "\n**ERROR**: " + str(e), 0
    def chat_streamly(self, system, history, gen_conf):
        if system:
            self.system = system
        if "max_tokens" not in gen_conf:
            gen_conf["max_tokens"] = 4096

        ans = ""
        total_tokens = 0
        try:
            response = self.client.messages.create(
                model=self.model_name,
                messages=history,
                system=self.system,
                stream=True,
                **gen_conf,
            )
            # Iterate over streamed events; only text deltas carry answer content.
            for res in response:
                if res.type == "content_block_delta":
                    text = res.delta.text
                    ans += text
                    total_tokens += num_tokens_from_string(text)
                    yield ans
        except Exception as e:
            yield ans + "\n**ERROR**: " + str(e)

        yield total_tokens
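
A small usage sketch (not part of this commit) for the class above, assuming a valid Anthropic API key; the key, model name, and prompts are placeholders. It follows the convention in this file: `chat` returns the answer plus a token count, while `chat_streamly` is a generator that yields the growing answer and finally the total token count.

```python
# Illustrative sketch only: direct use of AnthropicChat with placeholder values.
mdl = AnthropicChat(key="sk-ant-xxxx", model_name="claude-3-5-sonnet-20240620")
history = [{"role": "user", "content": "Summarize this change in one sentence."}]

# Blocking call: returns (answer, total tokens used).
answer, tokens = mdl.chat(system="Be concise.", history=history, gen_conf={})

# Streaming call: yields partial answers, then the token count as the last item.
for chunk in mdl.chat_streamly(system="Be concise.", history=history, gen_conf={}):
    if isinstance(chunk, int):
        print("tokens used:", chunk)
    else:
        print(chunk)
```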


@@ -1,3 +1,4 @@
anthropic==0.34.1
arxiv==2.1.3
Aspose.Slides==24.2.0
BCEmbedding==0.1.3


@@ -2,6 +2,7 @@ accelerate==0.27.2
aiohttp==3.9.4
aiosignal==1.3.1
annotated-types==0.6.0
anthropic==0.34.1
anyio==4.3.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0


@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1724743631411" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="4224" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><path d="M710 196.2H572.9l250 631.6H960L710 196.2z m-396 0L64 827.8h139.8l51.1-132.6h261.5l51.1 132.6h139.8l-250-631.6H314z m-13.9 381.7l85.5-222 85.5 222h-171z" p-id="4225"></path></svg>



@@ -37,6 +37,7 @@ export const IconMap = {
  BaiduYiyan: 'yiyan',
  'Fish Audio': 'fish-audio',
  'Tencent Cloud': 'tencent-cloud',
  Anthropic: 'anthropic',
};
export const BedrockRegionList = [