refine TTS (#2500)

### What problem does this PR solve?

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
Kevin Hu 2024-09-19 19:15:16 +08:00 committed by GitHub
parent d545633a6c
commit b5d1d2fec4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 15 additions and 10 deletions

View File

@ -228,7 +228,8 @@ def tts():
def stream_audio():
try:
for chunk in tts_mdl.tts(text):
for txt in re.split(r"[,。/《》?;:!\n\r:;]+", text):
for chunk in tts_mdl.tts(txt):
yield chunk
except Exception as e:
yield ("data:" + json.dumps({"retcode": 500, "retmsg": str(e),

View File

@ -93,24 +93,27 @@ def set_api_key():
if msg:
return get_data_error_result(retmsg=msg)
llm = {
llm_config = {
"api_key": req["api_key"],
"api_base": req.get("base_url", "")
}
for n in ["model_type", "llm_name"]:
if n in req:
llm[n] = req[n]
llm_config[n] = req[n]
if not TenantLLMService.filter_update(
[TenantLLM.tenant_id == current_user.id, TenantLLM.llm_factory == factory], llm):
for llm in LLMService.query(fid=factory):
if not TenantLLMService.filter_update(
[TenantLLM.tenant_id == current_user.id,
TenantLLM.llm_factory == factory,
TenantLLM.llm_name == llm.llm_name],
llm_config):
TenantLLMService.save(
tenant_id=current_user.id,
llm_factory=factory,
llm_name=llm.llm_name,
model_type=llm.model_type,
api_key=req["api_key"],
api_base=req.get("base_url", "")
api_key=llm_config["api_key"],
api_base=llm_config["api_base"]
)
return get_json_result(data=True)

View File

@ -161,6 +161,7 @@ class QwenTTS(Base):
class OpenAITTS(Base):
def __init__(self, key, model_name="tts-1", base_url="https://api.openai.com/v1"):
    """Store OpenAI TTS credentials and endpoint configuration.

    key        -- API key used for authenticated requests.
    model_name -- TTS model identifier (default "tts-1").
    base_url   -- API endpoint; a falsy value (None/"") falls back to
                  the public OpenAI endpoint.
    """
    self.api_key = key
    self.model_name = model_name
    # `or` collapses an explicitly-passed empty base_url to the default,
    # exactly like the original guard `if not base_url: base_url = ...`.
    self.base_url = base_url or "https://api.openai.com/v1"
@ -181,6 +182,6 @@ class OpenAITTS(Base):
if response.status_code != 200:
raise Exception(f"**Error**: {response.status_code}, {response.text}")
for chunk in response.iter_content(chunk_size=1024):
for chunk in response.iter_content():
if chunk:
yield chunk