Add Llama3.3 (#4174)

### What problem does this PR solve?

#4168

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Authored by Kevin Hu on 2024-12-23 11:18:01 +08:00; committed by GitHub
parent cac7851fc5
commit 2cbe064080
3 changed files with 15 additions and 3 deletions

View File

@@ -1014,6 +1014,18 @@
"max_tokens": 131072,
"model_type": "chat"
},
+{
+"llm_name": "llama-3.3-70b-versatile",
+"tags": "LLM,CHAT,128k",
+"max_tokens": 128000,
+"model_type": "chat"
+},
+{
+"llm_name": "llama-3.3-70b-specdec",
+"tags": "LLM,CHAT,8k",
+"max_tokens": 8192,
+"model_type": "chat"
+},
{
"llm_name": "mixtral-8x7b-32768",
"tags": "LLM,CHAT,5k",

View File

@@ -26,6 +26,7 @@ from api.utils.file_utils import get_project_base_directory
from .operators import * # noqa: F403
from .operators import preprocess
class Recognizer(object):
def __init__(self, label_list, task_name, model_dir=None):
"""
@@ -144,11 +145,11 @@ class Recognizer(object):
return 0
x0_ = max(b["x0"], x0)
x1_ = min(b["x1"], x1)
-assert x0_ <= x1_, "Fuckedup! T:{},B:{},X0:{},X1:{} ==> {}".format(
+assert x0_ <= x1_, "Bbox mismatch! T:{},B:{},X0:{},X1:{} ==> {}".format(
tp, btm, x0, x1, b)
tp_ = max(b["top"], tp)
btm_ = min(b["bottom"], btm)
-assert tp_ <= btm_, "Fuckedup! T:{},B:{},X0:{},X1:{} => {}".format(
+assert tp_ <= btm_, "Bbox mismatch! T:{},B:{},X0:{},X1:{} => {}".format(
tp, btm, x0, x1, b)
ov = (btm_ - tp_) * (x1_ - x0_) if x1 - \
x0 != 0 and btm - tp != 0 else 0
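
For context, the renamed assertions guard an intersection computation: the candidate box b is clamped to the query rectangle (tp, btm, x0, x1) on both axes, and the clamped edges must stay ordered. Below is a simplified, standalone sketch of that geometry, not the repository's actual Recognizer method; the early non-overlap return is an added assumption that keeps the asserts meaningful in isolation.

```python
# Standalone sketch of the overlap check the renamed assertions guard.
# Field names mirror the hunk above; this is an illustration only, not the
# repository's Recognizer method.
def overlapped_area(b, tp, btm, x0, x1):
    # No overlap on either axis: nothing to measure (assumed early return).
    if b["x1"] <= x0 or b["x0"] >= x1 or b["bottom"] <= tp or b["top"] >= btm:
        return 0
    # Clamp the candidate box to the query rectangle.
    x0_ = max(b["x0"], x0)
    x1_ = min(b["x1"], x1)
    assert x0_ <= x1_, "Bbox mismatch! T:{},B:{},X0:{},X1:{} ==> {}".format(
        tp, btm, x0, x1, b)
    tp_ = max(b["top"], tp)
    btm_ = min(b["bottom"], btm)
    assert tp_ <= btm_, "Bbox mismatch! T:{},B:{},X0:{},X1:{} => {}".format(
        tp, btm, x0, x1, b)
    # Overlapped area = width * height of the clamped rectangle; a degenerate
    # query rectangle contributes zero.
    return (btm_ - tp_) * (x1_ - x0_) if (x1 - x0) != 0 and (btm - tp) != 0 else 0
```

With b = {"x0": 0, "x1": 10, "top": 0, "bottom": 5} and a query rectangle (2, 4, 3, 8), the clamped box is 5 wide and 2 tall, so the sketch returns 10.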

View File

@@ -1,6 +1,5 @@
import logging
import boto3
import os
from botocore.exceptions import ClientError
from botocore.client import Config
import time
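
The last hunk only trims the import block of what appears to be an S3 utility (one line removed overall). For orientation, here is a hedged sketch of how these imports usually combine: a boto3 client built with an explicit botocore Config, and ClientError caught around a call. The function names, signature options, and endpoint handling are assumptions for illustration, not code from this repository.

```python
# Hedged sketch: typical use of boto3 + botocore Config + ClientError.
# Not the repository's connector; names and options are assumptions.
import logging
import boto3
from botocore.client import Config
from botocore.exceptions import ClientError

def make_s3_client(endpoint_url=None, region_name="us-east-1"):
    # Common choices for S3-compatible stores; adjust to the target service.
    cfg = Config(signature_version="s3v4", s3={"addressing_style": "path"})
    return boto3.client("s3", endpoint_url=endpoint_url,
                        region_name=region_name, config=cfg)

def object_exists(client, bucket, key):
    # head_object raises ClientError (e.g. 404) when the key is missing.
    try:
        client.head_object(Bucket=bucket, Key=key)
        return True
    except ClientError as e:
        logging.warning("head_object failed for %s/%s: %s", bucket, key, e)
        return False
```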