Mirror of https://git.mirrors.martin98.com/https://github.com/bytedance/deer-flow (synced 2025-08-16 13:05:57 +08:00)
feat: use doubao model by default
This commit is contained in:
parent 9269699ba7
commit afb8571cbd
@@ -1,15 +1,15 @@
 # LLM Config
 REASONING_MODEL:
-  model: "gemini-2.0-flash"
+  model: "doubao-1-5-thinking-pro-250415"
   api_key: xxxx
-  base_url: https://generativelanguage.googleapis.com/v1beta/openai/
+  base_url: https://ark.cn-beijing.volces.com/api/v3
 
 BASIC_MODEL:
-  model: "gemini-2.0-flash"
+  model: "doubao-1-5-pro-32k-250115"
   api_key: xxxx
-  base_url: https://generativelanguage.googleapis.com/v1beta/openai/
+  base_url: https://ark.cn-beijing.volces.com/api/v3
 
 VISION_MODEL:
-  model: "gemini-2.0-flash"
+  model: "doubao-1.5-vision-pro-250328"
   api_key: xxxx
-  base_url: https://generativelanguage.googleapis.com/v1beta/openai/
+  base_url: https://ark.cn-beijing.volces.com/api/v3
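For context on how this config is typically consumed: the Ark base_url serves an OpenAI-compatible chat API, so the entries can be fed straight into an OpenAI-style client. The snippet below is a minimal sketch under that assumption; the file name conf.yaml, the helper name load_basic_model, and the use of langchain_openai.ChatOpenAI are illustrative, not taken from this commit.

# Minimal sketch (not part of this commit): load one entry from the
# LLM config and point an OpenAI-compatible client at the Ark endpoint.
# "conf.yaml" and load_basic_model() are hypothetical names.
import yaml
from langchain_openai import ChatOpenAI

def load_basic_model(conf_path: str = "conf.yaml") -> ChatOpenAI:
    with open(conf_path, "r", encoding="utf-8") as f:
        conf = yaml.safe_load(f)
    basic = conf["BASIC_MODEL"]
    # Doubao models on Volcengine Ark expose an OpenAI-style chat API,
    # so only the model name and base_url differ from the Gemini setup.
    return ChatOpenAI(
        model=basic["model"],
        api_key=basic["api_key"],
        base_url=basic["base_url"],
    )

The same pattern would apply to REASONING_MODEL and VISION_MODEL; only the model string changes.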
@@ -42,7 +42,8 @@ def planner_node(
     messages = apply_prompt_template("planner", state, configurable)
     if AGENT_LLM_MAP["planner"] == "basic":
         llm = get_llm_by_type(AGENT_LLM_MAP["planner"]).with_structured_output(
-            Plan, method="json_mode"
+            Plan,
+            method="json_mode",
         )
     else:
         llm = get_llm_by_type(AGENT_LLM_MAP["planner"])
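The change above is purely cosmetic (the arguments to with_structured_output are split onto separate lines), but for readers unfamiliar with the call: method="json_mode" asks the chat model to return a raw JSON object, which LangChain then parses and validates against the Plan schema. A rough sketch under the assumption of LangChain's ChatOpenAI and a stand-in Plan model; the real Plan class lives elsewhere in the repo, and its fields here are only guessed from the attributes the reporter diff below uses (title, thought).

# Stand-in Plan schema and model setup; the field definitions are
# illustrative, not copied from the repo.
from pydantic import BaseModel, Field
from langchain_openai import ChatOpenAI

class Plan(BaseModel):
    title: str = Field(description="Short title of the research plan")
    thought: str = Field(description="Reasoning behind the plan")

llm = ChatOpenAI(
    model="doubao-1-5-pro-32k-250115",
    api_key="xxxx",
    base_url="https://ark.cn-beijing.volces.com/api/v3",
)
structured_llm = llm.with_structured_output(
    Plan,
    method="json_mode",  # request a JSON object and parse it into Plan
)
# Note: with json_mode the prompt itself must ask for JSON output;
# the planner prompt template is expected to do that.
# plan = structured_llm.invoke(messages)  # -> Plan instance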
@@ -147,9 +148,17 @@ def coordinator_node(state: State) -> Command[Literal["planner", "__end__"]]:
 def reporter_node(state: State):
     """Reporter node that write a final report."""
     logger.info("Reporter write final report")
-    messages = apply_prompt_template("reporter", state)
+    current_plan = state.get("current_plan")
+    input_ = {
+        "messages": [
+            HumanMessage(
+                f"# Research Requirements\n\n## Task\n\n{current_plan.title}\n\n## Description\n\n{current_plan.thought}"
+            )
+        ],
+        "locale": state.get("locale", "en-US"),
+    }
+    invoke_messages = apply_prompt_template("reporter", input_)
     observations = state.get("observations", [])
-    invoke_messages = messages[:2]
 
     # Add a reminder about the new report format, citation style, and table usage
     invoke_messages.append(
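The rewritten reporter_node no longer slices the previously templated messages (messages[:2]); it builds a fresh input_ dict whose single HumanMessage carries the current plan's title and thought, then runs that through the reporter template. A small illustration of what that injected message looks like, using made-up plan values; apply_prompt_template and the Plan object are repo-specific and not reproduced here.

from langchain_core.messages import HumanMessage

# Made-up plan values purely for illustration.
title = "Switch default models to Doubao"
thought = "Document why the defaults moved to the Ark endpoint."

msg = HumanMessage(
    f"# Research Requirements\n\n## Task\n\n{title}\n\n## Description\n\n{thought}"
)
print(msg.content)
# Prints a small markdown block:
#   # Research Requirements
#
#   ## Task
#
#   Switch default models to Doubao
#
#   ## Description
#
#   Document why the defaults moved to the Ark endpoint.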