feat: 注释大模型数据控制台打印

This commit is contained in:
liailing1026
2026-01-30 15:21:58 +08:00
parent b3e6c7a618
commit aedfd0594c
7 changed files with 28 additions and 28 deletions

View File

@@ -122,7 +122,7 @@ def _call_with_custom_config(messages: list[dict], stream: bool, model_config: d
if full_reply_content is None: if full_reply_content is None:
raise Exception(f"API returned None content for model {api_model}") raise Exception(f"API returned None content for model {api_model}")
print(colored(full_reply_content, "blue", "on_white"), end="") #print(colored(full_reply_content, "blue", "on_white"), end="")
return full_reply_content return full_reply_content
except Exception as e: except Exception as e:
print_colored(f"Custom API error for model {api_model} :{str(e)}","red") print_colored(f"Custom API error for model {api_model} :{str(e)}","red")
@@ -151,9 +151,9 @@ async def _achat_completion_stream_custom(messages:list[dict], temp_async_client
chunk_message = choices[0].delta chunk_message = choices[0].delta
if chunk_message is not None: if chunk_message is not None:
collected_messages.append(chunk_message) collected_messages.append(chunk_message)
if chunk_message.content: # if chunk_message.content:
print(colored(chunk_message.content, "blue", "on_white"), end="") # print(colored(chunk_message.content, "blue", "on_white"), end="")
print() # print()
full_reply_content = "".join( full_reply_content = "".join(
[m.content or "" for m in collected_messages if m is not None] [m.content or "" for m in collected_messages if m is not None]
) )
@@ -209,8 +209,8 @@ async def _achat_completion_stream_groq(messages: list[dict]) -> str:
if full_reply_content is None: if full_reply_content is None:
raise Exception("Groq API returned None content") raise Exception("Groq API returned None content")
print(colored(full_reply_content, "blue", "on_white"), end="") # print(colored(full_reply_content, "blue", "on_white"), end="")
print() # print()
return full_reply_content return full_reply_content
@@ -251,8 +251,8 @@ async def _achat_completion_stream_mixtral(messages: list[dict]) -> str:
if full_reply_content is None: if full_reply_content is None:
raise Exception("Mistral API returned None content") raise Exception("Mistral API returned None content")
print(colored(full_reply_content, "blue", "on_white"), end="") # print(colored(full_reply_content, "blue", "on_white"), end="")
print() # print()
return full_reply_content return full_reply_content
@@ -277,12 +277,12 @@ async def _achat_completion_stream_gpt35(messages: list[dict]) -> str:
chunk_message = choices[0].delta chunk_message = choices[0].delta
if chunk_message is not None: if chunk_message is not None:
collected_messages.append(chunk_message) # save the message collected_messages.append(chunk_message) # save the message
if chunk_message.content: # if chunk_message.content:
print( # print(
colored(chunk_message.content, "blue", "on_white"), # colored(chunk_message.content, "blue", "on_white"),
end="", # end="",
) # )
print() # print()
full_reply_content = "".join( full_reply_content = "".join(
[m.content or "" for m in collected_messages if m is not None] [m.content or "" for m in collected_messages if m is not None]
@@ -324,8 +324,8 @@ def _achat_completion_json(messages: list[dict] ) -> str:
if full_reply_content is None: if full_reply_content is None:
raise Exception("OpenAI API returned None content") raise Exception("OpenAI API returned None content")
print(colored(full_reply_content, "blue", "on_white"), end="") # print(colored(full_reply_content, "blue", "on_white"), end="")
print() # print()
return full_reply_content return full_reply_content
@@ -346,12 +346,12 @@ async def _achat_completion_stream(messages: list[dict]) -> str:
chunk_message = choices[0].delta chunk_message = choices[0].delta
if chunk_message is not None: if chunk_message is not None:
collected_messages.append(chunk_message) # save the message collected_messages.append(chunk_message) # save the message
if chunk_message.content: # if chunk_message.content:
print( # print(
colored(chunk_message.content, "blue", "on_white"), # colored(chunk_message.content, "blue", "on_white"),
end="", # end="",
) # )
print() # print()
full_reply_content = "".join( full_reply_content = "".join(
[m.content or "" for m in collected_messages if m is not None] [m.content or "" for m in collected_messages if m is not None]

View File

@@ -104,7 +104,7 @@ def agentAbilityScoring(Agent_Board, Ability_Requirement_List):
), ),
}, },
] ]
print(messages[1]["content"]) #print(messages[1]["content"])
scoreTable[Ability_Requirement] = read_LLM_Completion(messages) scoreTable[Ability_Requirement] = read_LLM_Completion(messages)
return scoreTable return scoreTable

View File

@@ -100,7 +100,7 @@ def generate_AgentSelection(General_Goal, Current_Task, Agent_Board):
), ),
}, },
] ]
print(messages[1]["content"]) #print(messages[1]["content"])
agentboard_set = {agent["Name"] for agent in Agent_Board} agentboard_set = {agent["Name"] for agent in Agent_Board}

View File

@@ -87,7 +87,7 @@ def branch_PlanOutline(
InitialObject_List=str(InitialObject_List), InitialObject_List=str(InitialObject_List),
General_Goal=General_Goal, General_Goal=General_Goal,
) )
print(prompt) #print(prompt)
branch_List = [] branch_List = []
for _ in range(branch_Number): for _ in range(branch_Number):
messages = [ messages = [

View File

@@ -155,7 +155,7 @@ def branch_TaskProcess(
General_Goal=General_Goal, General_Goal=General_Goal,
Act_Set=ACT_SET, Act_Set=ACT_SET,
) )
print(prompt) #print(prompt)
branch_List = [] branch_List = []
for i in range(branch_Number): for i in range(branch_Number):
messages = [ messages = [

View File

@@ -124,7 +124,7 @@ def generate_TaskProcess(General_Goal, Current_Task_Description):
), ),
}, },
] ]
print(messages[1]["content"]) #print(messages[1]["content"])
# write a callback function, if read_LLM_Completion(messages)["Task_Process_Plan"] dont have the right format, call this function again # write a callback function, if read_LLM_Completion(messages)["Task_Process_Plan"] dont have the right format, call this function again
while True: while True:

View File

@@ -107,7 +107,7 @@ class BaseAction():
Action_Description = self.info["Description"], Action_Description = self.info["Description"],
Action_Custom_Note = self.Action_Custom_Note Action_Custom_Note = self.Action_Custom_Note
) )
print_colored(text = prompt, text_color="red") #print_colored(text = prompt, text_color="red")
messages = [{"role":"system", "content": prompt}] messages = [{"role":"system", "content": prompt}]
ActionResult = LLM_Completion(messages,True,False,model_config=model_config) ActionResult = LLM_Completion(messages,True,False,model_config=model_config)
ActionInfo_with_Result = copy.deepcopy(self.info) ActionInfo_with_Result = copy.deepcopy(self.info)