From 446a33c3a4b3305e4fdf877f0202050fe1c60564 Mon Sep 17 00:00:00 2001 From: jknapp Date: Wed, 26 Mar 2025 02:34:14 +0000 Subject: [PATCH] Update script to handle streaming properly. If the request was set to streaming, the sub agents would return unusable data. --- multi-agent-collaboration.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/multi-agent-collaboration.py b/multi-agent-collaboration.py index ace12f7..7ea53b4 100644 --- a/multi-agent-collaboration.py +++ b/multi-agent-collaboration.py @@ -1,7 +1,7 @@ """ title: Multi Agent Collaboration System for Open WebUI Description: Allows for Multiple Models to act as Agents in collaboration -version: 0.5.6 +version: 0.7.6 """ from pydantic import BaseModel, Field @@ -72,9 +72,16 @@ class Pipe: agents = __user__["valves"].agent_list operator_model = __user__["valves"].operator_model number_of_agents = len(agents) + if "### Task:" in body["messages"][0]["content"]: + body["model"] = operator_model + print("Internal Request") + return await generate_chat_completion(__request__, body, user) + # Capture Stream Setting + original_stream = body["stream"] if number_of_agents > 0: # Process through each agent in the list for agent_model in agents: + body["stream"] = False # Temporarily change the model to the agent model body["model"] = agent_model print(f"Model being use: {agent_model}") @@ -83,6 +90,8 @@ class Pipe: description=message, status="agent_processing", done=True ) response = await generate_chat_completion(__request__, body, user) + content = response["choices"][0]["message"]["content"] + print(f"This is the content from {agent_model}: {content}") # Add Agent response as context body["messages"].append( { "role": "assistant", "content": f"{response} \n (Provided by Agent: {agent_model})", } ) - # set Operator for final processing - body["model"] = operator_model - print(f"Model being use: {operator_model}") - message = f"Final Response from {operator_model}" 
- await emitter.emit( - description=message, status="final_processing", done=True - ) + body["model"] = operator_model + body["stream"] = original_stream + print(f"Model being use: {operator_model}") + message = f"Final Response from {operator_model}" + await emitter.emit(description=message, status="final_processing", done=True) return await generate_chat_completion(__request__, body, user)