Update script to handle streaming properly.

If the incoming request was set to streaming, the sub-agents returned unusable data; streaming is now disabled for the per-agent calls and the caller's original setting is restored for the final operator response.
jknapp 2025-03-26 02:34:14 +00:00
parent 7fd6363421
commit 446a33c3a4


@@ -1,7 +1,7 @@
 """
 title: Multi Agent Collaboration System for Open WebUI
 Description: Allows for Multiple Models to act as Agents in collaboration
-version: 0.5.6
+version: 0.7.6
 """
 from pydantic import BaseModel, Field
@@ -72,9 +72,16 @@ class Pipe:
         agents = __user__["valves"].agent_list
         operator_model = __user__["valves"].operator_model
         number_of_agents = len(agents)
+        if "### Task:" in body["messages"][0]["content"]:
+            body["model"] = operator_model
+            print("Internal Request")
+            return await generate_chat_completion(__request__, body, user)
+        # Capture Stream Setting
+        original_stream = body["stream"]
         if number_of_agents > 0:
             # Process through each agent in the list
             for agent_model in agents:
+                body["stream"] = False
                 # Temporarily change the model to the agent model
                 body["model"] = agent_model
                 print(f"Model being use: {agent_model}")
@@ -83,6 +90,8 @@ class Pipe:
                     description=message, status="agent_processing", done=True
                 )
                 response = await generate_chat_completion(__request__, body, user)
+                content = response["choices"][0]["message"]["content"]
+                print(f"This is the content from {agent_model}: {content}")
                 # Add Agent response as context
                 body["messages"].append(
                     {
@@ -90,11 +99,9 @@
                         "content": f"{response} \n (Provided by Agent: {agent_model})",
                     }
                 )
-            # set Operator for final processing
-            body["model"] = operator_model
-            print(f"Model being use: {operator_model}")
-            message = f"Final Response from {operator_model}"
-            await emitter.emit(
-                description=message, status="final_processing", done=True
-            )
+        body["model"] = operator_model
+        body["stream"] = original_stream
+        print(f"Model being use: {operator_model}")
+        message = f"Final Response from {operator_model}"
+        await emitter.emit(description=message, status="final_processing", done=True)
         return await generate_chat_completion(__request__, body, user)
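
For reference, the stream-handling pattern this diff introduces can be sketched outside the pipe. This is a minimal, self-contained sketch, not the actual Open WebUI code: call_model and run_agents are hypothetical stand-ins for generate_chat_completion and the pipe method, and the body/agents shapes are assumed to match the diff above. Sub-agent calls are forced to non-streaming so their JSON payload can be parsed, and the caller's original stream flag is restored only for the final operator call.

# Sketch of the capture / disable / restore stream pattern (assumed names).
import asyncio
from typing import Any


async def call_model(body: dict[str, Any]) -> dict[str, Any]:
    # Placeholder for generate_chat_completion(__request__, body, user).
    # A real streaming response would not be a plain dict like this one.
    return {"choices": [{"message": {"content": f"answer from {body['model']}"}}]}


async def run_agents(
    body: dict[str, Any], agents: list[str], operator_model: str
) -> dict[str, Any]:
    # Remember what the caller asked for before touching the flag.
    original_stream = body.get("stream", False)

    for agent_model in agents:
        # Sub-agent calls must be non-streaming so the JSON body can be read.
        body["stream"] = False
        body["model"] = agent_model
        response = await call_model(body)
        content = response["choices"][0]["message"]["content"]
        # Feed each agent's answer back in as context for the next call.
        body["messages"].append(
            {"role": "assistant", "content": f"{content} \n (Provided by Agent: {agent_model})"}
        )

    # Only the final operator call honors the caller's original stream setting.
    body["model"] = operator_model
    body["stream"] = original_stream
    return await call_model(body)


if __name__ == "__main__":
    request = {"model": "", "stream": True, "messages": [{"role": "user", "content": "hi"}]}
    print(asyncio.run(run_agents(request, ["agent-a", "agent-b"], "operator")))

The design choice mirrored here is that only the terminal call is allowed to stream; every intermediate call is consumed as a complete JSON response, which is why original_stream must be captured before the loop and reapplied after it.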