"""
|
|
title: Multi Agent Collaboration System for Open WebUI
|
|
Description: Allows for Multiple Models to act as Agents in collaboration
|
|
version: 0.5.3
|
|
"""
from pydantic import BaseModel, Field
from fastapi import Request
from typing import Optional

from open_webui.models.users import Users
from open_webui.utils.chat import generate_chat_completion

class Pipe:
    class UserValves(BaseModel):
        # Per-user settings exposed through the Open WebUI valves interface
        agent_list: list = Field(
            default=[], description="List of Models to process as agents"
        )
        operator_model: str = Field(
            default="us.anthropic.claude-3-5-sonnet-20241022-v2:0",
            description="Default Operator Model to use",
        )

    def __init__(self):
        pass

    async def pipe(self, body: dict, __user__: dict, __request__: Request) -> str:
        # Use the unified endpoint with the updated signature
        user = Users.get_user_by_id(__user__["id"])

        agents = __user__["valves"].agent_list
        operator_model = __user__["valves"].operator_model
        number_of_agents = len(agents)

        if number_of_agents > 0:
            # Process the request through each agent in the list
            for agent_model in agents:
                # Temporarily switch the request to the agent model
                body["model"] = agent_model
                print(f"Model being used: {agent_model}")
                response = await generate_chat_completion(__request__, body, user)

                # Append the agent's response to the conversation as added context
                body["messages"].append(
                    {
                        "role": "assistant",
                        "content": f"{response} \n (Provided by Agent: {agent_model})",
                    }
                )

        # Set the operator model for the final pass over the accumulated context
        body["model"] = operator_model
        print(f"Model being used: {operator_model}")
        # print(f"Body Response: {body['messages']}")
        return await generate_chat_completion(__request__, body, user)
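# Example UserValves configuration (illustrative values; substitute model IDs
# available on your own Open WebUI instance):
#   agent_list: ["llama3.1:8b", "mistral:7b"]
#   operator_model: "us.anthropic.claude-3-5-sonnet-20241022-v2:0"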