Update to add Event Emitter calls and update README.md
parent 28370e4b2a
commit 7fd6363421
README.md
@@ -2,11 +2,9 @@
 
 A powerful pipe function for Open WebUI that enables collaboration between multiple AI models as agents.
 
-[TOC]
-
 ## Overview
 
-OWUI-Multi-Agent-Processing (v0.5.3) allows you to leverage the strengths of different language models by having them work together on the same prompt. The system functions as a pipeline where each agent processes the input sequentially, and a final operator model synthesizes all the agent responses.
+OWUI-Multi-Agent-Processing (v0.5.6) allows you to leverage the strengths of different language models by having them work together on the same prompt. The system functions as a pipeline where each agent processes the input sequentially, and a final operator model synthesizes all the agent responses.
 
 ## Features
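The Overview paragraph amounts to a sequential fold over the agent list: each agent sees the prompt plus every earlier answer, and the operator synthesizes the result. A minimal sketch of that flow, where `ask()` and the model names are illustrative placeholders rather than part of this repository:

```python
# Conceptual sketch of the pipeline described in the README Overview.
# ask() is a hypothetical stand-in for a chat-completion call.
async def ask(model: str, messages: list[dict]) -> str:
    """Placeholder for a chat-completion call against one model."""
    ...

async def run_pipeline(prompt: str, agents: list[str], operator: str) -> str:
    messages = [{"role": "user", "content": prompt}]
    for agent in agents:
        # Each agent sees the prompt plus all earlier agent answers.
        reply = await ask(agent, messages)
        messages.append({"role": "assistant", "content": reply})
    # The operator model synthesizes the accumulated responses.
    return await ask(operator, messages)
```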
@@ -1,16 +1,41 @@
 """
 title: Multi Agent Collaboration System for Open WebUI
 Description: Allows for Multiple Models to act as Agents in collaboration
-version: 0.5.3
+version: 0.5.6
 """
 
 from pydantic import BaseModel, Field
 from fastapi import Request
-from typing import Optional
+from typing import Callable, Any
 from open_webui.models.users import Users
 from open_webui.utils.chat import generate_chat_completion
 
 
+class EventEmitter:
+    def __init__(self, event_emitter: Callable[[dict], Any] = None):
+        self.event_emitter = event_emitter
+
+    async def emit(self, description="Unknown state", status="in_progress", done=False):
+        """
+        Send a status event to the event emitter.
+
+        :param description: Event description
+        :param status: Event status
+        :param done: Whether the event is complete
+        """
+        if self.event_emitter:
+            await self.event_emitter(
+                {
+                    "type": "status",
+                    "data": {
+                        "status": status,
+                        "description": description,
+                        "done": done,
+                    },
+                }
+            )
+
+
 class Pipe:
 
     class UserValves(BaseModel):
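The `EventEmitter` added above is a thin wrapper around whatever callable Open WebUI injects as `__event_emitter__`. A usage sketch, assuming the class from this commit is in scope and substituting a simple in-memory collector for the real front-end callback:

```python
import asyncio

# Stand-in for the callback Open WebUI would pass as __event_emitter__;
# collecting events in a list keeps the example self-contained.
events: list[dict] = []

async def collect(event: dict) -> None:
    events.append(event)

async def demo() -> None:
    emitter = EventEmitter(collect)
    await emitter.emit(description="Warming up", status="in_progress", done=False)
    await emitter.emit(description="Finished", status="complete", done=True)
    print(events)  # two {"type": "status", "data": {...}} dicts

asyncio.run(demo())
```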
@@ -26,7 +51,22 @@ class Pipe:
     def __init__(self):
         pass
 
-    async def pipe(self, body: dict, __user__: dict, __request__: Request) -> str:
+    async def pipe(
+        self,
+        body: dict,
+        __user__: dict,
+        __request__: Request,
+        __event_emitter__: Callable[[dict], Any] = None,
+    ) -> str:
+        """
+        :param body: Body dictionary containing the prompt, messages, and other settings sent to the LLM(s)
+        :param __user__: User dictionary containing the user ID
+        :param __request__: User Request object
+        :param __event_emitter__: Optional event emitter for tracking status
+        :return: The final response from the operator model after processing through all agents
+        """
+        # Initialize Event Emitter
+        emitter = EventEmitter(__event_emitter__)
         # Use the unified endpoint with the updated signature
         user = Users.get_user_by_id(__user__["id"])
         agents = __user__["valves"].agent_list
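Once `emitter` is initialized at the top of `pipe()`, every `emit()` call hands the front end a dict of the shape defined by the `EventEmitter` class above. For reference, an event produced by the agent-loop call added later in this commit looks like this (the model name in the description is illustrative):

```python
# Shape of one status event, per EventEmitter.emit() in this commit.
event = {
    "type": "status",
    "data": {
        "status": "agent_processing",
        "description": "Processing request through agent model example-model",
        "done": True,
    },
}
```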
@@ -38,6 +78,10 @@ class Pipe:
             # Temporarily change the model to the agent model
             body["model"] = agent_model
             print(f"Model being used: {agent_model}")
+            message = f"Processing request through agent model {agent_model}"
+            await emitter.emit(
+                description=message, status="agent_processing", done=True
+            )
             response = await generate_chat_completion(__request__, body, user)
             # Add Agent response as context
             body["messages"].append(
@@ -49,5 +93,8 @@ class Pipe:
         # Set Operator for final processing
         body["model"] = operator_model
         print(f"Model being used: {operator_model}")
-        #print(f"Body Response: {body['messages']}")
+        message = f"Final Response from {operator_model}"
+        await emitter.emit(
+            description=message, status="final_processing", done=True
+        )
         return await generate_chat_completion(__request__, body, user)
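Taken together, the `emit()` calls added in this commit produce one status event per agent plus a final one for the operator. A hypothetical trace of the `data` payloads for a run with two agents (model names are placeholders, not part of this repository):

```python
# Hypothetical sequence of status payloads for a two-agent run,
# given the emit() calls added in this commit.
expected_payloads = [
    {"status": "agent_processing", "description": "Processing request through agent model agent-a", "done": True},
    {"status": "agent_processing", "description": "Processing request through agent model agent-b", "done": True},
    {"status": "final_processing", "description": "Final Response from operator-x", "done": True},
]
```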