# Start with the following command: panel serve chat-panel.py

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
from dotenv import load_dotenv
load_dotenv()

from typing import Any, Dict

from crewai import Crew, Process, Agent, Task
from crewai.agents import CrewAgentExecutor
from langchain_openai import ChatOpenAI
from langchain_core.callbacks import BaseCallbackHandler
from langchain.agents import load_tools
# human = load_tools(["human"])

# pip install panel
import panel as pn
pn.extension(design="material")

import threading
import time


def custom_ask_human_input(self, final_answer: dict) -> str:
    """Route CrewAI's human-input request through the Panel chat instead of stdin."""
    global user_input
    prompt = self._i18n.slice("getting_input").format(final_answer=final_answer)
    chat_interface.send(prompt, user="Lehrkraft", respond=False)
    # Block this worker thread until the teacher replies in the chat
    while user_input is None:
        time.sleep(1)
    human_comments = user_input
    user_input = None
    return human_comments


# Monkey-patch the executor so human feedback is collected via the chat interface
CrewAgentExecutor._ask_human_input = custom_ask_human_input

user_input = None
initiate_chat_task_created = False


def initiate_chat(message):
    global initiate_chat_task_created
    # Indicate that the crew has been started for this session
    initiate_chat_task_created = True
    StartCrew(message)


def callback(contents: str, user: str, instance: pn.chat.ChatInterface):
    global initiate_chat_task_created
    global user_input

    if not initiate_chat_task_created:
        # First message: start the crew in a background thread so the UI stays responsive
        thread = threading.Thread(target=initiate_chat, args=(contents,))
        thread.start()
    else:
        # Later messages are treated as human feedback for the running crew
        user_input = contents


avatars = {
    "Writer": "https://cdn-icons-png.flaticon.com/512/320/320336.png",
    "Reviewer": "https://cdn-icons-png.freepik.com/512/9408/9408201.png",
}


class MyCustomHandler(BaseCallbackHandler):
    """Mirror each agent's chain input and output into the Panel chat."""

    def __init__(self, agent_name: str) -> None:
        self.agent_name = agent_name

    def on_chain_start(self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any) -> None:
        """Show the prompt that is entering a chain."""
        chat_interface.send(inputs["input"], user="assistant", respond=False)

    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
        """Show the result of a finished chain, attributed to the agent."""
        chat_interface.send(
            outputs["output"],
            user=self.agent_name,
            avatar=avatars[self.agent_name],
            respond=False,
        )


llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
# llm = ChatOpenAI(model="gpt-4")

writer = Agent(
    role='Theologische Inhalte entwickeln',
    backstory='Langjähriger Theologieprofessor mit Spezialisierung auf historisch-kritische Bibelinterpretation.',
    goal='Verständnis für biblische Texte und christliche Lehren fördern.',
    llm=llm,
    callbacks=[MyCustomHandler("Writer")],
)

reviewer = Agent(
    role='Interreligiösen Dialog fördern',
    backstory='Religionswissenschaftler mit internationaler Erfahrung im Dialog zwischen Weltreligionen.',
    goal='Förderung von ethischer Argumentation und philosophischem Denken.',
    llm=llm,
    callbacks=[MyCustomHandler("Reviewer")],
    allow_delegation=True,
)


def StartCrew(prompt):
    task1 = Task(
        description=f"Entwickeln Sie einen Unterrichtsplan zum Thema {prompt}.",
        agent=writer,
        expected_output="Unterrichtsplan mit relevanten theologischen Perspektiven und zeitgemäßen Methoden.",
    )
    task2 = Task(
        description=(
            "Entwickeln Sie zusammen mit Ihren Kollegen einen Unterrichtsplan "
            "unter Berücksichtigung religionswissenschaftlicher und interreligiöser Aspekte."
        ),
        agent=reviewer,
        expected_output="Kollaborativ entwickelter Unterrichtsplan",
        human_input=True,  # Delegation is enabled on the reviewer agent itself
    )

    # Establishing the crew with a hierarchical process
    project_crew = Crew(
        tasks=[task1, task2],  # Tasks to be delegated and executed under the manager's supervision
        agents=[writer, reviewer],
        manager_llm=llm,
        language="de",
        process=Process.hierarchical,  # Specifies the hierarchical management approach
    )
    result = project_crew.kickoff()

    chat_interface.send("## Final Result\n" + str(result), user="assistant", respond=False)


chat_interface = pn.chat.ChatInterface(callback=callback)
chat_interface.send("Beschreibe dein Unterrichtsvorhaben!", user="System", respond=False)
chat_interface.servable()
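
# ---------------------------------------------------------------------------
# Setup sketch (assumptions: package names as published on PyPI, and that
# langchain_openai reads the API key from the OPENAI_API_KEY variable loaded
# by load_dotenv() above):
#
#   pip install crewai langchain langchain-openai panel python-dotenv
#   echo "OPENAI_API_KEY=sk-..." > .env
#   panel serve chat-panel.py
# ---------------------------------------------------------------------------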