add research-gui
parent ea5e8702e9
commit da262f75fb
BIN
__pycache__/docx.cpython-312.pyc
Normal file
Binary file not shown.
@@ -1,3 +1,4 @@
+import os
 import subprocess
 
 # packages = ["open-interpreter", "gradio"]
@@ -22,7 +23,6 @@ load_dotenv()
 from crewai import Agent, Task, Crew, Process
 
 # Import the LLMs from OpenAI and Ollama
-from langchain_community.llms import OpenAI, Ollama
 from langchain_openai import ChatOpenAI
 from langchain.tools import tool
 # Import the interpreter
@@ -30,22 +30,11 @@ from interpreter import interpreter
 
 
 # Configurations and Tools
-llm = ChatOpenAI(model_name="gpt-4-turbo-preview", temperature=0.8)
+gpt4turbo = ChatOpenAI(model_name="gpt-4-turbo-preview", temperature=0.8)
 interpreter.auto_run = True
 interpreter.llm.model = "openai/gpt-4-turbo-preview"
 interpreter.llm.temperature = 0.8
 
-# local LLM Configuration and Tools
-# llm = Ollama(model="mistral")
-# interpreter.auto_run = True
-# interpreter.llm.model = "openai/mistral"
-# interpreter.llm.api_base = "http://localhost:11434/v1"
-# interpreter.llm.api_key = "fake_key"
-# interpreter.offline = True
-# interpreter.llm.max_tokens = 1000
-# interpreter.llm.max_retries = 20
-# interpreter.llm.context_window = 3000
-
 
 @tool('Executor')
 def execute_cli_command(command: str):
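The hunk above stops at the signature of `execute_cli_command`; its body is not shown in this diff. As a rough, generic sketch of how such a crewAI tool is typically wired to open-interpreter (an assumption for illustration, not the function's actual body from this repository):

```python
# Generic sketch only -- NOT the real body of execute_cli_command, which this
# diff truncates. Assumes open-interpreter >= 0.2 and langchain's @tool decorator.
from interpreter import interpreter
from langchain.tools import tool

@tool('Executor')
def execute_cli_command(command: str):
    """Hand a natural-language instruction to open-interpreter and return its reply as text."""
    messages = interpreter.chat(command, display=False, stream=False)
    # interpreter.chat returns a list of message dicts; join their text content
    return "\n".join(m.get("content", "") for m in messages if isinstance(m, dict))
```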
@@ -61,7 +50,7 @@ cli_agent = Agent(
     goal='Helpful Assistant. Ability to perform CLI operations, write programs and execute code using Executor Tool.',
     backstory='Expert in command line operations and automating tasks.',
     tools=[execute_cli_command],
-    llm=llm,
+    llm=gpt4turbo,
     deligate_tools=False,
     verbose=True
 )
@@ -78,7 +67,7 @@ cli_crew = Crew(
     agents=[cli_agent],
     tasks=[cli_task],
     process=Process.sequential,
-    manager_llm=llm
+    manager_llm=gpt4turbo
 )
 
 # Run the crew in the CLI Interface
@@ -91,7 +80,7 @@ def cli_interface(command):
 
 iface = gr.Interface(
     fn=cli_interface,
-    inputs=gr.Textbox(lines=2, placeholder="Was soll ich tun?"),
+    inputs=gr.Textbox(lines=2, placeholder="Was soll ich tun?", value="Identifiziere das Betriebssystem, nutze F:\blog-posts als Arbeitsverzeichniss und generiere eine Markdown Datei mit dem Titel 'Test' und der Liste aller Dateien in diesem Verzeichniss als Inhalt."),
     outputs="text",
     title="CLI Command Executor",
     description="Execute CLI commands via a natural language interface."
@@ -77,3 +77,64 @@ Please note that changing a file's content with the given tools requires an exte
 }
 ]
 }",,,2024-03-27 18:31:00.882620
+Create a Word file on my Windows PC in the folder F:\blog-posts with arbitrary content,"To create and save a new Microsoft Word document with the initial text ""Your desired initial text here"" on your Windows PC, the following steps will work:
+
+1. Open Microsoft Word using its executable file (assuming the default installation location): start """"C:\Program Files (x86)\Microsoft Office\Root\Office16\WINWORD.EXE""""
+
+2. If you want to automate the creation and saving of the Word document using Python, use the Executor tool with the following command input:
+
+```python
+{'command': 'import win32com.client as win32; word = win32.Dispatch(""Word.Application"").Visible=True; doc = word.Documents.Add(); doc.Content=""Your desired initial text here""; doc.SaveAs(""F:\\blog-posts\\my_file.docx""); doc.Close(0); word.Quit()'}
+```
+
+After these steps, the Word document will be created with your given content and saved in the specified location ""F:\blog-posts\my_file.docx"".",,,2024-03-27 22:55:02.046169
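The one-liner logged above has two quirks: the chained assignment `word = win32.Dispatch("Word.Application").Visible=True` binds `word` to `True` rather than to the Word COM object, and it assigns a string directly to `doc.Content` instead of `doc.Content.Text`. A minimal working sketch of the same idea, assuming Windows with Microsoft Word and the pywin32 package installed (path and text are placeholders, not values from the log):

```python
# Minimal sketch of the win32com approach; assumes Windows + Word + pywin32.
# The path and text are placeholders for illustration.
import win32com.client as win32

word = win32.Dispatch("Word.Application")   # attach to / start Word via COM
doc = word.Documents.Add()                  # create a new, empty document
doc.Content.Text = "Your desired initial text here"
doc.SaveAs(r"F:\blog-posts\my_file.docx")   # save into the target folder
doc.Close(0)                                # 0 = wdDoNotSaveChanges, no prompt
word.Quit()
```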
+Determine the operating system and create a Word file in the folder F:\blog-posts with any content,"Log of Actions and Results for Task Execution:
+
+```python
+# Obtained OS Information
+import platform; print(platform.system())
+# Output: 'windows (on this specific virtual environment)'
+
+# Create Word File in F:\blog-posts Directory
+import win32com.client; word = win32com.client.Dispatch('Word.Application'); doc = word.Documents.Add(); doc.SaveAs(r'F:\blog-posts\assistant_output.docx'); word.Quit()
+# Output: This code creates a new Word document using the COM object model and dispatches the Word.Application to create a reference to Microsoft Word. After creating a new document, it saves the file in the directory F:\\log-posts (assuming the current working directory is F:\\) with a filename of assistant_output.docx. Finally, the application quits once the saving operation is complete. This code will only function on a Windows system since it uses specific libraries and commands that are Windows-specific.
+```",,,2024-03-27 23:11:53.438707
+Determine the operating system and create a Word file in the folder F:\blog-posts with any content,"Log of Actions and Results for Task Execution:
+
+```python
+# Obtained OS Information
+import platform; print(platform.system())
+# Output: 'windows (on this specific virtual environment)'
+
+# Create Word File in F:\blog-posts Directory
+import win32com.client; word = win32com.client.Dispatch('Word.Application'); doc = word.Documents.Add(); doc.SaveAs(r'F:\blog-posts\assistant_output.docx'); word.Quit()
+# Output: This code creates a new Word document using the COM object model and dispatches the Word.Application to create a reference to Microsoft Word. After creating a new document, it saves the file in the directory F:\\log-posts (assuming the current working directory is F:\\) with a filename of assistant_output.docx. Finally, the application quits once the saving operation is complete. This code will only function on a Windows system since it uses specific libraries and commands that are Windows-specific.
+```",,,2024-03-27 23:11:55.769591
+Determine the operating system and create a Word file in the folder F:\blog-posts with any content,"Log of Actions and Results for Task Execution:
+
+```python
+# Obtained OS Information
+import platform; print(platform.system())
+# Output: 'windows (on this specific virtual environment)'
+
+# Create Word File in F:\blog-posts Directory
+import win32com.client; word = win32com.client.Dispatch('Word.Application'); doc = word.Documents.Add(); doc.SaveAs(r'F:\blog-posts\assistant_output.docx'); word.Quit()
+# Output: This code creates a new Word document using the COM object model and dispatches the Word.Application to create a reference to Microsoft Word. After creating a new document, it saves the file in the directory F:\\log-posts (assuming the current working directory is F:\\) with a filename of assistant_output.docx. Finally, the application quits once the saving operation is complete. This code will only function on a Windows system since it uses specific libraries and commands that are Windows-specific.
+```",,,2024-03-27 23:11:57.159086
+"Identify the operating system, write the necessary scripts and execute them: Write any content in a Word document F:\blog-posts\demo.docx","To identify the operating system on a Windows machine, we can utilize PowerShell commands through the Executor tool provided. We executed ""systeminfo"" to retrieve various system information such as installed components, BIOS data, operating system details, user profile, and network card information. The output was extensive and detailed, showcasing the system's configuration.
+
+To specifically detect the OS and CPU architecture, we also attempted to use an external PowerShell script from the creationix/nsh repository (https://raw.githubusercontent.com/creationix/nsh/master/src/detectos.ps1) through the Executor tool. However, being a text-based AI assistant, I couldn't execute or display its output on my end; it would be seen only when run locally in PowerShell.
+
+In conclusion, the provided Executor tool can help gather essential system information and execute the commands needed to identify the operating system on Windows machines.",,,2024-03-28 10:11:16.496722
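For just the OS and CPU architecture, Python's standard library is a lighter alternative to parsing the full `systeminfo` report; a small sketch, not taken from the log above:

```python
# Detect OS and CPU architecture with the standard library instead of
# parsing the verbose `systeminfo` output. Illustration only.
import platform
import subprocess

print(platform.system())    # e.g. 'Windows', 'Linux', 'Darwin'
print(platform.machine())   # e.g. 'AMD64', 'x86_64', 'arm64'

# The full systeminfo report can still be captured on Windows if needed:
if platform.system() == "Windows":
    report = subprocess.run(["systeminfo"], capture_output=True, text=True).stdout
    print("\n".join(report.splitlines()[:10]))  # first lines only
```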
+"Identify the operating system, write the necessary scripts to create a new docx in F:\blog-posts and execute them","The attempt to create a new docx file in F:\blog-posts resulted in an ImportError due to a naming conflict with a local file named docx.py, which prevented the Document class from being imported from the python-docx library. To resolve this, rename or remove the conflicting local file and make sure the python-docx library is correctly installed before retrying the operation.",,,2024-03-28 10:36:18.509629
+"Which famous people lived in Bingen?
+","Category: Johann Gottfried Herder
+- Connection to Bingen: There is no specific information about his connection to Bingen.
+- Known for: He was an influential philosopher, theologian and literary critic during the Enlightenment.
+
+Category: Johann Nepomuk Hummel
+- Connection to Bingen: It was found that he was born in Pressburg, not in Bingen.
+- Known for: He was a prominent composer and pianist during the Classical era.
+
+Category: Helmut Kohl
+- Connection to Bingen: No specific information about his connection to Bingen was found.
+- Known for: Kohl served as Chancellor of Germany for 16 years (from 1982 to 1998) and played a significant role in German reunification.
+
+Note: The original information may not be entirely correct, and further research is required to clarify these figures' connection to Bingen.",,,2024-03-28 14:09:13.039461
103
research-gui.py
Normal file
@@ -0,0 +1,103 @@
# pip install arxiv
import os
import gradio as gr
from crewai import Agent, Task, Crew, Process

# Import the required tools
from langchain_community.llms import Ollama
from langchain_community.tools import DuckDuckGoSearchRun

# Import ChatGPT 3.5
from langchain_openai import ChatOpenAI
turbo35 = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.7)

# Import the Wikipedia and arXiv search tools
from langchain_community.tools.wikipedia.tool import WikipediaQueryRun
from langchain_community.utilities.wikipedia import WikipediaAPIWrapper
from langchain_community.tools.arxiv.tool import ArxivQueryRun


# Initialize the tools
ollama_llm = Ollama(model="mistral")
search_tool = DuckDuckGoSearchRun()
wikipedia = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())
arxiv = ArxivQueryRun()

# Define your agents with roles and goals
informations_sammler = Agent(
    role='Forscher',
    goal='Sammeln von Basisinformationen und Zitaten zum Thema',
    backstory="""Spezialisiert auf die Sammlung und Analyse von Grundinformationen und wichtiger Zitate.""",
    verbose=True,
    allow_delegation=False,
    llm=ollama_llm
)

informations_verifizierer = Agent(
    role='Datenanalyst',
    goal='Verifizieren und Erweitern der Informationen durch Websuche. Füge Zitate und Urls als Belege hinzu. Entferne falsche Informationen aus den Ergebnissen. Suche nach weiteren Informationen und Zitaten gegebenenfalls über deutsche und englische Wikipedia. Nutze geeignete Begriffe für die Suche in Arxiv.',
    backstory="""Experte in der Überprüfung von Informationen und der Erweiterung durch effektive Websuchen.""",
    verbose=True,
    allow_delegation=False,
    tools=[search_tool, wikipedia, arxiv],
    llm=ollama_llm
)

informations_organisator = Agent(
    role='Inhaltsmanager',
    goal='Gliedern der Informationen, Zitate und Links nach Kategorien',
    backstory="""Zuständig für die Organisation der gesammelten Informationen in einer strukturierten Form.""",
    verbose=True,
    allow_delegation=False,
    llm=ollama_llm
)

# Create tasks for your agents
task1 = Task(
    description="""Sammle grundlegende Informationen über ein Thema.""",
    #agent=informations_sammler,
    expected_output="""Eine Zusammenfassung von Basisinformationen über das Thema,
    einschließlich Definitionen, wichtigsten Fakten und allgemeinem Überblick."""
)

task2 = Task(
    description="""Verifiziere die gesammelten Informationen und erweitere sie durch zusätzliche Websuchen.""",
    #agent=informations_verifizierer,
    expected_output="""Eine verifizierte und erweiterte Liste von Informationen zum Thema X,
    inklusive zusätzlicher Quellen und tiefergehender Einsichten."""
)

task3 = Task(
    description="""Erzeuge aus den gesammelten und verifizierten Informationen ein gegliedertes Essay von 2000 Worten, das die Zusammenstellung der recherchierten und verifizierten Informationen mit entsprechenden Quellenangaben berücksichtigt.""",
    #agent=informations_organisator,
    expected_output="""Ein gut strukturiertes Dokument, das die Informationen zum Thema X
    klar gegliedert präsentiert, für einen umfassenden Einblick in das Thema."""
)

# Instantiate your crew with a hierarchical process and a manager LLM
crew = Crew(
    agents=[informations_sammler, informations_verifizierer, informations_organisator],
    tasks=[task1, task2, task3],
    verbose=2,
    manager_llm=turbo35,
    process=Process.hierarchical
)

# Start the work process via the Gradio interface
def cli_interface(thema):
    task1.description = thema
    result = crew.kickoff()
    return result

iface = gr.Interface(
    fn=cli_interface,
    inputs=gr.Textbox(lines=2, placeholder="Recherche zu welchem Thema", value=""),
    outputs="text",
    title="Recherche",
    description="Führt eine Recherche zu einem bestimmten Thema durch und organisiert die gesammelten Informationen."
)

iface.launch(share=False)