WIP Dataset builder
Josh-XT committed Dec 10, 2023
1 parent 4567b69 commit 7559943
Showing 4 changed files with 118 additions and 1 deletion.
8 changes: 7 additions & 1 deletion agixt/Interactions.py
@@ -380,6 +380,7 @@ async def run(
conversation_name: str = "",
browse_links: bool = False,
prompt_category: str = "Default",
persist_context_in_history: bool = False,
**kwargs,
):
shots = int(shots)
@@ -450,11 +451,16 @@ async def run(
websearch=websearch,
**kwargs,
)
log_message = (
user_input
if user_input != "" and persist_context_in_history == False
else formatted_prompt
)
log_interaction(
agent_name=self.agent_name,
conversation_name=conversation_name,
role="USER",
message=user_input if user_input != "" else formatted_prompt,
message=log_message,
user=self.user,
)
try:
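The new persist_context_in_history flag changes what gets logged to the conversation: by default only the raw user_input is recorded, but when the flag is set (or user_input is empty) the fully formatted prompt, injected context included, is persisted instead, which is what the dataset builder below relies on. A minimal sketch of how a caller might opt in, assuming the keyword reaches run() through prompt_args the same way the datasets extension in this commit passes it; the agent, prompt, and conversation names are placeholders:

    # Hypothetical caller: persist the full formatted prompt (context included)
    # in the conversation history instead of only the raw user input.
    response = await ApiClient.prompt_agent(
        agent_name="gpt4free",
        prompt_name="Basic With Memory",
        prompt_args={
            "user_input": "What is AGiXT?",
            "context_results": 10,
            "conversation_name": "Example Dataset",
            "persist_context_in_history": True,
        },
    )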
31 changes: 31 additions & 0 deletions agixt/extensions/agixt_actions.py
@@ -9,6 +9,7 @@
from ApiClient import Chain
import logging
import docker
import asyncio

IMAGE_NAME = "joshxt/safeexecute:latest"

@@ -634,3 +635,33 @@ async def get_csv_preview_text(self, text: str):

async def get_csv_from_response(self, response: str) -> str:
return response.split("```csv")[1].split("```")[0]

    # Convert an LLM response formatted as a numbered or bulleted (* or -) list into a Python list of strings
async def convert_llm_response_to_list(self, response):
response = response.split("\n")
response = [item.lstrip("0123456789.*- ") for item in response if item.lstrip()]
response = [item for item in response if item]
response = [item.lstrip("0123456789.*- ") for item in response]
return response

async def convert_questions_to_dataset(self, response):
questions = await self.convert_llm_response_to_list(response)
tasks = []
i = 0
for question in questions:
i += 1
if i % 10 == 0:
await asyncio.gather(*tasks)
tasks = []
task = asyncio.create_task(
                self.ApiClient.prompt_agent(
agent_name=self.agent_name,
prompt_name="Basic With Memory",
prompt_args={
"user_input": question,
"context_results": 10,
"conversation_name": self.conversation_name,
},
)
)
            tasks.append(task)
        await asyncio.gather(*tasks)
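convert_questions_to_dataset fans each question out to prompt_agent as its own asyncio task and flushes the accumulated batch with asyncio.gather every ten questions, so only a handful of requests are in flight at once. A minimal, self-contained sketch of that batching pattern, with a placeholder coroutine standing in for ApiClient.prompt_agent and a final gather for any leftover tasks:

    import asyncio

    async def answer(question: str) -> str:
        # Placeholder for ApiClient.prompt_agent(...)
        await asyncio.sleep(0.1)
        return f"answer to: {question}"

    async def answer_in_batches(questions, batch_size=10):
        results, tasks = [], []
        for question in questions:
            tasks.append(asyncio.create_task(answer(question)))
            if len(tasks) == batch_size:
                results.extend(await asyncio.gather(*tasks))
                tasks = []
        if tasks:
            # Flush whatever is left after the last full batch
            results.extend(await asyncio.gather(*tasks))
        return results

    # Example: asyncio.run(answer_in_batches([f"question {n}" for n in range(25)]))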
75 changes: 75 additions & 0 deletions agixt/extensions/datasets.py
@@ -0,0 +1,75 @@
import os
from Extensions import Extensions
import asyncio
import json


class datasets(Extensions):
    def __init__(self, **kwargs):
        self.commands = {
            "Ask Questions About Memories": self.ask_questions_about_memories,
            "Convert Questions To Dataset": self.convert_questions_to_dataset,
        }
self.command_name = (
kwargs["command_name"] if "command_name" in kwargs else "Smart Prompt"
)
self.agent_name = kwargs["agent_name"] if "agent_name" in kwargs else "gpt4free"
self.conversation_name = (
kwargs["conversation_name"] if "conversation_name" in kwargs else ""
)
self.WORKING_DIRECTORY = os.path.join(os.getcwd(), "WORKSPACE")
os.makedirs(self.WORKING_DIRECTORY, exist_ok=True)
self.ApiClient = kwargs["ApiClient"] if "ApiClient" in kwargs else None

    # Convert an LLM response formatted as a numbered or bulleted (* or -) list into a Python list of strings
async def convert_llm_response_to_list(self, response):
response = response.split("\n")
response = [item.lstrip("0123456789.*- ") for item in response if item.lstrip()]
response = [item for item in response if item]
response = [item.lstrip("0123456789.*- ") for item in response]
return response

async def ask_questions_about_memories(self):
memories = await self.ApiClient.export_agent_memories(self.agent_name)
tasks = []
for memory in memories:
task = asyncio.create_task(
                self.ApiClient.prompt_agent(
agent_name=self.agent_name,
prompt_name="Ask Questions",
prompt_args={
"memory": memory["text"],
"context_results": 10,
"conversation_name": f"{self.conversation_name} Dataset",
"persist_context_in_history": True,
},
)
)
tasks.append(task)
await asyncio.gather(*tasks)
return {"status": "Success"}

async def convert_questions_to_dataset(self, response):
questions = await self.convert_llm_response_to_list(response)
tasks = []
i = 0
for question in questions:
i += 1
if i % 10 == 0:
await asyncio.gather(*tasks)
tasks = []
task = asyncio.create_task(
                self.ApiClient.prompt_agent(
agent_name=self.agent_name,
prompt_name="Basic With Memory",
prompt_args={
"user_input": question,
"context_results": 10,
"conversation_name": f"{self.conversation_name} Dataset",
"persist_context_in_history": True,
},
)
)
tasks.append(task)
await asyncio.gather(*tasks)
return {"status": "Success"}
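Taken together, the two commands form a two-step dataset builder: ask_questions_about_memories has the agent generate questions for every exported memory into a "<conversation> Dataset" conversation, and convert_questions_to_dataset answers a question list with the new Basic With Memory prompt so the injected context is persisted alongside each answer. A rough usage sketch, assuming an ApiClient whose export_agent_memories and prompt_agent methods are awaitable (as the code above expects) and a hypothetical import path:

    import asyncio
    from extensions.datasets import datasets  # hypothetical import path

    async def build_dataset(api_client):
        ext = datasets(
            agent_name="gpt4free",
            conversation_name="Memory QA",
            ApiClient=api_client,
        )
        # Step 1: have the agent ask questions about every exported memory.
        await ext.ask_questions_about_memories()
        # Step 2: answer a previously generated question list against memory.
        questions = "1. What is AGiXT?\n2. How are agent memories stored?"
        await ext.convert_questions_to_dataset(questions)

    # asyncio.run(build_dataset(my_api_client))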
5 changes: 5 additions & 0 deletions agixt/prompts/Default/Basic With Memory.txt
@@ -0,0 +1,5 @@
### Context from Memory
{context}

### User input
{user_input}
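
The Basic With Memory prompt is a plain template: {context} is filled with the agent's memory search results and {user_input} with the question being answered. An illustrative rendering, treating the file as an ordinary Python format string (an assumption; AGiXT's own prompt formatter may substitute the placeholders differently):

    template = open("agixt/prompts/Default/Basic With Memory.txt").read()
    print(template.format(
        context="AGiXT stores long-term memories as embedded text chunks.",
        user_input="How does AGiXT remember past conversations?",
    ))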
