Skip to content

Commit

Permalink
Selectable providers; additional error handling, etc.
Browse files Browse the repository at this point in the history
  • Loading branch information
jgravelle committed Jun 22, 2024
1 parent bd0e4b3 commit 26ceb2a
Show file tree
Hide file tree
Showing 25 changed files with 1,089 additions and 602 deletions.
1,023 changes: 656 additions & 367 deletions AutoGroq.md

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions AutoGroq/agent_management.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
# agent_management.py

import base64
import os
import re
import streamlit as st

from config import MODEL_CHOICES, MODEL_TOKEN_LIMITS
from configs.config import MODEL_CHOICES, MODEL_TOKEN_LIMITS

from utils.auth_utils import get_api_key
from utils.api_utils import get_api_key
from utils.ui_utils import get_llm_provider, update_discussion_and_whiteboard


Expand Down
2 changes: 1 addition & 1 deletion AutoGroq/cli/create_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
# Add the root directory to the Python module search path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from config import MODEL_TOKEN_LIMITS
from configs.config import MODEL_TOKEN_LIMITS
from prompts import get_agent_prompt
from utils.api_utils import get_llm_provider
from utils.agent_utils import create_agent_data
Expand Down
2 changes: 1 addition & 1 deletion AutoGroq/cli/rephrase_prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
# Add the root directory to the Python module search path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from config import MODEL_TOKEN_LIMITS, LLM_PROVIDER
from configs.config import MODEL_TOKEN_LIMITS, LLM_PROVIDER
from utils.api_utils import get_llm_provider
from utils.auth_utils import get_api_key
from utils.ui_utils import rephrase_prompt
Expand Down
97 changes: 0 additions & 97 deletions AutoGroq/config_local.py.example

This file was deleted.

69 changes: 52 additions & 17 deletions AutoGroq/config.py → AutoGroq/configs/config.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
# configs/config.py:

import os

# Get user home directory
Expand All @@ -8,12 +10,12 @@
DEFAULT_DEBUG = False

# Default configurations
DEFAULT_LLM_PROVIDER = "groq"
DEFAULT_LLM_PROVIDER = "anthropic" # Supported values: "anthropic", "groq", "openai", "ollama", "lmstudio", "fireworks"
DEFAULT_GROQ_API_URL = "https://api.groq.com/openai/v1/chat/completions"
DEFAULT_LMSTUDIO_API_URL = "http://localhost:1234/v1/chat/completions"
DEFAULT_OLLAMA_API_URL = "http://127.0.0.1:11434/api/generate"
DEFAULT_OPENAI_API_KEY = None
DEFAULT_OPENAI_API_URL = "https://api.openai.com/v1/chat/completions"
DEFAULT_ANTHROPIC_API_URL = "https://api.anthropic.com/v1/messages"

# Try to import user-specific configurations from config_local.py
try:
Expand All @@ -25,18 +27,19 @@
DEBUG = locals().get('DEBUG', DEFAULT_DEBUG)

LLM_PROVIDER = locals().get('LLM_PROVIDER', DEFAULT_LLM_PROVIDER)

GROQ_API_URL = locals().get('GROQ_API_URL', DEFAULT_GROQ_API_URL)
LMSTUDIO_API_URL = locals().get('LMSTUDIO_API_URL', DEFAULT_LMSTUDIO_API_URL)
OLLAMA_API_URL = locals().get('OLLAMA_API_URL', DEFAULT_OLLAMA_API_URL)
OPENAI_API_KEY = locals().get('OPENAI_API_KEY', DEFAULT_OPENAI_API_KEY)
OPENAI_API_URL = locals().get('OPENAI_API_URL', DEFAULT_OPENAI_API_URL)
ANTHROPIC_API_URL = locals().get('ANTHROPIC_API_URL', DEFAULT_ANTHROPIC_API_URL)

API_KEY_NAMES = {
"groq": "GROQ_API_KEY",
"lmstudio": None,
"ollama": None,
"openai": "OPENAI_API_KEY",
# Add other LLM providers and their respective API key names here
"anthropic": "ANTHROPIC_API_KEY",
}

# Retry settings
Expand Down Expand Up @@ -68,23 +71,55 @@
API_URL = OLLAMA_API_URL
MODEL_TOKEN_LIMITS = {
'llama3': 8192,
}
}
elif LLM_PROVIDER == "anthropic":
API_URL = ANTHROPIC_API_URL
MODEL_TOKEN_LIMITS = {
"claude-3-5-sonnet-20240620": 200000,
"claude-3-opus-20240229": 200000,
"claude-3-sonnet-20240229": 200000,
"claude-3-haiku-20240307": 200000,
"claude-2.1": 100000,
"claude-2.0": 100000,
"claude-instant-1.2": 100000,
}
else:
API_URL = None
MODEL_TOKEN_LIMITS = {}


# Database path
# FRAMEWORK_DB_PATH="/path/to/custom/database.sqlite"
FRAMEWORK_DB_PATH = os.environ.get('FRAMEWORK_DB_PATH', default_db_path)

MODEL_CHOICES = {
'default': None,
'gemma-7b-it': 8192,
'gpt-4o': 4096,
'instructlab/granite-7b-lab-GGUF': 2048,
'MaziyarPanahi/Codestral-22B-v0.1-GGUF': 32768,
'llama3': 8192,
'llama3-70b-8192': 8192,
'llama3-8b-8192': 8192,
'mixtral-8x7b-32768': 32768
}
"anthropic": {
"claude-3-5-sonnet-20240620": 200000,
"claude-3-opus-20240229": 200000,
"claude-3-sonnet-20240229": 200000,
"claude-3-haiku-20240307": 200000,
"claude-2.1": 100000,
"claude-2.0": 100000,
"claude-instant-1.2": 100000,
},
"groq": {
"mixtral-8x7b-32768": 32768,
"llama3-70b-8192": 8192,
"llama3-8b-8192": 8192,
"gemma-7b-it": 8192,
},
"openai": {
"gpt-4": 8192,
"gpt-3.5-turbo": 4096,
},
"fireworks": {
"fireworks": 4096,
},
"ollama": {
"llama3": 8192,
},
"lmstudio": {
"instructlab/granite-7b-lab-GGUF": 2048,
"MaziyarPanahi/Codestral-22B-v0.1-GGUF": 32768,
},
}

SUPPORTED_PROVIDERS = ["anthropic", "fireworks", "groq", "lmstudio", "ollama", "openai"]
13 changes: 13 additions & 0 deletions AutoGroq/configs/config_local.py.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# User-specific configurations
#
# Copy this file to config_local.py and adjust the values; configs/config.py
# imports it and falls back to its built-in defaults for any name not set here.

# Active provider — presumably one of the providers configured below;
# see configs/config.py for the supported set.
LLM_PROVIDER = "anthropic"

# Chat/completions endpoint URLs, one per provider.
ANTHROPIC_API_URL = "https://api.anthropic.com/v1/messages"
GROQ_API_URL = "https://api.groq.com/openai/v1/chat/completions"
LMSTUDIO_API_URL = "http://localhost:1234/v1/chat/completions"  # local LM Studio server
OLLAMA_API_URL = "http://127.0.0.1:11434/api/generate"  # local Ollama server
# OPENAI_API_KEY = "your_openai_api_key"
OPENAI_API_URL = "https://api.openai.com/v1/chat/completions"

DEBUG = True  # overrides DEFAULT_DEBUG in configs/config.py

RETRY_DELAY = 2  # retry delay (presumably seconds) — see "Retry settings" in configs/config.py
56 changes: 56 additions & 0 deletions AutoGroq/configs/config_sessions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
# config_sessions.py

from datetime import datetime
from typing import Dict

# Template configuration for a freshly-initialized AutoGroq agent.
# NOTE(review): the datetime.now() calls below run once at module import, so
# created_at / updated_at / timestamp are frozen at import time and shared by
# every agent built from this template — confirm that callers overwrite them.
DEFAULT_AGENT_CONFIG: Dict = {
    "name": "Default Agent",
    "description": "A default agent for initialization purposes in AutoGroq",
    "tools": [],  # Empty list as default
    "config": {
        "llm_config": {
            "config_list": [
                {
                    # Placeholder endpoint entry; filled in when a real
                    # provider/model is selected.
                    "model": "default",
                    "api_key": None,
                    "base_url": None,
                    "api_type": None,
                    "api_version": None,
                }
            ],
            # Sampling parameters (OpenAI-style names).
            "temperature": 0.7,
            "max_tokens": 1000,
            "top_p": 1.0,
            "frequency_penalty": 0.0,
            "presence_penalty": 0.0,
        },
        "human_input_mode": "NEVER",  # never pause to ask a human for input
        "max_consecutive_auto_reply": 10,
    },
    "role": "Default Assistant",
    "goal": "Assist users with general tasks in AutoGroq",
    "backstory": "I am a default AI assistant created to help initialize the AutoGroq system.",
    "id": None,  # Will be set dynamically when needed
    "created_at": datetime.now().isoformat(),
    "updated_at": datetime.now().isoformat(),
    "user_id": "default_user",
    "workflows": None,
    "type": "assistant",
    "models": [],  # Empty list as default
    "verbose": False,
    "allow_delegation": True,
    "new_description": None,
    "timestamp": datetime.now().isoformat(),
    "is_termination_msg": None,
    "code_execution_config": {
        # Local (non-Docker) code execution in a dedicated working directory.
        "work_dir": "./agent_workspace",
        "use_docker": False,
    },
    "llm": None,
    "function_calling_llm": None,
    "max_iter": 25,
    "max_rpm": None,  # no requests-per-minute cap by default
    "max_execution_time": 600,  # 10 minutes default
    "step_callback": None,
    "cache": True
}
File renamed without changes.
37 changes: 37 additions & 0 deletions AutoGroq/llm_providers/anthropic_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# llm_providers/anthropic_provider.py

import anthropic
import streamlit as st

from llm_providers.base_provider import BaseLLMProvider

class AnthropicProvider(BaseLLMProvider):
    """LLM provider backed by the Anthropic Messages API.

    Wraps the ``anthropic`` SDK client and adapts its responses to the
    OpenAI-style ``{"choices": [{"message": {"content": ...}}]}`` shape
    the rest of the app consumes.
    """

    def __init__(self, api_key, api_url=None):
        # The SDK manages the endpoint itself; api_url is stored only to
        # satisfy the BaseLLMProvider interface.
        self.client = anthropic.Anthropic(api_key=api_key)
        self.api_url = api_url

    def send_request(self, data):
        """Send a chat request to the Messages API.

        data: dict with 'model' and 'messages' (required) and optional
        'max_tokens' / 'temperature'.

        Returns the SDK response object, or None on an API error.
        """
        # BUG FIX: the original read st.session_state.temperature directly,
        # which raises when the session key is unset and the caller omitted
        # 'temperature'. Fall back to the session value, then a sane default.
        temperature = data.get(
            'temperature',
            st.session_state.get('temperature', 0.7),
        )
        try:
            response = self.client.messages.create(
                model=data['model'],
                max_tokens=data.get('max_tokens', 1000),
                temperature=temperature,
                messages=data['messages'],
            )
            return response
        except anthropic.APIError as e:
            # Best-effort: report and signal failure with None rather than
            # crashing the Streamlit app.
            print(f"Anthropic API error: {e}")
            return None

    def process_response(self, response):
        """Normalize an Anthropic response into the OpenAI-style dict.

        Returns None when the request failed (send_request returned None).
        """
        if response is not None:
            return {
                "choices": [
                    {
                        "message": {
                            # The Messages API returns a list of content
                            # blocks; take the text of the first one.
                            "content": response.content[0].text
                        }
                    }
                ]
            }
        return None
8 changes: 5 additions & 3 deletions AutoGroq/llm_providers/base_provider.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@

from abc import ABC, abstractmethod

class BaseLLMProvider(ABC):
@abstractmethod
def __init__(self, api_key, api_url=None):
pass

@abstractmethod
def send_request(self, data):
pass

@abstractmethod
def process_response(self, response):
pass

pass
1 change: 1 addition & 0 deletions AutoGroq/llm_providers/ollama_provider.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
import requests
import streamlit as st

from llm_providers.base_provider import BaseLLMProvider

Expand Down
Loading

0 comments on commit 26ceb2a

Please sign in to comment.