Skip to content

Commit 9d7d4ea

Browse files
committed
add finetuned model
1 parent 17039b1 commit 9d7d4ea

File tree

6 files changed

+12
-87
lines changed

6 files changed

+12
-87
lines changed

agent_video/pipecat/agent/src/agents/agent.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,7 @@
1919
DailySendDataInput,
2020
daily_send_data,
2121
)
22-
from src.functions.llm_chat import (
23-
LlmChatInput,
24-
Message,
25-
llm_chat,
26-
)
27-
from src.functions.llm_talk import LlmTalkInput, llm_talk
22+
from src.functions.llm_talk import LlmTalkInput, llm_talk, Message, ModelType
2823

2924

3025
class MessagesEvent(BaseModel):
@@ -34,10 +29,9 @@ class MessagesEvent(BaseModel):
3429
class EndEvent(BaseModel):
3530
end: bool
3631

37-
3832
class AgentInput(BaseModel):
3933
room_url: str
40-
model: Literal["restack", "gpt-4o-mini", "gpt-4o", "openpipe:twenty-lions-fall"] = "restack"
34+
model: ModelType
4135
interactive_prompt: str | None = None
4236
reasoning_prompt: str | None = None
4337

agent_video/pipecat/agent/src/functions/llm_chat.py

Lines changed: 0 additions & 69 deletions
This file was deleted.

agent_video/pipecat/agent/src/functions/llm_logic.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from pydantic import BaseModel
66
from restack_ai.function import NonRetryableError, function
77

8-
from src.functions.llm_chat import Message
8+
from src.functions.llm_talk import Message
99

1010

1111
class LlmLogicResponse(BaseModel):

agent_video/pipecat/agent/src/functions/llm_talk.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,19 +10,19 @@
1010
)
1111

1212
from src.client import api_address
13-
from src.functions.llm_chat import Message
1413

14+
class Message(BaseModel):
15+
role: str
16+
content: str
17+
18+
ModelType = Literal["gpt-4o-mini", "ft:gpt-4o-mini-2024-07-18:restack::BJymdMm8", "openpipe:twenty-lions-fall"]
1519

1620
class LlmTalkInput(BaseModel):
1721
messages: list[Message] = Field(default_factory=list)
1822
context: str | None = None # Updated context from Slow AI
1923
mode: Literal["default", "interrupt"]
2024
stream: bool = True
21-
model: Literal[
22-
"gpt-4o-mini",
23-
"ft:gpt-4o-mini-2024-07-18:restack::BJymdMm8",
24-
"openpipe:twenty-lions-fall"
25-
]
25+
model: ModelType = "gpt-4o-mini"
2626
interactive_prompt: str | None = None
2727

2828

agent_video/pipecat/agent/src/services.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@
1111
from src.functions.context_docs import context_docs
1212
from src.functions.daily_create_room import daily_create_room
1313
from src.functions.daily_send_data import daily_send_data
14-
from src.functions.llm_chat import llm_chat
1514
from src.functions.llm_logic import llm_logic
1615
from src.functions.llm_talk import llm_talk
1716
from src.functions.send_agent_event import send_agent_event
@@ -25,7 +24,6 @@ async def main() -> None:
2524
agents=[AgentVideo],
2625
workflows=[RoomWorkflow, LogicWorkflow],
2726
functions=[
28-
llm_chat,
2927
llm_logic,
3028
llm_talk,
3129
context_docs,

agent_video/pipecat/agent/src/workflows/logic.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@
2525
LlmTalkInput,
2626
Message,
2727
llm_talk,
28+
LlmTalkInput,
29+
ModelType
2830
)
2931
from src.functions.send_agent_event import (
3032
SendAgentEventInput,
@@ -38,7 +40,7 @@ class LogicWorkflowInput(BaseModel):
3840
room_url: str
3941
interactive_prompt: str | None = None
4042
reasoning_prompt: str | None = None
41-
model: Literal["gpt-4o-mini", "openpipe:twenty-lions-fall", "ft:gpt-4o-mini-2024-07-18:restack::BJymdMm8"] = "gpt-4o-mini"
43+
model: ModelType
4244

4345

4446
class LogicWorkflowOutput(BaseModel):

0 commit comments

Comments (0)