
Commit b32311e: Ruff and black
1 parent 687227a

File tree: 6 files changed, +26 -19 lines changed

.github/workflows/python-code-quality.yaml

Lines changed: 1 addition & 1 deletion
@@ -20,6 +20,6 @@ jobs:
           python -m pip install --upgrade pip
           pip install -r requirements-dev.txt
       - name: Lint with ruff
-        run: ruff .
+        run: ruff check .
      - name: Check formatting with black
        run: black . --check --verbose
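
Newer Ruff releases dropped the bare `ruff .` invocation in favor of the explicit `check` subcommand, which is what this workflow change tracks. For reference, a minimal sketch of running the same two CI checks locally; the Python wrapper is an illustration of the developer workflow, not part of the commit:

    import subprocess

    # Mirror the workflow steps: lint with Ruff's check subcommand, then verify formatting with Black.
    for cmd in (["ruff", "check", "."], ["black", ".", "--check", "--verbose"]):
        subprocess.run(cmd, check=True)  # check=True raises CalledProcessError if a step fails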

pyproject.toml

Lines changed: 2 additions & 1 deletion
@@ -1,9 +1,10 @@
 [tool.ruff]
 line-length = 120
 target-version = "py311"
+
+[tool.ruff.lint]
 select = ["E", "F", "I", "UP"]
 ignore = ["D203"]
-show-source = true
 
 [tool.ruff.lint.isort]
 known-first-party = ["fastapi_app"]

src/fastapi_app/query_rewriter.py

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ def build_search_function() -> list[ChatCompletionToolParam]:
             "properties": {
                 "comparison_operator": {
                     "type": "string",
-                    "description": "Operator to compare the column value, either '>', '<', '>=', '<=', '=='",
+                    "description": "Operator to compare the column value, either '>', '<', '>=', '<=', '=='",  # noqa
                 },
                 "value": {
                     "type": "number",

src/fastapi_app/rag_advanced.py

Lines changed: 10 additions & 7 deletions
@@ -17,7 +17,6 @@
 
 
 class AdvancedRAGChat:
-
     def __init__(
         self,
         *,
@@ -26,7 +25,8 @@ def __init__(
         chat_model: str,
         chat_deployment: str | None,  # Not needed for non-Azure OpenAI
         openai_embed_client: AsyncOpenAI,
-        embed_deployment: str | None,  # Not needed for non-Azure OpenAI or for retrieval_mode="text"
+        embed_deployment: str
+        | None,  # Not needed for non-Azure OpenAI or for retrieval_mode="text"
         embed_model: str,
         embed_dimensions: int,
     ):
@@ -46,7 +46,6 @@ def __init__(
     async def run(
         self, messages: list[dict], overrides: dict[str, Any] = {}
     ) -> dict[str, Any] | AsyncGenerator[dict[str, Any], None]:
-
         text_search = overrides.get("retrieval_mode") in ["text", "hybrid", None]
         vector_search = overrides.get("retrieval_mode") in ["vectors", "hybrid", None]
         top = overrides.get("top", 3)
@@ -61,7 +60,8 @@ async def run(
             system_prompt=self.query_prompt_template,
             new_user_content=original_user_query,
             past_messages=past_messages,
-            max_tokens=self.chat_token_limit - query_response_token_limit,  # TODO: count functions
+            max_tokens=self.chat_token_limit
+            - query_response_token_limit,  # TODO: count functions
             fallback_to_default=True,
         )
 
@@ -70,7 +70,7 @@ async def run(
             # Azure OpenAI takes the deployment name as the model name
             model=self.chat_deployment if self.chat_deployment else self.chat_model,
             temperature=0.0,  # Minimize creativity for search query generation
-            max_tokens=query_response_token_limit,  # Setting too low risks malformed JSON, setting too high may affect performance
+            max_tokens=query_response_token_limit,  # Setting too low risks malformed JSON, too high risks performance
             n=1,
             tools=build_search_function(),
             tool_choice="auto",
@@ -93,14 +93,17 @@ async def run(
 
         results = await self.searcher.search(query_text, vector, top, filters)
 
-        sources_content = [f"[{(item.id)}]:{item.to_str_for_rag()}\n\n" for item in results]
+        sources_content = [
+            f"[{(item.id)}]:{item.to_str_for_rag()}\n\n" for item in results
+        ]
         content = "\n".join(sources_content)
 
         # Generate a contextual and content specific answer using the search results and chat history
         response_token_limit = 1024
         messages = build_messages(
             model=self.chat_model,
-            system_prompt=overrides.get("prompt_template") or self.answer_prompt_template,
+            system_prompt=overrides.get("prompt_template")
+            or self.answer_prompt_template,
             new_user_content=original_user_query + "\n\nSources:\n" + content,
             past_messages=past_messages,
             max_tokens=self.chat_token_limit - response_token_limit,
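
The edits in this file are formatting-only: blank lines after `class`/`def` headers are dropped, one comment is shortened, and long lines are wrapped without changing behavior. Several wrapped lines are shorter than the 120-character Ruff limit, which suggests Black is running at its default line length of 88; no Black configuration appears in this diff. The wrapped `max_tokens=self.chat_token_limit - query_response_token_limit` argument is a token budget: reserve room for the model's reply and give the prompt builder whatever remains of the context window. A minimal sketch of that arithmetic, with illustrative numbers only (the real limits come from the app's model configuration):

    # Hypothetical numbers, for illustration only.
    chat_token_limit = 4096           # assumed context window for the chat model
    query_response_token_limit = 100  # tokens reserved for the rewritten search query

    # Budget left for the system prompt, user question, and trimmed chat history.
    prompt_budget = chat_token_limit - query_response_token_limit
    assert prompt_budget == 3996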

src/fastapi_app/setup_postgres_seeddata.py

Lines changed: 11 additions & 8 deletions
@@ -9,33 +9,38 @@
 from sqlalchemy import select, text
 from sqlalchemy.ext.asyncio import async_sessionmaker
 
-from fastapi_app.postgres_engine import create_postgres_engine_from_args, create_postgres_engine_from_env
+from fastapi_app.postgres_engine import (
+    create_postgres_engine_from_args,
+    create_postgres_engine_from_env,
+)
 from fastapi_app.postgres_models import Item
 
 logger = logging.getLogger("ragapp")
 
 
 async def seed_data(engine):
-
     # Check if Item table exists
     async with engine.begin() as conn:
         result = await conn.execute(
             text(
-                "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'items')"
+                "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'items')"  # noqa
             )
         )
         if not result.scalar():
-            logger.error("Items table does not exist. Please run the database setup script first.")
+            logger.error(
+                "Items table does not exist. Please run the database setup script first."
+            )
             return
 
     async with async_sessionmaker(engine, expire_on_commit=False)() as session:
-
         # Insert the items from the JSON file into the database
         current_dir = os.path.dirname(os.path.realpath(__file__))
         with open(os.path.join(current_dir, "seed_data.json")) as f:
             catalog_items = json.load(f)
             for catalog_item in catalog_items:
-                item = await session.execute(select(Item).filter(Item.id == catalog_item["Id"]))
+                item = await session.execute(
+                    select(Item).filter(Item.id == catalog_item["Id"])
+                )
                 if item.scalars().first():
                     continue
                 item = Item(
@@ -57,7 +62,6 @@ async def seed_data(engine):
 
 
 async def main():
-
     parser = argparse.ArgumentParser(description="Create database schema")
     parser.add_argument("--host", type=str, help="Postgres host")
     parser.add_argument("--username", type=str, help="Postgres username")
@@ -78,7 +82,6 @@ async def main():
 
 
 if __name__ == "__main__":
-
     logging.basicConfig(level=logging.WARNING)
     logger.setLevel(logging.INFO)
    load_dotenv(override=True)
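
The import of the two engine factories is split into the parenthesized multi-line form, presumably by the formatter, and with the `I` (isort) rules enabled plus `known-first-party = ["fastapi_app"]`, Ruff keeps the import sections grouped and sorted. A minimal sketch of the expected grouping; comments are added for illustration:

    # Standard library first...
    import json
    import logging

    # ...then third-party packages...
    from sqlalchemy import select, text

    # ...then first-party modules (fastapi_app is declared first-party in pyproject.toml),
    # wrapped one name per line when the single-line form gets too long for the formatter.
    from fastapi_app.postgres_engine import (
        create_postgres_engine_from_args,
        create_postgres_engine_from_env,
    )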

src/gunicorn.conf.py

Lines changed: 1 addition & 1 deletion
@@ -8,4 +8,4 @@
 
 worker_class = "uvicorn.workers.UvicornWorker"
 
-timeout = 600
+timeout = 600

(The timeout line is unchanged character for character; the change most likely just adds a missing trailing newline at the end of the file.)
