
Commit

quality: Code lint
clemlesne committed Aug 17, 2024
1 parent 4206772 commit 5927f3a
Showing 3 changed files with 25 additions and 13 deletions.
22 changes: 12 additions & 10 deletions app/helpers/persistence.py
@@ -5,9 +5,6 @@
 from azure.core.exceptions import ResourceExistsError
 from azure.search.documents.aio import SearchClient
 from azure.search.documents.indexes.aio import SearchIndexClient
-from azure.storage.blob.aio import BlobServiceClient, ContainerClient
-from azure.storage.queue.aio import QueueClient, QueueServiceClient
-from openai import AsyncAzureOpenAI
 from azure.search.documents.indexes.models import (
     AzureOpenAIParameters,
     AzureOpenAIVectorizer,
@@ -21,6 +18,9 @@
     VectorSearch,
     VectorSearchProfile,
 )
+from azure.storage.blob.aio import BlobServiceClient, ContainerClient
+from azure.storage.queue.aio import QueueClient, QueueServiceClient
+from openai import AsyncAzureOpenAI
 
 from app.helpers.logging import logger
 
@@ -105,9 +105,9 @@ async def search_client(
                     deployment_id=azure_openai_embedding_deployment,
                     model_name=azure_openai_embedding_model,
                     resource_uri=azure_openai_endpoint,
-                )
+                ),
             )
-        ]
+        ],
     )
 
     # Create index if it does not exist
@@ -122,11 +122,13 @@
         credential=AzureKeyCredential(api_key),
     ) as client:
         try:
-            await client.create_index(SearchIndex(
-                fields=fields,
-                name=index,
-                vector_search=vector_search,
-            ))
+            await client.create_index(
+                SearchIndex(
+                    fields=fields,
+                    name=index,
+                    vector_search=vector_search,
+                )
+            )
             logger.info('Created Search "%s"', index)
         except ResourceExistsError:
             pass
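
Note on the pattern being reformatted here: the try/except around create_index is the usual idempotent-create idiom for Azure AI Search, treating an existing index as success rather than an error. A minimal standalone sketch of that idiom (the placeholder schema and the ensure_index name are hypothetical, not the repository's real values):

from azure.core.credentials import AzureKeyCredential
from azure.core.exceptions import ResourceExistsError
from azure.search.documents.indexes.aio import SearchIndexClient
from azure.search.documents.indexes.models import SearchableField, SearchIndex, SimpleField


async def ensure_index(endpoint: str, api_key: str, name: str) -> None:
    # Placeholder schema; the real code also wires up vector search
    fields = [
        SimpleField(name="id", type="Edm.String", key=True),
        SearchableField(name="content", type="Edm.String"),
    ]
    async with SearchIndexClient(
        endpoint=endpoint,
        credential=AzureKeyCredential(api_key),
    ) as client:
        try:
            await client.create_index(SearchIndex(fields=fields, name=name))
        except ResourceExistsError:
            pass  # Already created, possibly by a concurrent worker

Catching ResourceExistsError after the attempt, rather than checking for the index first, stays race-free when several workers start at once.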
7 changes: 6 additions & 1 deletion app/index.py
@@ -26,7 +26,12 @@
     queue_client,
     search_client,
 )
-from app.helpers.resources import index_queue_name, hash_url, scrape_container_name, index_index_name
+from app.helpers.resources import (
+    hash_url,
+    index_index_name,
+    index_queue_name,
+    scrape_container_name,
+)
 from app.helpers.threading import run_workers
 from app.models.indexed import IndexedIngestModel
 from app.models.scraped import ScrapedUrlModel
9 changes: 7 additions & 2 deletions app/scrape.py
@@ -21,8 +21,8 @@
 from app.helpers.logging import logger
 from app.helpers.persistence import blob_client, queue_client
 from app.helpers.resources import (
-    index_queue_name,
     hash_url,
+    index_queue_name,
     resources_dir,
     scrape_container_name,
     scrape_queue_name,
@@ -531,7 +531,11 @@ async def _wrapper(
         )
 
         # Store content size
-        size_bytes = int(content_length) if (content_length := res.headers.get("content-length")) else 0
+        size_bytes = (
+            int(content_length)
+            if (content_length := res.headers.get("content-length"))
+            else 0
+        )
         size_callback(size_bytes)
 
         # Continue the request
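
Note: the reflowed one-liner is the walrus-operator guard idiom, binding the header value inside the condition so the int() branch can reuse it. A self-contained sketch of the same logic (the plain dict stands in for the real response headers):

def parse_content_length(headers: dict[str, str]) -> int:
    # := binds content_length inside the condition, so the int()
    # branch can reuse it; absent or empty values fall back to 0
    return (
        int(content_length)
        if (content_length := headers.get("content-length"))
        else 0
    )


assert parse_content_length({"content-length": "1024"}) == 1024
assert parse_content_length({}) == 0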
@@ -570,6 +574,7 @@ def _generic_error(
     )
 
     total_size_bytes = 0
+
     def _size_callback(size_bytes: int) -> None:
         nonlocal total_size_bytes
         total_size_bytes += size_bytes
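
Note: the single added blank line separates the counter from the closure that mutates it. Inside _size_callback, nonlocal makes the assignment update total_size_bytes in the enclosing scope rather than creating a new local. A runnable sketch of the same accumulator pattern (the make_size_tracker factory is illustrative, not from the repository):

def make_size_tracker():
    total_size_bytes = 0

    def _size_callback(size_bytes: int) -> None:
        # Without nonlocal, this assignment would create a new local
        # variable instead of updating the enclosing counter
        nonlocal total_size_bytes
        total_size_bytes += size_bytes

    def _total() -> int:
        return total_size_bytes

    return _size_callback, _total


callback, total = make_size_tracker()
callback(512)
callback(2048)
assert total() == 2560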
