Commit e6d47ef

Reformat
1 parent: 82f44a3

2 files changed: +9 -29 lines changed


servers/fai/alembic/versions/create_websites_table.py

Lines changed: 1 addition & 3 deletions
@@ -43,9 +43,7 @@ def upgrade() -> None:
     )
     op.create_index("idx_websites_domain", "websites", ["domain"], unique=False)
     op.create_index("idx_websites_base_url", "websites", ["base_url"], unique=False)
-    op.create_index(
-        "idx_websites_domain_base_url", "websites", ["domain", "base_url"], unique=False
-    )
+    op.create_index("idx_websites_domain_base_url", "websites", ["domain", "base_url"], unique=False)
     # ### end Alembic commands ###
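
The change above only collapses the composite-index call onto one line. For orientation, here is a minimal sketch of the full migration this file plausibly contains; the column definitions and the downgrade() body are assumptions, since the diff shows only the index statements:

import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    # Assumed column set; the diff only shows the index statements.
    op.create_table(
        "websites",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("domain", sa.String(), nullable=False),
        sa.Column("base_url", sa.String(), nullable=False),
    )
    op.create_index("idx_websites_domain", "websites", ["domain"], unique=False)
    op.create_index("idx_websites_base_url", "websites", ["base_url"], unique=False)
    op.create_index("idx_websites_domain_base_url", "websites", ["domain", "base_url"], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # Assumed mirror of upgrade(): drop the indexes, then the table.
    op.drop_index("idx_websites_domain_base_url", table_name="websites")
    op.drop_index("idx_websites_base_url", table_name="websites")
    op.drop_index("idx_websites_domain", table_name="websites")
    op.drop_table("websites")

The composite (domain, base_url) index matches the two-column filters used by the reindex and delete routes in website.py below.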

servers/fai/src/fai/routes/website.py

Lines changed: 8 additions & 26 deletions
@@ -112,9 +112,7 @@ async def index_website(
         asyncio.create_task(job_manager.execute_job(job_id, _crawl_website_job, index_source.id, domain, body, db))

         LOGGER.info(f"Started website crawl job {job_id} for domain: {domain}, base_url: {body.base_url}")
-        return JSONResponse(
-            jsonable_encoder(IndexWebsiteResponse(job_id=job_id, base_url=body.base_url))
-        )
+        return JSONResponse(jsonable_encoder(IndexWebsiteResponse(job_id=job_id, base_url=body.base_url)))

     except Exception as e:
         LOGGER.exception("Failed to start website crawl")
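
The collapsed return is FastAPI's manual-serialization idiom: jsonable_encoder flattens a Pydantic model into JSON-safe primitives, which JSONResponse then serializes. A self-contained sketch of the pattern, with IndexWebsiteResponse's fields inferred from the call site:

from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse
from pydantic import BaseModel


class IndexWebsiteResponse(BaseModel):
    # Fields inferred from the call site above; the real model lives in fai.
    job_id: str
    base_url: str


def make_index_response(job_id: str, base_url: str) -> JSONResponse:
    # jsonable_encoder converts the model (and any nested datetimes, UUIDs,
    # etc.) into plain dict/list/str values that JSONResponse accepts.
    return JSONResponse(jsonable_encoder(IndexWebsiteResponse(job_id=job_id, base_url=base_url)))
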
@@ -260,11 +258,7 @@ async def get_website_by_id(
     Get a single indexed website page by ID.
     """
     try:
-        website = await db.execute(
-            select(WebsiteDb).where(
-                WebsiteDb.id == website_id, WebsiteDb.domain == domain
-            )
-        )
+        website = await db.execute(select(WebsiteDb).where(WebsiteDb.id == website_id, WebsiteDb.domain == domain))
         website = website.scalar_one_or_none()

         if not website:
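
The one-liner keeps SQLAlchemy's async fetch-one idiom intact: await db.execute(select(...)), then scalar_one_or_none() for the row or None. A sketch under an assumed model shape (the real WebsiteDb lives elsewhere in fai; only the columns these routes filter on are included):

from sqlalchemy import String, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class WebsiteDb(Base):
    # Assumed shape: only the columns these routes filter on.
    __tablename__ = "websites"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    domain: Mapped[str] = mapped_column(String)
    base_url: Mapped[str] = mapped_column(String)


async def fetch_website(db: AsyncSession, website_id: str, domain: str) -> WebsiteDb | None:
    result = await db.execute(select(WebsiteDb).where(WebsiteDb.id == website_id, WebsiteDb.domain == domain))
    # scalar_one_or_none(): the mapped object, None if no row matched,
    # and MultipleResultsFound if more than one row came back.
    return result.scalar_one_or_none()
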
@@ -301,9 +295,7 @@ async def get_websites(

         offset = (page - 1) * limit

-        total_count = await db.scalar(
-            select(func.count()).select_from(WebsiteDb).where(WebsiteDb.domain == domain)
-        )
+        total_count = await db.scalar(select(func.count()).select_from(WebsiteDb).where(WebsiteDb.domain == domain))

         stmt = select(WebsiteDb).where(WebsiteDb.domain == domain).offset(offset).limit(limit)
         result = await db.execute(stmt)
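
Here the count query and the page query share the same domain filter, and offset/limit slices out the requested page. A short sketch of the pattern, reusing the assumed WebsiteDb model from the previous note:

from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession


async def page_websites(db: AsyncSession, domain: str, page: int, limit: int):
    offset = (page - 1) * limit  # 1-based page numbers, as in the route
    # db.scalar() runs the statement and returns the first column of the
    # first row, i.e. the COUNT(*) value.
    total_count = await db.scalar(select(func.count()).select_from(WebsiteDb).where(WebsiteDb.domain == domain))
    stmt = select(WebsiteDb).where(WebsiteDb.domain == domain).offset(offset).limit(limit)
    result = await db.execute(stmt)
    return total_count, result.scalars().all()
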
@@ -348,9 +340,7 @@ async def reindex_website(
     try:
         # Delete all existing pages from this website
         websites = await db.execute(
-            select(WebsiteDb).where(
-                WebsiteDb.domain == domain, WebsiteDb.base_url == body.base_url
-            )
+            select(WebsiteDb).where(WebsiteDb.domain == domain, WebsiteDb.base_url == body.base_url)
         )
         websites = websites.scalars().all()
@@ -409,9 +399,7 @@ async def reindex_website(
         )

         LOGGER.info(f"Started website re-crawl job {job_id} for domain: {domain}, base_url: {body.base_url}")
-        return JSONResponse(
-            jsonable_encoder(ReindexWebsiteResponse(job_id=job_id, base_url=body.base_url))
-        )
+        return JSONResponse(jsonable_encoder(ReindexWebsiteResponse(job_id=job_id, base_url=body.base_url)))

     except Exception as e:
         LOGGER.exception("Failed to reindex website")
@@ -435,9 +423,7 @@ async def delete_website(
     """
     try:
         websites = await db.execute(
-            select(WebsiteDb).where(
-                WebsiteDb.domain == domain, WebsiteDb.base_url == body.base_url
-            )
+            select(WebsiteDb).where(WebsiteDb.domain == domain, WebsiteDb.base_url == body.base_url)
         )
         websites = websites.scalars().all()
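
Both reindex_website and delete_website begin by loading every row matching (domain, base_url), the exact filter the composite index from the migration serves. The deletion itself is outside the visible hunks, so the per-row delete and commit below are assumptions inferred from the pages_deleted counter in the later hunks:

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession


async def delete_pages(db: AsyncSession, domain: str, base_url: str) -> int:
    result = await db.execute(select(WebsiteDb).where(WebsiteDb.domain == domain, WebsiteDb.base_url == base_url))
    websites = result.scalars().all()
    for website in websites:
        # Assumed: per-object delete, so ORM-level cascades still fire.
        await db.delete(website)
    await db.commit()
    return len(websites)  # feeds the pages_deleted count in the response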

@@ -453,9 +439,7 @@ async def delete_website(
         await sync_index_to_target(domain, get_website_index_name(), get_query_index_name())

         LOGGER.info(f"Deleted {pages_deleted} pages from {body.base_url} for domain: {domain}")
-        return JSONResponse(
-            jsonable_encoder(DeleteWebsiteResponse(success=True, pages_deleted=pages_deleted))
-        )
+        return JSONResponse(jsonable_encoder(DeleteWebsiteResponse(success=True, pages_deleted=pages_deleted)))

     except Exception as e:
         LOGGER.exception("Failed to delete website")
@@ -492,9 +476,7 @@ async def delete_all_websites(
         await sync_index_to_target(domain, get_website_index_name(), get_query_index_name())

         LOGGER.info(f"Deleted all {pages_deleted} website pages for domain: {domain}")
-        return JSONResponse(
-            jsonable_encoder(DeleteAllWebsitesResponse(success=True, pages_deleted=pages_deleted))
-        )
+        return JSONResponse(jsonable_encoder(DeleteAllWebsitesResponse(success=True, pages_deleted=pages_deleted)))

     except Exception as e:
         LOGGER.exception("Failed to delete all websites")
