Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions conf/coverstore.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ db_parameters:
dbn: "postgres"
db: "coverstore"
host: db
driver: psycopg

data_root: "/var/lib/coverstore"
default_image: "static/images/empty.gif"
Expand Down
1 change: 1 addition & 0 deletions conf/infobase.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ db_parameters:
engine: postgres
database: openlibrary
host: db
driver: psycopg

account_bot: /people/AccountBot
user_root: /people/
Expand Down
4 changes: 4 additions & 0 deletions conf/openlibrary.yml
Original file line number Diff line number Diff line change
Expand Up @@ -188,3 +188,7 @@ sentry_cron_jobs:

# Observations cache settings:
observation_cache_duration: 86400

db_parameters:
driver: psycopg

41 changes: 39 additions & 2 deletions openlibrary/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,43 @@
import os
import sys

import web
import yaml

import infogami
from infogami import config
from infogami.infobase import server
from infogami.infobase import server as infobase_server


# TODO: Remove once infogami supports psycopg3 natively (#10258)
def _patch_infogami_for_psycopg3():
    """
    Temporary patch: wraps infogami's parse_db_parameters to preserve
    the 'driver' key, which infogami currently strips out.

    Idempotent: a sentinel attribute on the wrapper prevents double-wrapping
    if this module is imported more than once.
    """
    orig = infobase_server.parse_db_parameters
    if getattr(orig, "_is_patched", False):
        return  # already patched; don't wrap the wrapper

    def patched(d):
        """Like parse_db_parameters, but keeps d['driver'] in the result."""
        try:
            result = orig(d)
        except KeyError as e:
            # Only handle a missing 'db'/'database' key — let every other
            # KeyError propagate. Guard e.args first: a bare KeyError()
            # has an empty args tuple and indexing it would raise
            # IndexError inside this handler.
            if (
                isinstance(d, dict)
                and "driver" in d
                and e.args
                and e.args[0] in ("db", "database")
            ):
                return d
            raise

        if result and isinstance(d, dict) and "driver" in d:
            # Re-attach the driver key that infogami's parser drops.
            result["driver"] = d["driver"]
        return result

    patched._is_patched = True
    infobase_server.parse_db_parameters = patched


_patch_infogami_for_psycopg3()


runtime_config = {}

Expand Down Expand Up @@ -41,7 +73,12 @@ def load_config(config_file):
setup_infobase_config(config_file)

# This sets web.config.db_parameters
server.update_config(config.infobase)
infobase_server.update_config(config.infobase)

# Safety net: ensure driver survives update_config
# TODO: Remove once infogami is updated (#10258)
if isinstance(web.config.get("db_parameters"), dict):
web.config.db_parameters.setdefault("driver", "psycopg")


def setup_infobase_config(config_file):
Expand Down
1 change: 0 additions & 1 deletion openlibrary/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,6 @@ def render_template(request):

from openlibrary.plugins.openlibrary import code

web.config.db_parameters = {}
code.setup_template_globals()

def render(name, *a, **kw):
Expand Down
2 changes: 1 addition & 1 deletion openlibrary/core/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from sqlite3 import IntegrityError

import web
from psycopg2.errors import UniqueViolation
from psycopg.errors import UniqueViolation

from infogami.utils import stats

Expand Down
2 changes: 1 addition & 1 deletion openlibrary/core/edits.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from sqlite3 import IntegrityError
from types import MappingProxyType

from psycopg2.errors import UniqueViolation
from psycopg.errors import UniqueViolation

from infogami.utils.view import public
from openlibrary.core import cache
Expand Down
2 changes: 1 addition & 1 deletion openlibrary/core/imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from typing import TYPE_CHECKING, Any, Final

import web
from psycopg2.errors import UndefinedTable, UniqueViolation
from psycopg.errors import UndefinedTable, UniqueViolation
from pydantic import ValidationError
from web.db import ResultSet

Expand Down
1 change: 1 addition & 0 deletions openlibrary/coverstore/tests/test_webapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ def setup_db():
system("dropdb coverstore_test")
system("createdb coverstore_test")
config.db_parameters = {
"driver": "psycopg",
"dbn": "postgres",
"db": "coverstore_test",
"user": "openlibrary",
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ lxml==4.9.4
multipart==0.2.4
Pillow==10.4.0
prometheus-fastapi-instrumentator==7.1.0
psycopg2==2.9.6
psycopg[binary]==3.3.3
pydantic==2.12.5
pymarc==5.1.0
python-dateutil==2.8.2
Expand Down
2 changes: 1 addition & 1 deletion scripts/migrations/write_prefs_to_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import argparse
from pathlib import Path

from psycopg2 import DatabaseError
from psycopg import DatabaseError

import infogami
from openlibrary.accounts import RunAs
Expand Down
4 changes: 2 additions & 2 deletions scripts/monitoring/utils.sh
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ log_workers_cur_fn() {
# Monitors the current function running on each gunicorn worker.
#
# Only explicitly names a few specific functions to monitor:
# - connect: psycopg2; this was a bottleneck before we switched to using direct
# IPs with psycopg2
# - connect: psycopg; this was a bottleneck before we switched to using direct
# IPs with psycopg
# - sleep|wait: Normal gunicorn behavior denoting a worker not doing anything
# - getaddrinfo: Marker for DNS resolution time; saw this occasionally in Sentry's
# profiling for solr requests
Expand Down
7 changes: 3 additions & 4 deletions scripts/solr_builder/solr_builder/solr_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from typing import Any, Literal, Self

import aiofiles
import psycopg2
import psycopg

from openlibrary.core.bookshelves import Bookshelves
from openlibrary.core.ratings import Ratings, WorkRatingsSummary
Expand Down Expand Up @@ -64,7 +64,7 @@ def __init__(self, db_conf_file: str):
"""
super().__init__()
self._db_conf = config_section_to_dict(db_conf_file, "postgres")
self._conn: psycopg2._psycopg.connection = None
self._conn: psycopg.Connection = None
self.cache: dict = {}
self.cached_work_editions_ranges: list = []
self.cached_work_ratings: dict[str, WorkRatingsSummary] = {}
Expand All @@ -74,7 +74,7 @@ def __enter__(self) -> Self:
"""
:rtype: LocalPostgresDataProvider
"""
self._conn = psycopg2.connect(**self._db_conf)
self._conn = psycopg.connect(**self._db_conf)
return self

def __exit__(self, type, value, traceback):
Expand Down Expand Up @@ -129,7 +129,6 @@ def query_batched(
cursor_name or 'solr_builder_server_side_cursor_' + uuid.uuid4().hex
)
cur = self._conn.cursor(name=cursor_name)
cur.itersize = size
cur.execute(query)

while True:
Expand Down
Loading