Merged (changes from all commits)
7 changes: 3 additions & 4 deletions lmfdb/api/api.py
@@ -7,8 +7,7 @@
 from lmfdb import db
 from psycodict.encoding import Json
 from lmfdb.utils import flash_error
-from datetime import datetime
-from lmfdb.utils.datetime_utils import UTC
+from lmfdb.utils.datetime_utils import utc_now_naive
 from flask import (render_template, request, url_for, current_app,
                    abort, redirect, Response)
 from lmfdb.api import api_page, api_logger
@@ -324,7 +323,7 @@ def apierror(msg, flash_extras=[], code=404, table=True):
     # the collected result
     data = {
         "table": table,
-        "timestamp": datetime.now(UTC).isoformat(),
+        "timestamp": utc_now_naive().isoformat(),
         "data": data,
         "start": start,
         "offset": offset,
@@ -427,7 +426,7 @@ def apierror(msg, flash_extras=[], code=404, table=False):
         "labels": labels,
         "tables": tables,
         "label_cols": label_cols,
-        "timestamp": datetime.now(UTC).isoformat(),
+        "timestamp": utc_now_naive().isoformat(),
         "data": data,
     }
     if format.lower() == "json":
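For context, utc_now_naive itself is not part of this diff. A minimal sketch of what the helper in lmfdb/utils/datetime_utils.py presumably does, based only on its name and the call sites above (an assumption, not the actual implementation):

from datetime import datetime, timezone

def utc_now_naive():
    # Current time in UTC with the tzinfo stripped, so the result plays
    # well with naive datetimes such as values stored in PostgreSQL
    # "timestamp without time zone" columns.
    return datetime.now(timezone.utc).replace(tzinfo=None)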
1 change: 1 addition & 0 deletions lmfdb/app.py
@@ -713,6 +713,7 @@ def sitemap():
 def WhiteListedRoutes():
     return [
         'ArtinRepresentation',
+        'Belyi',
         'Character/Dirichlet',
         'Character/calc-gauss/Dirichlet',
         'Character/calc-jacobi/Dirichlet',
4 changes: 3 additions & 1 deletion lmfdb/classical_modular_forms/download.py
@@ -5,7 +5,7 @@
 from psycodict.encoding import Json
 from lmfdb.utils import Downloader, flash_error
 from lmfdb.characters.TinyConrey import ConreyCharacter
-from lmfdb.classical_modular_forms.web_newform import WebNewform
+from lmfdb.classical_modular_forms.web_newform import WebNewform, valid_label
 from lmfdb.classical_modular_forms.web_space import WebNewformSpace, WebGamma1Space


@@ -148,6 +148,8 @@ def _get_traces(self, label):
     qexp_function_body_sparse_cyclotomic = {'sage': header + discrete_log_sage + extend_multiplicatively_sage + field_and_convert_sage_sparse_cyclotomic + convert_aps + char_values_sage_generic + an_code_sage}

     def download_qexp(self, label, lang='sage'):
+        if not valid_label(label):
+            return abort(404, "Invalid label: %s" % label)
         if isinstance(lang, str):
             lang = self.languages.get(lang, self.languages['sage'])
         hecke_nf = self._get_hecke_nf(label)
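valid_label is defined in web_newform.py and does not appear in this diff. A rough sketch of the kind of check it presumably performs, matching the newform label shape level.weight.char_orbit.hecke_orbit (e.g. 11.2.a.a); the regex below is an illustrative assumption, not the LMFDB implementation:

import re

# Assumed label shape: four dot-separated parts, the first two numeric
# (level and weight), the last two lower-case orbit codes.
NEWFORM_LABEL_RE = re.compile(r"^\d+\.\d+\.[a-z]+\.[a-z]+$")

def valid_label(label):
    return bool(NEWFORM_LABEL_RE.match(label))

Under this sketch, the labels exercised in test_cmf2.py below ('safeboating', 'invalid.label', '11.2.a', '11.2.a.a.extra') are all rejected, which is what the new 404 test expects.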
5 changes: 5 additions & 0 deletions lmfdb/classical_modular_forms/test_cmf2.py
@@ -30,6 +30,11 @@ def test_download_qexp(self):
             page = self.tc.get('/ModularForm/GL2/Q/holomorphic/download_qexp/{}'.format(label), follow_redirects=True)
             assert 'q-expansion not available for newform {}'.format(label) in page.get_data(as_text=True)

+        # Test invalid labels return 404 with proper error message
+        for label in ['safeboating', 'invalid.label', '11.2.a', '11.2.a.a.extra']:
+            page = self.tc.get('/ModularForm/GL2/Q/holomorphic/download_qexp/{}'.format(label), follow_redirects=True)
+            assert 'Invalid label: {}'.format(label) in page.get_data(as_text=True)
+
     def test_download(self):
         r"""
         Test download function
5 changes: 2 additions & 3 deletions lmfdb/modular_curves/upload.py
@@ -1,7 +1,6 @@

 import re
-from datetime import datetime
-from lmfdb.utils.datetime_utils import UTC
+from lmfdb.utils.datetime_utils import utc_now_naive
 from flask import url_for
 from sage.all import ZZ, QQ, lazy_attribute, NumberField
 from lmfdb import db
@@ -233,7 +232,7 @@ def final_process(self, ids, F, by_table, cols):
        else:
            status = 3
            comment = ""
-        timestamp = datetime.now(UTC).isoformat()
+        timestamp = utc_now_naive().isoformat()
         ids = {upid: (status, timestamp, comment) for upid, data in self.delayed}

         # If other columns are added later, it's important that these be sorted (see lmfdb/uploads/process.py)
2 changes: 1 addition & 1 deletion lmfdb/number_fields/number_field.py
@@ -1237,7 +1237,7 @@ def __init__(self):
             knowl="nf.narrow_class_number",
             example="5")
         narrow_class_group = TextBox(
-            name="class_group",
+            name="narrow_class_group",
             label="Narrow class group structure",
             short_label='Narrow class group',
             knowl="nf.narrow_class_group",
6 changes: 3 additions & 3 deletions lmfdb/tests/test_dynamic_knowls.py
@@ -1,6 +1,6 @@

 from lmfdb.tests import LmfdbTest
-from lmfdb.utils.datetime_utils import UTC
+from lmfdb.utils.datetime_utils import utc_now_naive

 class DynamicKnowlTest(LmfdbTest):
     """
@@ -50,7 +50,7 @@ def test_prod_knowl_sync(self):
         # Create a different connection to devmirror to compare timestamps
         from lmfdb.utils.config import Configuration
         from psycopg2.sql import SQL
-        from datetime import timedelta, datetime
+        from datetime import timedelta
         dev_config = Configuration()
         # Modify configuration to connect to devmirror
         for D in [dev_config.default_args["postgresql"], dev_config.postgresql_options, dev_config.options["postgresql"]]:
@@ -63,7 +63,7 @@ def test_prod_knowl_sync(self):
         dev_db = PostgresDatabase(dev_config)

         # Updates happen every 20 minutes, so we only compare knowls older than that (plus a buffer).
-        cutoff = datetime.now(UTC) - timedelta(minutes=30)
+        cutoff = utc_now_naive() - timedelta(minutes=30)

         t_query = SQL("SELECT timestamp FROM kwl_knowls WHERE timestamp < %s LIMIT 1")
         dev_t = dev_db._execute(t_query, [cutoff]).fetchone()[0]
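Why a naive cutoff matters here is an inference, not stated in the diff: psycopg2 returns naive datetimes for timestamp-without-time-zone columns, and Python refuses to compare naive and aware datetimes, so a naive-UTC "now" keeps later comparisons against fetched timestamps from failing. A small illustration under that assumption:

from datetime import datetime, timezone, timedelta

db_timestamp = datetime(2024, 5, 1, 12, 0)            # naive, as returned for a plain timestamp column
aware_cutoff = datetime.now(timezone.utc) - timedelta(minutes=30)
naive_cutoff = aware_cutoff.replace(tzinfo=None)

# db_timestamp < aware_cutoff   # TypeError: can't compare offset-naive and offset-aware datetimes
print(db_timestamp < naive_cutoff)                     # fine: both sides are naive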
7 changes: 3 additions & 4 deletions lmfdb/uploads/process.py
@@ -8,8 +8,7 @@
 import sys
 import tempfile
 from collections import defaultdict
-from datetime import datetime
-from lmfdb.utils.datetime_utils import UTC
+from lmfdb.utils.datetime_utils import utc_now_naive
 here = os.path.dirname(os.path.abspath(__file__))
 data_folder = os.path.join(here, "data")
 upone, _ = os.path.split(here)
@@ -52,7 +51,7 @@ def process_all():
            else:
                status = 3
                comment = ""
-            timestamp = datetime.now(UTC).isoformat()
+            timestamp = utc_now_naive().isoformat()
             status_update[rec["section"]][rec["id"]] = (status, timestamp, comment)

     # There are some sections (like gonality propagation) that want to do more
@@ -66,7 +65,7 @@ def process_all():
     db.data_uploads.update_from_file(F.name, "id")
     db.data_uploads.cleanup_from_reload()
     os.unlink(F.name)
-    timestamp = datetime.now(UTC).isoformat().replace(":", "-").replace("T", "-").replace(".", "-")
+    timestamp = utc_now_naive().isoformat().replace(":", "-").replace("T", "-").replace(".", "-")
     uploads = []
     for (table, newrows), lines in by_table.items():
         nr = "t" if newrows else "f"
5 changes: 2 additions & 3 deletions lmfdb/uploads/verify.py
@@ -7,8 +7,7 @@
 import os
 import sys
 import tempfile
-from datetime import datetime
-from lmfdb.utils.datetime_utils import UTC
+from lmfdb.utils.datetime_utils import utc_now_naive
 here = os.path.dirname(os.path.abspath(__file__))
 upone, _ = os.path.split(here)
 uptwo, _ = os.path.split(upone)
@@ -35,7 +34,7 @@ def verify_all():
            else:
                status = 1
                comment = ""
-            timestamp = datetime.now(UTC).isoformat()
+            timestamp = utc_now_naive().isoformat()
             _ = F.write(f"{rec['id']}|{status}|{timestamp}|{timestamp}|{comment}\n")
         F.close()
         db.data_uploads.update_from_file(F.name, "id")
10 changes: 5 additions & 5 deletions lmfdb/users/pwdmanager.py
@@ -11,8 +11,8 @@
 from psycodict.base import PostgresBase
 from psycodict.encoding import Array
 from psycopg2.sql import SQL, Identifier, Placeholder
-from datetime import datetime, timedelta
-from lmfdb.utils.datetime_utils import UTC
+from datetime import timedelta
+from lmfdb.utils.datetime_utils import utc_now_naive

 from .main import logger

@@ -97,7 +97,7 @@ def new_user(self, uid, pwd=None, full_name=None, about=None, url=None):
         password = self.bchash(pwd)
         #TODO: use identifiers
         insertor = SQL("INSERT INTO userdb.users (username, bcpassword, created, full_name, about, url) VALUES (%s, %s, %s, %s, %s, %s)")
-        self._execute(insertor, [uid, password, datetime.now(UTC), full_name, about, url])
+        self._execute(insertor, [uid, password, utc_now_naive(), full_name, about, url])
         new_user = LmfdbUser(uid)
         return new_user

@@ -198,7 +198,7 @@ def create_tokens(self, tokens):
             return

         insertor = SQL("INSERT INTO userdb.tokens (id, expire) VALUES %s")
-        now = datetime.now(UTC)
+        now = utc_now_naive()
         tdelta = timedelta(days=1)
         exp = now + tdelta
         self._execute(insertor, [(t, exp) for t in tokens], values_list=True)
@@ -216,7 +216,7 @@ def delete_old_tokens(self):
             logger.info("no attempt to delete old tokens, not enough privileges")
             return
         deletor = SQL("DELETE FROM userdb.tokens WHERE expire < %s")
-        now = datetime.now(UTC)
+        now = utc_now_naive()
         tdelta = timedelta(days=8)
         cutoff = now - tdelta
         self._execute(deletor, [cutoff])
7 changes: 3 additions & 4 deletions lmfdb/utils/uploader.py
@@ -16,8 +16,7 @@
 import io
 import codecs
 import tempfile
-from datetime import datetime
-from lmfdb.utils.datetime_utils import UTC
+from lmfdb.utils.datetime_utils import utc_now_naive
 from flask import request, flash, send_file, render_template
 from flask_login import current_user
 from sage.misc.lazy_attribute import lazy_attribute
@@ -324,7 +323,7 @@ def save(self, data):
         columns = ["section", "status", "submitter", "data", "submitted", "verified", "reviewed", "processed", "updated", "version", "comment"]
         types = ["text", "smallint", "text", "jsonb", "timestamp without time zone", "timestamp without time zone", "timestamp without time zone", "timestamp without time zone", "timestamp without time zone", "smallint", "text"]
         _ = F.write("|".join(columns) + "\n" + "|".join(types) + "\n\n")
-        timestamp = datetime.now(UTC).isoformat()
+        timestamp = utc_now_naive().isoformat()
         for rec in data:
             _ = F.write(f"{self.name}|0|{current_user.id}|{copy_dumps(rec, 'jsonb')}|{timestamp}|\\N|\\N|\\N|{timestamp}|{self.version}|\n")
         F.close()
@@ -425,7 +424,7 @@ def review(self, info, reviewer, userid):
         elif new_status in [2, -2] and not all(status == 1 for status in db.data_uploads.search({"id":{"$in":ids}}, "status")):
             flash_error("You must select only rows that need review")
         else:
-            t0 = datetime.now(UTC)
+            t0 = utc_now_naive()
             payload = {"status": new_status, "reviewed": t0, "updated": t0}
             if comment:
                 payload["comment"] = comment
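A note on the isoformat calls sprinkled through this diff (an inference from the column types declared in save above, not something the diff states): an aware datetime serializes with a +00:00 offset, a naive one does not, and the latter matches the timestamp without time zone columns these strings are copied into.

from datetime import datetime, timezone

aware = datetime.now(timezone.utc)
naive = aware.replace(tzinfo=None)

print(aware.isoformat())   # e.g. 2024-05-01T12:34:56.789012+00:00
print(naive.isoformat())   # e.g. 2024-05-01T12:34:56.789012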