17 changes: 10 additions & 7 deletions admin/pageaudit/pageaudit/settings.py
@@ -28,15 +28,15 @@
SECRET_KEY = 'qhk(v0g!4#(+_$$36hyks$nx!wkq$g&8qfgb92)92e)jkm1g%a'

# SECURITY WARNING: don't run with debug turned on in production!
# To test Django in "debug = false" mode, but using Django to serve static files as in dev,
# run this locally: `manage.py runserver --insecure`
DEBUG = os.getenv('DJANGO_DEBUG_FLAG', False)
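# Note: os.getenv() returns a string whenever the variable is set, so any non-empty
# DJANGO_DEBUG_FLAG value (even "0" or "False") is truthy and turns debug on here;
# leave the variable unset to keep the default of DEBUG = False.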

ALLOWED_HOSTS = ['127.0.0.1', 'localhost']

if os.getenv('DJANGO_ALLOWED_HOST'):
    ALLOWED_HOSTS.append(os.getenv('DJANGO_ALLOWED_HOST'))

INTERNAL_IPS = ['127.0.0.1',]

DBBACKUP_STORAGE = 'django.core.files.storage.FileSystemStorage'
@@ -64,6 +64,7 @@
    'report',
    'dbbackup',
    'compressor',
    'django_crontab',
]

MIDDLEWARE = [
@@ -118,7 +119,7 @@
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': os.getenv('DJANGO_DB_NAME', 'perf_lab'),
        'USER': os.getenv('DJANGO_DB_USER', 'perf_lab'),
        'PASSWORD': os.getenv('DJANGO_DB_PASSWORD', ''),
@@ -190,11 +191,11 @@


## Compressor module settings.
## Compressor defaults to the OPPOSITE of DEBUG.
## To force compressor locally during debug, add COMPRESS_ENABLED = True to your settings_local.py
COMPRESS_ROOT = os.path.join(BASE_DIR, "static/")
COMPRESS_CSS_FILTERS = [
    'compressor.filters.css_default.CssAbsoluteFilter',
    'compressor.filters.cssmin.rCSSMinFilter'
]
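# As the comment above notes, compression can be forced on while DEBUG is True by
# overriding the flag in settings_local.py. A minimal sketch of that override file
# (contents illustrative, not part of this commit):
#
#     # settings_local.py
#     COMPRESS_ENABLED = True  # run django-compressor even during local debug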

@@ -208,11 +209,13 @@
    'django.contrib.auth.backends.ModelBackend',
]

CRONJOBS = [
    ('10 03 * * *', 'report.cron.daily_report')
]
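# The schedule above runs report.cron.daily_report once a day at 03:10 (server time).
# Note that django-crontab only writes CRONJOBS entries into the system crontab when
# `python manage.py crontab add` is run (`python manage.py crontab show` lists what is
# installed), so that step presumably has to happen on deploy for the job to fire.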

## Local settings override for ease, instead of/in addition to using ENV vars.
## Create a settings_local.py and override any vars above, even if they were set in your ENV.
try:
    from .settings_local import *
except ImportError:
    pass

9 changes: 9 additions & 0 deletions admin/pageaudit/report/cron.py
@@ -0,0 +1,9 @@
from report.daily import report_yesterday_data

def daily_report():
    """
    Kick off the daily report
    """
    DAILY_DUMP_PATH = '/data/reports/pagelab/daily'

    report_yesterday_data(output_path=DAILY_DUMP_PATH)
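For a quick manual check of the new job, the report function can be called directly with a scratch output path (a sketch, not part of this commit; assumes a configured Django environment such as `python manage.py shell`):

    from report.daily import report_yesterday_data

    # Dump yesterday's LighthouseRun rows to a CSV under /tmp instead of the
    # production path used by the cron entry.
    report_yesterday_data(output_path='/tmp')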
99 changes: 99 additions & 0 deletions admin/pageaudit/report/daily.py
@@ -0,0 +1,99 @@
import csv
import datetime
import time

TODAY = datetime.datetime.today()
YESTERDAY = TODAY - datetime.timedelta(1)

DAILY_DUMP_PATH = '/data/reports/pagelab/daily'

field_names=['url', 'url2', 'views', 'hist', 'sequence',]

import django

from pageaudit.settings import *

from django.contrib.auth.models import User
from django.db import transaction
from django.db.models import Avg, Max, Min, Q, Sum

from report.models import (
    Url,
    LighthouseRun,
    LighthouseDataRaw,
    LighthouseDataUsertiming,
    UserTimingMeasureName,
    UserTimingMeasure,
    UserTimingMeasureAverage
)

superuser = User.objects.get(id=1)

def report_yesterday_data(output_path='/tmp'):
    """
    Get the data generated yesterday
    """

    include_user_timing = True
    timestr = time.strftime("%Y-%m-%d-%H%M%S")
    path = "%s/%s.csv" % (output_path, timestr,)
    file = open(path, 'w')
    writer = csv.writer(file)

    with file:
        writer.writerow([
            "test_id",
            "url_id",
            "created_date",
            "url",
            "performance_score",
            "total_byte_weight",
            "number_network_requests",
            "time_to_first_byte",
            "first_contentful_paint",
            "first_meaningful_paint",
            "dom_content_loaded",
            "dom_loaded",
            "interactive",
            "masthead_onscreen",
            "redirect_hops",
            "redirect_wasted_ms",
            "sequence",
            "user_timing_data"
        ])

        run_data = LighthouseRun.objects.filter(
            created_date__date=YESTERDAY.date()
        )

        for run in run_data:
            if include_user_timing:
                try:
                    user_timing_data = str(LighthouseDataUsertiming.objects.get(lighthouse_run=run).report_data)
                except Exception as ex:
                    user_timing_data = ''
            else:
                user_timing_data = ''
            try:
                writer.writerow([
                    run.id,
                    run.url.id,
                    run.created_date,
                    run.url.url,
                    run.performance_score,
                    run.total_byte_weight,
                    run.number_network_requests,
                    run.time_to_first_byte,
                    run.first_contentful_paint,
                    run.first_meaningful_paint,
                    run.dom_content_loaded,
                    run.dom_loaded,
                    run.interactive,
                    run.masthead_onscreen,
                    run.redirect_hops,
                    run.redirect_wasted_ms,
                    run.url.id,
                    user_timing_data
                ])
            except Exception as ex:
                print(ex)
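One operational assumption in report_yesterday_data: open(path, 'w') requires the output directory (/data/reports/pagelab/daily for the cron path) to already exist. If that cannot be guaranteed, a small guard could be added before the open call (a sketch, not part of this commit):

    import os

    # Inside report_yesterday_data, before open(path, 'w'):
    os.makedirs(output_path, exist_ok=True)  # no-op when the directory already exists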
98 changes: 94 additions & 4 deletions admin/pageaudit/report/import_csv.py
@@ -1,19 +1,106 @@
#!/usr/bin/env python

import csv
import datetime
import time

TODAY = datetime.datetime.today()
YESTERDAY = TODAY - datetime.timedelta(1)

DAILY_DUMP_PATH = '/data/reports/pagelab/daily'

field_names=['url', 'url2', 'views', 'hist', 'sequence',]

import django

from pageaudit.settings import *

time.sleep(10)

from django.contrib.auth.models import User
from django.db import transaction
from django.db.models import Avg, Max, Min, Q, Sum
from django.utils import timezone

from report.models import (
    Url,
    LighthouseRun,
    LighthouseDataRaw,
    LighthouseDataUsertiming,
    UserTimingMeasureName,
    UserTimingMeasure,
    UserTimingMeasureAverage
)

superuser = User.objects.get(id=1)

def report_yesterday_data():
    """
    Get the data generated yesterday
    """
    include_user_timing = True
    timestr = time.strftime("%Y-%m-%d-%H%M%S")
    path = "%s/%s.csv" % (DAILY_DUMP_PATH, timestr,)
    file = open(path, 'w')
    writer = csv.writer(file)

    with file:
        writer.writerow([
            "test_id",
            "url_id",
            "created_date",
            "url",
            "performance_score",
            "total_byte_weight",
            "number_network_requests",
            "time_to_first_byte",
            "first_contentful_paint",
            "first_meaningful_paint",
            "dom_content_loaded",
            "dom_loaded",
            "interactive",
            "masthead_onscreen",
            "redirect_hops",
            "redirect_wasted_ms",
            "sequence",
            "user_timing_data"
        ])

        run_data = LighthouseRun.objects.filter(
            created_date__date=YESTERDAY.date()
        )

        for run in run_data:
            if include_user_timing:
                try:
                    user_timing_data = str(LighthouseDataUsertiming.objects.get(lighthouse_run=run).report_data)
                except Exception as ex:
                    user_timing_data = ''
            else:
                user_timing_data = ''
            try:
                writer.writerow([
                    run.id,
                    run.url.id,
                    run.created_date,
                    run.url.url,
                    run.performance_score,
                    run.total_byte_weight,
                    run.number_network_requests,
                    run.time_to_first_byte,
                    run.first_contentful_paint,
                    run.first_meaningful_paint,
                    run.dom_content_loaded,
                    run.dom_loaded,
                    run.interactive,
                    run.masthead_onscreen,
                    run.redirect_hops,
                    run.redirect_wasted_ms,
                    run.url.id,
                    user_timing_data
                ])
            except Exception as ex:
                print(ex)


def load_urls_into_db(path):
    f = open(path, 'r')
@@ -118,3 +205,6 @@ def update_urls(path):
    except Exception as ex:
        print(ex)


if __name__ == '__main__':
    report_yesterday_data()
1 change: 1 addition & 0 deletions admin/requirements.txt
@@ -10,3 +10,4 @@ ipdb
django-inline-static
urllib3
django_compressor
django_crontab