-from app.api.helpers.errors import UnprocessableEntityError
+import csv
+import os
+import uuid
+from datetime import datetime, timedelta
+
+from flask import current_app
+
+from app.api.helpers.errors import BadRequestError, UnprocessableEntityError
 from app.api.helpers.static import STATION_TYPE
+from app.api.helpers.storage import UPLOAD_PATHS, UploadedFile, generate_hash, upload
 from app.models.session import Session
 from app.models.station import Station
 from app.models.user_check_in import UserCheckIn


+def export_csv(data):
+    """Build a CSV of user check-ins matching the given filters and return its path."""
+    if data is None:
+        raise BadRequestError({'source': data}, 'Bad Request Error')
+
+    query_ = UserCheckIn.query.join(Station)
+    if data.get('session_id'):
+        query_ = query_.filter(UserCheckIn.session_id == data['session_id'])
+    if data.get('date'):
+        # Limit results to the single day given as YYYY-MM-DD.
+        start = datetime.strptime(data['date'], '%Y-%m-%d')
+        end = start + timedelta(days=1)
+        query_ = query_.filter(UserCheckIn.created_at >= start).filter(
+            UserCheckIn.created_at < end
+        )
+    if data.get('track_type'):
+        query_ = query_.filter(UserCheckIn.track_name == data['track_type'])
+    if data.get('type'):
+        if '&' in data['type']:
+            # An '&'-separated value matches any of the listed station types.
+            query_ = query_.filter(
+                Station.station_type.in_(x.strip() for x in data['type'].split('&'))
+            )
+        else:
+            query_ = query_.filter(Station.station_type == data['type'])
+    user_check_ins = query_.order_by(UserCheckIn.created_at.desc()).all()
+
+    try:
+        filedir = os.path.join(current_app.config.get('BASE_DIR'), 'static/uploads/temp/')
+        if not os.path.isdir(filedir):
+            os.makedirs(filedir)
+
+        identifier = uuid.uuid1().hex
+        filename = f"user-check-in-{identifier}.csv"
+        file_path = os.path.join(filedir, filename)
+
+        # newline='' stops the csv module from emitting blank rows on Windows.
+        with open(file_path, "w", newline="") as temp_file:
+            writer = csv.writer(temp_file)
+            for row in create_file_csv(user_check_ins):
+                writer.writerow(row)
+        csv_file = UploadedFile(file_path=file_path, filename=filename)
+        upload_path = UPLOAD_PATHS['exports-temp']['csv'].format(
+            event_id='admin', identifier=identifier
+        )
+        upload(csv_file, upload_path)
+        # Remove the local temp file once it has been uploaded.
+        os.remove(file_path)
+        return f'static/media/{upload_path}/{generate_hash(upload_path)}/{filename}'
+    except Exception as e:
+        raise BadRequestError({'source': e}, 'Bad Request Error')
+
+
+def create_file_csv(user_check_ins):
+    """Return CSV rows, header row first, for the given check-in records."""
+    headers = [
+        'Ticket Id',
+        'Date Time',
+        'Track Name',
+        'Session Name',
+        'Speaker Name',
+        'Type',
+    ]
+
+    columns = [
+        'ticket_holder_id',
+        'created_at',
+        'track_name',
+        'session_name',
+        'speaker_name',
+        'type',
+    ]
+    rows = [headers]
+    for user_check_in in user_check_ins:
+        data = []
+        for column in columns:
+            if column == 'type':
+                # The station type lives on the related Station row.
+                data.append(user_check_in.station.station_type)
+                continue
+            if column == 'created_at':
+                data.append(user_check_in.created_at.strftime('%Y-%m-%d %H:%M:%S'))
+                continue
+            data.append(getattr(user_check_in, column))
+        rows.append(data)
+
+    return rows
+
+
 def validate_microlocation(station: Station, session: Session):
     """
     validate if microlocation matches
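For context, here is a minimal usage sketch of the new export_csv helper, assuming a Flask application context and seeded check-in data; the filter values are hypothetical and not taken from this PR:

    # Hypothetical invocation; every value below is illustrative only.
    filters = {
        'date': '2023-05-01',            # restricts results to one day
        'type': 'check-in & check-out',  # '&' selects multiple station types
    }
    path = export_csv(filters)
    # path is a relative URL of the form static/media/<upload_path>/<hash>/<filename>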