Develop #4079 (Merged)

Commits (22)
cc1791d
Improve exports to be faster and add some tests to cover the e…
smihalache06 Sep 29, 2025
a192dfd
Fix sort by country on locations admin
smihalache06 Sep 30, 2025
638bfef
Add sort by profile status
smihalache06 Sep 30, 2025
39d0272
Fix tests, delete unnecessary test (#4068)
smihalache06 Oct 2, 2025
0e8c498
Merge branch 'develop' into 278287_improve_lmsm_admin_panel_exports
smihalache06 Oct 2, 2025
3560089
Merge pull request #4063 from unicef/278287_improve_lmsm_admin_panel_…
smihalache06 Oct 2, 2025
b1070a7
[LMSM] Refactor Export and Improves on EXTERNAL Views (#4071)
smihalache06 Oct 3, 2025
6413913
Overwrite import materials for specific ones
smihalache06 Oct 3, 2025
d2163ee
Merge branch 'develop' into lmsm_import_materials_fix
smihalache06 Oct 3, 2025
b04eb98
Fix tests
smihalache06 Oct 3, 2025
e4d2edd
Copy all the data from the old ItemTransferHistory model to the new o…
smihalache06 Oct 6, 2025
1d5a853
PMP: allow removing of all HR reports
emaciupe Oct 6, 2025
9adc688
Merge pull request #4075 from unicef/274658-reporting-periods-changes
emaciupe Oct 6, 2025
e36003b
Integrate SSO Zendesk auth
smihalache06 Oct 7, 2025
d35755a
Fix isort
smihalache06 Oct 7, 2025
641e8c1
Merge branch 'develop' into lmsm_import_materials_fix
emaciupe Oct 8, 2025
2ac4936
Merge pull request #4077 from unicef/278970_zendesk_integration
emaciupe Oct 8, 2025
c6c3380
Merge branch 'develop' into lmsm_fix_item_transfer_history_copy_data
emaciupe Oct 8, 2025
7a5ece6
Merge branch 'develop' into lmsm_import_materials_fix
emaciupe Oct 8, 2025
44fe5a5
Merge pull request #4074 from unicef/lmsm_fix_item_transfer_history_c…
emaciupe Oct 8, 2025
79570f2
Merge branch 'develop' into lmsm_import_materials_fix
emaciupe Oct 8, 2025
caefee3
Merge pull request #4072 from unicef/lmsm_import_materials_fix
emaciupe Oct 8, 2025
src/etools/applications/core/zendesk_sso.py (109 additions, 0 deletions)
@@ -0,0 +1,109 @@
import hashlib
import logging
import time
from urllib.parse import urlencode

from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, JsonResponse
from django.views.decorators.http import require_http_methods

import jwt

logger = logging.getLogger(__name__)


@login_required
@require_http_methods(["GET"])
def zendesk_sso_redirect(request):
    try:
        user = request.user

        zendesk_subdomain = getattr(settings, 'ZENDESK_SUBDOMAIN', None)
        shared_secret = getattr(settings, 'ZENDESK_SHARED_SECRET', None)

        if not zendesk_subdomain or not shared_secret:
            logger.error("Zendesk SSO configuration missing")
            return JsonResponse({
                'error': 'Zendesk SSO is not properly configured'
            }, status=500)

        return_to = request.GET.get('return_to', '')
        request_from_app = request.GET.get('request_from_app', '')

        iat = int(time.time())
        payload = {
            'jti': hashlib.md5(f"{user.email}{iat}".encode()).hexdigest(),
            'iat': iat,
            'email': user.email.lower(),
            'name': user.get_full_name() or user.username,
            'external_id': str(user.pk),
            'update': True,
        }

        tags = []
        if hasattr(user, 'profile'):
            if user.profile.organization:
                tags.append(f"org_{user.profile.organization.name.replace(' ', '_').lower()}")
                payload['organization'] = user.profile.organization.name
            if user.profile.country:
                tags.append(f"country_{user.profile.country.name.replace(' ', '_').lower()}")
                payload['user_fields'] = {
                    'country': user.profile.country.name,
                    'workspace': user.profile.country.name
                }

        tags.append('end_user')
        payload['role'] = 'end-user'
        if request_from_app:
            tags.append(request_from_app)

        if tags:
            payload['tags'] = tags

        token = jwt.encode(payload, shared_secret, algorithm='HS256')

        zendesk_sso_url = f"https://{zendesk_subdomain}.zendesk.com/access/jwt"

        params = {'jwt': token}
        if return_to:
            params['return_to'] = return_to

        redirect_url = f"{zendesk_sso_url}?{urlencode(params)}"
        return HttpResponseRedirect(redirect_url)

    except Exception as e:
        logger.exception(f"Error during Zendesk SSO: {str(e)}")
        return JsonResponse({
            'error': 'An error occurred during SSO authentication'
        }, status=500)


@login_required
@require_http_methods(["GET"])
def zendesk_sso_info(request):
    try:
        zendesk_subdomain = getattr(settings, 'ZENDESK_SUBDOMAIN', None)
        shared_secret = getattr(settings, 'ZENDESK_SHARED_SECRET', None)
        is_configured = bool(zendesk_subdomain and shared_secret)
        response_data = {
            'sso_enabled': is_configured,
            'user': {
                'email': request.user.email,
                'name': request.user.get_full_name() or request.user.username,
            }
        }
        if is_configured:
            response_data['zendesk_subdomain'] = zendesk_subdomain
            response_data['knowledge_base_url'] = (
                f"https://{zendesk_subdomain}.zendesk.com/hc/en-us/categories/"
                "31285460572180-Last-Mile-Supply-Monitoring-Module"
            )

        return JsonResponse(response_data)

    except Exception as e:
        logger.exception(f"Error getting Zendesk SSO info: {str(e)}")
        return JsonResponse({
            'error': 'An error occurred while fetching SSO information'
        }, status=500)
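
For context, a minimal sketch of how these two views might be exposed; the URL paths and route names below are assumptions for illustration and are not part of this diff. Zendesk verifies the HS256 signature with the same shared secret and matches the user by external_id/email.

# urls.py -- hypothetical wiring; path strings and route names are placeholders.
from django.urls import path

from etools.applications.core import zendesk_sso

urlpatterns = [
    # Signs a JWT for the logged-in user and redirects to Zendesk's /access/jwt endpoint.
    path('zendesk/sso/', zendesk_sso.zendesk_sso_redirect, name='zendesk-sso'),
    # Reports whether SSO is configured, plus the current user's identity.
    path('zendesk/sso/info/', zendesk_sso.zendesk_sso_info, name='zendesk-sso-info'),
]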
src/etools/applications/last_mile/admin_panel/csv_exporter.py (156 additions, 11 deletions)
@@ -1,14 +1,159 @@
-import tablib
+import csv
+from io import StringIO
+from typing import Any, Dict, Iterator, List
 
 
-class CsvExporter:
+class BaseCSVExporter:
 
-    def export(self, rows):
-        dataset = tablib.Dataset()
-        if not rows:
-            return dataset
-        headers = list(rows[0].keys())
-        dataset.headers = headers
-        for row in rows:
-            dataset.append([row.get(h) for h in headers])
-        return dataset
+    DEFAULT_CHUNK_SIZE = 100
+
+    def __init__(self, chunk_size: int = DEFAULT_CHUNK_SIZE):
+        self.chunk_size = chunk_size
+
+    def _write_csv_row(self, row_data: List[Any]) -> str:
+        output = StringIO()
+        writer = csv.writer(output)
+        writer.writerow(row_data)
+        output.seek(0)
+        return output.read()
+
+    def _serialize_item(self, item: Any, serializer_class: Any) -> Dict[str, Any]:
+        return serializer_class(item).data
+
+    def _extract_values(self, data: Dict[str, Any], headers: List[str]) -> List[Any]:
+        return [data.get(header, '') for header in headers]
+
+    def _get_first_item(self, queryset):
+        for item in queryset[:1]:
+            return item
+        return None
+
+
+class UsersCSVExporter(BaseCSVExporter):
+
+    def generate_csv_data(self, queryset, serializer_class) -> Iterator[str]:
+        headers = {
+            "first_name": "First Name",
+            "last_name": "Last Name",
+            "email": "Email",
+            "implementing_partner": "Implementing Partner",
+            "country": "Country",
+            "is_active": "Active",
+            "last_login": "Last Login",
+            "status": "Status"
+        }
+
+        yield self._write_csv_row(headers.values())
+
+        for user in queryset.iterator(chunk_size=self.chunk_size):
+            serialized_data = self._serialize_item(user, serializer_class)
+            row_values = self._extract_values(serialized_data, headers.keys())
+            yield self._write_csv_row(row_values)
+
+
+class LocationsCSVExporter(BaseCSVExporter):
+
+    def generate_csv_data(self, queryset, serializer_class, only_locations=False) -> Iterator[str]:
+        standard_headers = {
+            "id": "Unique ID",
+            "name": "Name",
+            "primary_type": "Primary Type",
+            "p_code": "P Code",
+            "lat": "Latitude",
+            "lng": "Longitude",
+            "status": "Status",
+            "implementing_partner": "Implementing Partner",
+            "region": "Region",
+            "district": "District",
+            "country": "Country"
+        }
+
+        expanded_headers = {
+            "id": "Unique ID",
+            "name": "Name",
+            "primary_type": "Primary Type",
+            "p_code": "P Code",
+            "lat": "Latitude",
+            "lng": "Longitude",
+            "status": "Status",
+            "implementing_partner": "Implementing Partner",
+            "region": "Region",
+            "district": "District",
+            "country": "Country",
+            "transfer_name": "Transfer Name",
+            "transfer_ref": "Transfer Reference",
+            "item_id": "Item ID",
+            "item_name": "Item Name",
+            "item_qty": "Item Quantity"
+        }
+
+        first_item = self._get_first_item(queryset)
+        if not first_item:
+            return
+
+        serializer = serializer_class(first_item)
+        has_row_expansion = hasattr(serializer, 'generate_rows')
+
+        if only_locations and has_row_expansion:
+            has_row_expansion = False
+
+        if has_row_expansion:
+            yield self._write_csv_row(expanded_headers.values())
+
+            first_rows = serializer.generate_rows(first_item)
+            for row_data in first_rows:
+                row_values = self._extract_values(row_data, expanded_headers.keys())
+                yield self._write_csv_row(row_values)
+
+            for item in queryset[1:].iterator(chunk_size=self.chunk_size):
+                item_serializer = serializer_class(item)
+                rows = item_serializer.generate_rows(item)
+                for row_data in rows:
+                    row_values = self._extract_values(row_data, expanded_headers.keys())
+                    yield self._write_csv_row(row_values)
+        else:
+            yield self._write_csv_row(standard_headers.values())
+
+            for item in queryset.iterator(chunk_size=self.chunk_size):
+                serialized_data = self._serialize_item(item, serializer_class)
+                row_values = self._extract_values(serialized_data, standard_headers.keys())
+                yield self._write_csv_row(row_values)
+
+
+class UserLocationsCSVExporter(BaseCSVExporter):
+
+    def generate_csv_data(self, queryset, serializer_class) -> Iterator[str]:
+        headers = {
+            "id": "Unique ID",
+            "first_name": "First Name",
+            "last_name": "Last Name",
+            "email": "Email",
+            "implementing_partner": "Implementing Partner",
+            "location": "Location"
+        }
+
+        yield self._write_csv_row(headers.values())
+
+        for user in queryset.iterator(chunk_size=self.chunk_size):
+            serialized_data = self._serialize_item(user, serializer_class)
+            row_values = self._extract_values(serialized_data, headers.keys())
+            yield self._write_csv_row(row_values)
+
+
+class POITypesCSVExporter(BaseCSVExporter):
+
+    def generate_csv_data(self, queryset, serializer_class) -> Iterator[str]:
+        headers = {
+            "id": "Unique ID",
+            "created": "Created",
+            "modified": "Modified",
+            "name": "Name",
+            "category": "Category"
+        }
+
+        yield self._write_csv_row(headers.values())
+
+        for poi_type in queryset.iterator(chunk_size=self.chunk_size):
+            serialized_data = self._serialize_item(poi_type, serializer_class)
+            row_values = self._extract_values(serialized_data, headers.keys())
+            yield self._write_csv_row(row_values)
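
Because every exporter yields rows lazily and walks its queryset with iterator(chunk_size=...), a view can stream an export without materializing the whole file in memory. A minimal sketch, assuming a plain user queryset; the view and serializer names below are placeholders, not from this PR.

# Hypothetical streaming view; UsersCSVExporter comes from the module above,
# UserAdminSerializer is a placeholder for whichever serializer the panel uses.
from django.contrib.auth import get_user_model
from django.http import StreamingHttpResponse

from etools.applications.last_mile.admin_panel.csv_exporter import UsersCSVExporter


def export_users_csv(request):
    exporter = UsersCSVExporter(chunk_size=500)
    rows = exporter.generate_csv_data(get_user_model().objects.all(), UserAdminSerializer)
    response = StreamingHttpResponse(rows, content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="users.csv"'
    return response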
src/etools/applications/last_mile/admin_panel/serializers.py (7 additions, 3 deletions)
@@ -425,12 +425,16 @@ def get_lat(self, obj):
     def get_lng(self, obj):
         return obj.point.x if obj.point else None
 
-    def base_representation(self, instance):
+    def to_representation(self, instance):
         data = super().to_representation(instance)
-        parent_locations = ParentLocationsSerializer(instance.parent).data
-        data.update(parent_locations)
+        if instance.parent:
+            parent_locations = ParentLocationsSerializer(instance.parent).data
+            data.update(parent_locations)
         return data
 
+    def base_representation(self, instance):
+        return self.to_representation(instance)
+
     def generate_rows(self, instance):
         base = self.base_representation(instance)
         transfers = (
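
The to_representation change guards top-level locations, whose instance.parent is None and which the old base_representation handed straight to ParentLocationsSerializer. A short illustration with placeholder names (the diff shows only the serializer's methods, so the model and serializer class here are assumptions):

# Hypothetical usage; PointOfInterest and PointOfInterestAdminSerializer are placeholders.
location = PointOfInterest.objects.filter(parent__isnull=True).first()
data = PointOfInterestAdminSerializer(location).data
# Parent-derived keys (region/district/country) are simply absent for such rows;
# downstream, _extract_values() falls back to '' so those CSV columns export empty.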