Commit 5c5b2ad

Feat: dataset pagination (#1075)
1 parent 49e9c7e commit 5c5b2ad

9 files changed: +501 -240 lines changed

web-app/cypress/e2e/feeds.cy.ts

Lines changed: 5 additions & 2 deletions

@@ -10,7 +10,7 @@ describe('Feed page', () => {
     cy.intercept('GET', `${apiBaseUrl}/v1/gtfs_feeds/test-516`, gtfsFeedJson);
     cy.intercept(
       'GET',
-      `${apiBaseUrl}/v1/gtfs_feeds/test-516/datasets`,
+      `${apiBaseUrl}/v1/gtfs_feeds/test-516/datasets?offset=0&limit=10`,
       datasetsFeedJson,
     );
     cy.visit('feeds/test-516');
@@ -25,7 +25,10 @@ describe('Feed page', () => {
   });

   it('should render the last updated date', () => {
-    cy.get('[data-testid="last-updated"]').should('contain', 'Quality report updated');
+    cy.get('[data-testid="last-updated"]').should(
+      'contain',
+      'Quality report updated',
+    );
   });

   it('should render download button', () => {

web-app/public/locales/en/feeds.json

Lines changed: 1 addition & 0 deletions

@@ -131,6 +131,7 @@
   },
   "datasetHistory": "Dataset History",
   "datasetHistoryDescription": "The Mobility Database fetches and stores new datasets once a day at midnight UTC.",
+  "allDatasetsLoaded": "All datasets loaded",
   "datasets": "Datasets",
   "validationReportNotAvailable": "Validation report not available",
   "runValidationReportYourself": "Run Validator Yourself",
web-app/src/app/screens/Feed/PreviousDatasets.tsx

Lines changed: 298 additions & 215 deletions
Large diffs are not rendered by default.
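The PreviousDatasets diff is collapsed above, so here is a rough reconstruction of the component's new props, inferred from the call site in Feed/index.tsx below. The interface name and the comments are assumptions; the prop names and the offset callback signature come straight from the diff.

import { type paths } from '../../services/feeds/types';

type Datasets =
  paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json'];

// Hypothetical sketch: the real component body (298 added lines) is not rendered in this diff.
export interface PreviousDatasetsProps {
  datasets: Datasets; // merged pages held in the Redux store
  isLoadingDatasets: boolean; // true while a page request is in flight
  hasloadedAllDatasets: boolean; // when true, show the "All datasets loaded" label instead of a load-more control
  loadMoreDatasets: (offset: number) => void; // ask the parent to fetch the next page starting at this offset
}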

web-app/src/app/screens/Feed/index.tsx

Lines changed: 30 additions & 4 deletions

@@ -34,6 +34,7 @@ import {
   selectBoundingBoxFromLatestDataset,
   selectDatasetsData,
   selectDatasetsLoadingStatus,
+  selectHasLoadedAllDatasets,
   selectLatestDatasetsData,
 } from '../../store/dataset-selectors';
 import PreviousDatasets from './PreviousDatasets';
@@ -121,6 +122,7 @@ export default function Feed(): React.ReactElement {
   const relatedFeeds = useSelector(selectRelatedFeedsData);
   const relatedGtfsRtFeeds = useSelector(selectRelatedGtfsRTFeedsData);
   const datasets = useSelector(selectDatasetsData);
+  const hasLoadedAllDatasets = useSelector(selectHasLoadedAllDatasets);
   const latestDataset = useSelector(selectLatestDatasetsData);
   const boundingBox = useSelector(selectBoundingBoxFromLatestDataset);
   const gtfsFeedData = useSelector(selectGTFSFeedData);
@@ -130,13 +132,26 @@ export default function Feed(): React.ReactElement {
   const isAuthenticatedOrAnonymous =
     useSelector(selectIsAuthenticated) || useSelector(selectIsAnonymous);
   const sortedProviders = formatProvidersSorted(feed?.provider ?? '');
+  const DATASET_CALL_LIMIT = 10;
+
+  const loadDatasets = (offset: number): void => {
+    if (feedId != undefined && hasLoadedAllDatasets === false) {
+      dispatch(
+        loadingDataset({
+          feedId,
+          offset,
+          limit: DATASET_CALL_LIMIT,
+        }),
+      );
+    }
+  };

   useEffect(() => {
     if (user != undefined && feedId != undefined && needsToLoadFeed) {
       dispatch(clearDataset());
       dispatch(loadingFeed({ feedId, feedDataType }));
       if (feedDataType === 'gtfs') {
-        dispatch(loadingDataset({ feedId }));
+        loadDatasets(0);
       }
     }
   }, [isAuthenticatedOrAnonymous, needsToLoadFeed]);
@@ -167,7 +182,7 @@ export default function Feed(): React.ReactElement {
       feedLoadingStatus === 'loaded' &&
       datasets == undefined
     ) {
-      dispatch(loadingDataset({ feedId }));
+      loadDatasets(0);
     }
     return () => {
       document.title = 'Mobility Database';
@@ -176,7 +191,9 @@ export default function Feed(): React.ReactElement {

   // The feedId parameter doesn't match the feedId in the store, so we need to load the feed and only render the loading message.
   const areDatasetsLoading =
-    feed?.data_type === 'gtfs' && datasetLoadingStatus === 'loading';
+    feed?.data_type === 'gtfs' &&
+    datasetLoadingStatus === 'loading' &&
+    datasets == undefined;
   const isCurrenltyLoadingFeed =
     feedLoadingStatus === 'loading' || areDatasetsLoading;
   if (needsToLoadFeed || isCurrenltyLoadingFeed) {
@@ -483,7 +500,16 @@ export default function Feed(): React.ReactElement {
           </Grid>
           {feed?.data_type === 'gtfs' && hasDatasets && (
             <Grid item xs={12}>
-              <PreviousDatasets datasets={datasets} />
+              <PreviousDatasets
+                datasets={datasets}
+                isLoadingDatasets={datasetLoadingStatus === 'loading'}
+                hasloadedAllDatasets={
+                  hasLoadedAllDatasets != undefined && hasLoadedAllDatasets
+                }
+                loadMoreDatasets={(offset: number) => {
+                  loadDatasets(offset);
+                }}
+              />
             </Grid>
           )}
         </Box>,
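Note that loadDatasets is gated on hasLoadedAllDatasets === false, so once the store reports that every page is in, further calls become no-ops. Inside PreviousDatasets the next offset is presumably the number of items already merged; a minimal sketch of such a handler (the handleLoadMore name is hypothetical):

// Hypothetical handler inside PreviousDatasets: request the next page starting
// where the already-merged list ends (10, 20, 30, ... with DATASET_CALL_LIMIT = 10).
const handleLoadMore = (): void => {
  loadMoreDatasets(datasets.length);
};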

web-app/src/app/store/dataset-reducer.ts

Lines changed: 9 additions & 10 deletions

@@ -1,20 +1,23 @@
 import { createSlice, type PayloadAction } from '@reduxjs/toolkit';
 import { type FeedErrors, FeedErrorSource, type FeedError } from '../types';
 import { type paths } from '../services/feeds/types';
+import { mergeAndSortDatasets } from '../utils/dataset';

 interface DatasetState {
   status: 'loading' | 'loaded';
   datasetId: string | undefined;
   data:
     | paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json']
     | undefined;
+  loadedAllData?: boolean;
   errors: FeedErrors;
 }

 const initialState: DatasetState = {
   status: 'loading',
   datasetId: undefined,
   data: undefined,
+  loadedAllData: false,
   errors: {
     [FeedErrorSource.DatabaseAPI]: null,
   },
@@ -26,6 +29,7 @@ export const datasetSlice = createSlice({
   reducers: {
     clearDataset: (state) => {
       state.data = initialState.data;
+      state.loadedAllData = initialState.loadedAllData;
       state.errors = initialState.errors;
       state.status = initialState.status;
       state.datasetId = initialState.datasetId;
@@ -42,10 +46,11 @@ export const datasetSlice = createSlice({
       state,
       action: PayloadAction<{
         feedId: string;
+        offset?: number;
+        limit?: number;
       }>,
     ) => {
       state.status = 'loading';
-      state.data = undefined;
       state.errors = {
         ...state.errors,
         DatabaseAPI: initialState.errors.DatabaseAPI,
@@ -55,18 +60,12 @@ export const datasetSlice = createSlice({
       state,
       action: PayloadAction<{
         data: paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json'];
+        loadedAllData?: boolean;
       }>,
     ) => {
       state.status = 'loaded';
-      state.data = action.payload?.data.sort((a, b) => {
-        if (a.downloaded_at !== undefined && b.downloaded_at !== undefined) {
-          const dateB = new Date(b.downloaded_at).getTime();
-          const dateA = new Date(a.downloaded_at).getTime();
-          return dateB - dateA;
-        }
-        return 0;
-      });
-      // state.datasetId = action.payload.data?.id;
+      state.loadedAllData = action.payload?.loadedAllData;
+      state.data = mergeAndSortDatasets(action.payload?.data, state.data);
       state.errors = {
         ...state.errors,
         DatabaseAPI: initialState.errors.DatabaseAPI,
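Two reducer changes carry the pagination: loadingDataset no longer wipes state.data, so previously loaded pages stay on screen while the next request is in flight, and loadingDatasetSuccess merges each page in via mergeAndSortDatasets instead of sorting a lone response. A rough illustration of the resulting state progression, assuming the slice exports its action creators and using made-up rows:

import { type paths } from '../services/feeds/types';
import {
  datasetSlice,
  loadingDataset,
  loadingDatasetSuccess,
} from './dataset-reducer';

type Datasets =
  paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json'];

// Made-up rows, shaped like the fixtures in the dataset utils test below.
const pageOne = [{ id: 'a', downloaded_at: '2024-02-01T00:00:00Z' }] as unknown as Datasets;
const pageTwo = [{ id: 'b', downloaded_at: '2024-01-01T00:00:00Z' }] as unknown as Datasets;

let state = datasetSlice.reducer(undefined, { type: 'init' });

// Page 1: status flips to 'loading' but data is left alone (no flash of an empty list).
state = datasetSlice.reducer(state, loadingDataset({ feedId: 'test-516', offset: 0, limit: 10 }));
state = datasetSlice.reducer(state, loadingDatasetSuccess({ data: pageOne, loadedAllData: false }));

// Page 2: the new rows are deduplicated and appended after the existing ones.
state = datasetSlice.reducer(state, loadingDataset({ feedId: 'test-516', offset: 10, limit: 10 }));
state = datasetSlice.reducer(state, loadingDatasetSuccess({ data: pageTwo, loadedAllData: true }));
// state.data now holds pageOne followed by pageTwo; state.loadedAllData === true.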

web-app/src/app/store/dataset-selectors.ts

Lines changed: 6 additions & 0 deletions

@@ -21,6 +21,12 @@ export const selectLatestDatasetsData = (
   return state.dataset.data !== undefined ? state.dataset.data[0] : undefined;
 };

+export const selectHasLoadedAllDatasets = (
+  state: RootState,
+): boolean | undefined => {
+  return state.dataset.loadedAllData;
+};
+
 export const selectBoundingBoxFromLatestDataset = createSelector(
   [selectLatestDatasetsData],
   (latestDataset): LatLngExpression[] | undefined => {

web-app/src/app/store/saga/dataset-saga.ts

Lines changed: 22 additions & 9 deletions

@@ -7,20 +7,33 @@ import { getGtfsFeedDatasets } from '../../services/feeds';
 import { type paths } from '../../services/feeds/types';
 import { loadingDatasetSuccess } from '../dataset-reducer';
 import { getUserAccessToken } from '../../services';
+import { areAllDatasetsLoaded } from '../../utils/dataset';

 function* getDatasetSaga({
-  payload: { feedId },
-}: PayloadAction<{ feedId: string }>): Generator {
+  payload: { feedId, offset, limit },
+}: PayloadAction<{
+  feedId: string;
+  offset?: number;
+  limit?: number;
+}>): Generator {
   try {
     if (feedId !== undefined) {
       const accessToken = (yield call(getUserAccessToken)) as string;
-      const datasets = (yield call(
-        getGtfsFeedDatasets,
-        feedId,
-        accessToken,
-        {},
-      )) as paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json'];
-      yield put(loadingDatasetSuccess({ data: datasets }));
+      const datasets = (yield call(getGtfsFeedDatasets, feedId, accessToken, {
+        offset,
+        limit,
+      })) as paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json'];
+      const hasLoadedAllData = areAllDatasetsLoaded(
+        datasets.length,
+        limit,
+        offset,
+      );
+      yield put(
+        loadingDatasetSuccess({
+          data: datasets,
+          loadedAllData: hasLoadedAllData,
+        }),
+      );
     }
   } catch (error) {
     yield put(loadingFeedFail(getAppError(error) as FeedError));
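The { offset, limit } argument replaces the empty options object and is presumably serialized into query parameters by getGtfsFeedDatasets, which is exactly what the Cypress intercept at the top of this commit pins down (datasets?offset=0&limit=10). A hypothetical sketch of that serialization, since the service code itself is not part of this diff:

// Hypothetical: how { offset, limit } could become the query string the
// e2e test intercepts. The real getGtfsFeedDatasets implementation is not shown.
function buildDatasetsUrl(
  apiBaseUrl: string,
  feedId: string,
  params: { offset?: number; limit?: number },
): string {
  const query = new URLSearchParams();
  if (params.offset != undefined) query.set('offset', String(params.offset));
  if (params.limit != undefined) query.set('limit', String(params.limit));
  const qs = query.toString();
  return `${apiBaseUrl}/v1/gtfs_feeds/${feedId}/datasets${qs !== '' ? `?${qs}` : ''}`;
}

// buildDatasetsUrl(apiBaseUrl, 'test-516', { offset: 0, limit: 10 })
//   -> `${apiBaseUrl}/v1/gtfs_feeds/test-516/datasets?offset=0&limit=10`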
web-app/src/app/utils/dataset.test.ts

Lines changed: 72 additions & 0 deletions

@@ -0,0 +1,72 @@
+import { type paths } from '../services/feeds/types';
+import { areAllDatasetsLoaded, mergeAndSortDatasets } from './dataset';
+
+type Datasets =
+  paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json'];
+
+const newDatasets = [
+  { id: 1, downloaded_at: '2023-01-02T00:00:00Z' },
+  { id: 2, downloaded_at: '2023-02-01T00:00:00Z' },
+] as unknown as Datasets;
+
+const existingDatasets = [
+  { id: 3, downloaded_at: '2023-01-12T00:00:00Z' },
+  { id: 4, downloaded_at: '2023-03-01T00:00:00Z' },
+] as unknown as Datasets;
+
+const duplicateDatasets = [
+  { id: 2, downloaded_at: '2023-02-01T00:00:00Z' },
+  { id: 5, downloaded_at: '2023-05-12T00:00:00Z' },
+] as unknown as Datasets;
+
+describe('Dataset utils', () => {
+  describe('mergeAndSortDatasets', () => {
+    it('should return the sorted datasets when no existing datasets are provided', () => {
+      const result = mergeAndSortDatasets(newDatasets, undefined);
+      expect(result).toEqual([
+        { id: 2, downloaded_at: '2023-02-01T00:00:00Z' },
+        { id: 1, downloaded_at: '2023-01-02T00:00:00Z' },
+      ]);
+    });
+
+    it('should return the merged and sorted datasets when existing datasets are provided', () => {
+      const result = mergeAndSortDatasets(newDatasets, existingDatasets);
+      expect(result).toEqual([
+        { id: 3, downloaded_at: '2023-01-12T00:00:00Z' },
+        { id: 4, downloaded_at: '2023-03-01T00:00:00Z' },
+        { id: 2, downloaded_at: '2023-02-01T00:00:00Z' },
+        { id: 1, downloaded_at: '2023-01-02T00:00:00Z' },
+      ]);
+    });
+
+    it('should filter out duplicates and return the merged and sorted datasets', () => {
+      const result = mergeAndSortDatasets(newDatasets, duplicateDatasets);
+      expect(result).toEqual([
+        { id: 2, downloaded_at: '2023-02-01T00:00:00Z' },
+        { id: 5, downloaded_at: '2023-05-12T00:00:00Z' },
+        { id: 1, downloaded_at: '2023-01-02T00:00:00Z' },
+      ]);
+    });
+  });
+  describe('areAllDatasetsLoaded', () => {
+    it('should return true if offset and limit are undefined', () => {
+      const result = areAllDatasetsLoaded(3, undefined, undefined);
+      expect(result).toBe(true);
+    });
+
+    it('should return true if the number of datasets returned is less than the limit', () => {
+      const result = areAllDatasetsLoaded(3, 5, undefined);
+      expect(result).toBe(true);
+    });
+
+    it('should return undefined if offset is defined and limit is undefined', () => {
+      const result = areAllDatasetsLoaded(3, undefined, 0);
+      expect(result).toBe(undefined);
+    });
+
+    it('should return false if the number of datasets returned is greater than the limit', () => {
+      const result = areAllDatasetsLoaded(3, 2, 5);
+      expect(result).toBe(false);
+    });
+  });
+});

web-app/src/app/utils/dataset.ts

Lines changed: 58 additions & 0 deletions

@@ -0,0 +1,58 @@
+import { type paths } from '../services/feeds/types';
+
+type Datasets =
+  paths['/v1/gtfs_feeds/{id}/datasets']['get']['responses'][200]['content']['application/json'];
+
+export function mergeAndSortDatasets(
+  newDatasets: Datasets,
+  existingDatasets: Datasets | undefined,
+): Datasets {
+  let formattedDatasets: Datasets = [];
+  if (existingDatasets === undefined) {
+    formattedDatasets = newDatasets.sort((a, b) => {
+      if (a.downloaded_at !== undefined && b.downloaded_at !== undefined) {
+        const dateB = new Date(b.downloaded_at).getTime();
+        const dateA = new Date(a.downloaded_at).getTime();
+        return dateB - dateA;
+      }
+      return 0;
+    });
+  } else {
+    const existingIds = new Set(existingDatasets.map((item) => item.id));
+    const newFilteredData = newDatasets.filter(
+      (item) => !existingIds.has(item.id),
+    );
+    const sortedNewFilteredData = newFilteredData.sort((a, b) => {
+      if (a.downloaded_at !== undefined && b.downloaded_at !== undefined) {
+        const dateB = new Date(b.downloaded_at).getTime();
+        const dateA = new Date(a.downloaded_at).getTime();
+        return dateB - dateA;
+      }
+      return 0;
+    });
+    formattedDatasets = [...existingDatasets, ...sortedNewFilteredData];
+  }
+  return formattedDatasets;
+}
+
+/*
+Determines whether all datasets have been loaded, given the offset and limit used for the request:
+- true: every dataset for the given feed has been loaded
+- false: more datasets may remain beyond the current offset / limit
+- undefined: not enough information to tell
+*/
+export function areAllDatasetsLoaded(
+  numberOfDatasetsLoaded: number,
+  limit?: number,
+  offset?: number,
+): boolean | undefined {
+  let hasLoadedAllData: boolean | undefined = false;
+  if (offset == undefined && limit == undefined) {
+    hasLoadedAllData = true;
+  } else if (limit != undefined && numberOfDatasetsLoaded < limit) {
+    hasLoadedAllData = true;
+  } else if (offset != undefined && limit == undefined) {
+    hasLoadedAllData = undefined;
+  }
+  return hasLoadedAllData;
+}
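Read alongside the saga: after each response, the number of rows returned is compared against the requested limit. For example:

areAllDatasetsLoaded(10, 10, 0); // false: a full page came back, so more pages may exist
areAllDatasetsLoaded(7, 10, 10); // true: a short page means the end was reached
areAllDatasetsLoaded(7, undefined, undefined); // true: an unpaginated call returns everything
areAllDatasetsLoaded(7, undefined, 10); // undefined: an offset without a limit is ambiguous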
