Skip to content

Commit 7b5a5b9

Browse files
committed
Fix bug and add warning
1 parent da09ba5 commit 7b5a5b9

File tree

9 files changed

+4040
-23652
lines changed

9 files changed

+4040
-23652
lines changed

README.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,8 @@ Lately, there have been problems retrieving the data from NACIS Natural Earth to
104104
In case users want to see the distribution of the components by country, they will need a Google API Key, Client User ID and Secret. The library Geocoder is used to get the data from Google, but it also allows the users of the ADC-Toolbox to use other APIs to do reverse geocoding (retrieving location information by coordinates). In the function scatter_plot, they will find the following line, which they can rewrite as they wish. The list of providers can be found in <a href = "https://github.com/DenisCarriere/geocoder" target = "_blank"> <em>Geocoder</em>'s GitHub repository</a>.
105105
```python
106106
>>> merge_df['Country'] = merge_df.apply(lambda row: geocoder.google([row['latitude'], row['longitude']],
107-
method='reverse', key = google_api_key).country_long, axis = 1)
107+
method = 'reverse', key = google_api_key).country_long,
108+
axis = 1)
108109
```
109110
If they do not want to edit anything and prefer to run the code using the Google API, then they should edit the file <em>keys.txt</em> under the folder <em>data</em>, and write three lines below their ADS API key. These lines should contain, in this order, the Google API Key, Client User ID and Secret. They can get their credentials in <a href = "https://console.cloud.google.com/projectselector/google/maps-apis/credentials" target = "_blank">Google Cloud Platform</a>.
110111

functions/functions_general.ipynb

Lines changed: 21 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -259,11 +259,16 @@
259259
"\n",
260260
" \"\"\"\n",
261261
"\n",
262-
" bbox = ((lon_min, lat_min), (lon_max, lat_max))\n",
262+
" if lon_min > lon_max or lat_min > lat_max:\n",
263+
" print('ERROR: The minimum longitude/latitude is larger than the maximum longitude/latitude.')\n",
264+
" raise KeyboardInterrupt()\n",
265+
" \n",
266+
" else:\n",
267+
" bbox = ((lon_min, lat_min), (lon_max, lat_max))\n",
263268
"\n",
264-
" print('SEARCH BOUNDING BOX')\n",
265-
" print(f'Latitudes: from {lat_min} to {lat_max}')\n",
266-
" print(f'Longitudes: from {lon_min} to {lon_max}')\n",
269+
" print('SEARCH BOUNDING BOX')\n",
270+
" print(f'Latitudes: from {lat_min} to {lat_max}')\n",
271+
" print(f'Longitudes: from {lon_min} to {lon_max}')\n",
267272
"\n",
268273
" return bbox"
269274
]
@@ -832,7 +837,7 @@
832837
" print('The sum will be matched to the sensor data by nearest neighbours.')\n",
833838
"\n",
834839
" model_ds_time = model_ds_time.component.sum(dim = 'hybrid', skipna = False)\n",
835-
" \n",
840+
" \n",
836841
" match_df_time['step_index'] = match_df_time.apply(lambda row: nearest_neighbour(model_times, row['delta_time']), axis = 1)\n",
837842
" match_df_time['model_time'] = match_df_time.apply(lambda row: model_ds_time.valid_time[row['step_index']].values, axis = 1)\n",
838843
" match_df_time['model_column'] = match_df_time.apply(lambda row: model_ds_time.sel(\n",
@@ -2230,7 +2235,8 @@
22302235
"\n",
22312236
" # Reverse geocoding\n",
22322237
" merge_df['country'] = merge_df.apply(lambda row: geocoder.google([row['latitude'], row['longitude']], \n",
2233-
" method='reverse', key = google_api_key).country_long, axis = 1)\n",
2238+
" method = 'reverse', key = google_api_key).country_long, \n",
2239+
" axis = 1)\n",
22342240
"\n",
22352241
" # Find data for the countries in search list\n",
22362242
" merge_df = merge_df[merge_df['country'].isin(plot_countries)]\n",
@@ -2463,6 +2469,10 @@
24632469
" # Drop NaN values\n",
24642470
" merge_df = merge_df.dropna()\n",
24652471
"\n",
2472+
" # Transform string to tuple (if there is only one element)\n",
2473+
" if isinstance(regions_names, str):\n",
2474+
" regions_names = tuple([regions_names])\n",
2475+
" \n",
24662476
" # Drop the dates that have NaN values\n",
24672477
" plot_dates = np.intersect1d(plot_dates, np.unique(merge_df.index.get_level_values(2)))\n",
24682478
"\n",
@@ -2665,13 +2675,16 @@
26652675
" Returns:\n",
26662676
" trends_table (dataframe): Trends table\n",
26672677
" \"\"\"\n",
2668-
"\n",
2678+
" \n",
26692679
" if len(np.unique(merge_df.reset_index()['time'])) >= 12:\n",
26702680
" \n",
26712681
" # Sinusoidal model\n",
26722682
" def objective_function_sin(X, C, D, E, N):\n",
26732683
" return C * np.sin(D * X + E) + N\n",
26742684
"\n",
2685+
" # Drop NaN values\n",
2686+
" merge_df = merge_df.dropna()\n",
2687+
"\n",
26752688
" # Transform string to tuple (if there is only one element)\n",
26762689
" if isinstance(regions_names, str):\n",
26772690
" regions_names = tuple([regions_names])\n",
@@ -2681,9 +2694,6 @@
26812694
"\n",
26822695
" trends_table = []\n",
26832696
"\n",
2684-
" # Drop NaN values\n",
2685-
" merge_df = merge_df.dropna()\n",
2686-
"\n",
26872697
" for region_lats, region_lons, region_name in zip(regions_lats, regions_lons, regions_names):\n",
26882698
"\n",
26892699
" summary_region = []\n",
@@ -3099,7 +3109,7 @@
30993109
" coords_list (list): List of search coordinates (e.g. (lat, lon, lat, lon, ...)\n",
31003110
" regions_names (list): Region names\n",
31013111
" \"\"\"\n",
3102-
" \n",
3112+
"\n",
31033113
" if ((sensor_break_date != None and model_break_date != None and sensor_break_date != model_break_date) or\n",
31043114
" (sensor_break_date != None and model_break_date == None) or\n",
31053115
" (sensor_break_date == None and model_break_date != None)):\n",

main_cams_gome_L2.ipynb

Lines changed: 56 additions & 17813 deletions
Large diffs are not rendered by default.

0 commit comments

Comments
 (0)