forked from marxanweb/marxan-server
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathserver.py
More file actions
6625 lines (6012 loc) · 347 KB
/
server.py
File metadata and controls
6625 lines (6012 loc) · 347 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/home/ubuntu/miniconda2/envs/python36/bin/python3.6
#
# Copyright (c) 2020 Andrew Cottam.
#
# This file is part of marxan-server
# (see https://github.com/marxanweb/marxan-server).
#
# License: European Union Public Licence V. 1.2, see https://opensource.org/licenses/EUPL-1.2
#
"""Core module for handling all marxan-server REST API requests.
This module is run with the Tornado Web Server to handle requests to the Marxan
software and to return JSON data for those requests.
This module defines the following:
- Global variables (constants) that are used in the module.
- Private module functions (prefixed with an underscore) that are the internal
implementation.
- Request handler classes. HTTPRequest handlers
(MarxanRESTHandler descendents) and WebSocket handlers for long-running processes
(MarxanWebSocketHandler descendents).
- Utiliy classes, e.g. for interacting with PostGIS.
"""
import psutil, urllib, tornado.options, webbrowser, logging, fnmatch, json, psycopg2, pandas, os, re, time, traceback, glob, time, datetime, select, subprocess, sys, zipfile, shutil, uuid, signal, colorama, io, requests, platform, ctypes, aiopg, asyncio, aiohttp, monkeypatch, numpy, shlex, threading
from psycopg2.extensions import register_adapter, AsIs
from tornado.websocket import WebSocketClosedError
from tornado.iostream import StreamClosedError
from tornado.process import Subprocess
from tornado.log import LogFormatter
from tornado.web import HTTPError
from tornado.web import StaticFileHandler
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.platform.asyncio import AnyThreadEventLoopPolicy
from tornado import gen, queues, httpclient, concurrent
from google.cloud import logging as googlelogger
from datetime import timedelta, timezone
from colorama import Fore, Back, Style
from sqlalchemy import create_engine
from collections import OrderedDict
from subprocess import Popen, PIPE, CalledProcessError
from threading import Thread
from urllib.parse import urlparse
from urllib import request
from psycopg2 import sql
from mapbox import Uploader
from mapbox import errors
from osgeo import ogr
####################################################################################################################################################################################################################################################################
## constant declarations
####################################################################################################################################################################################################################################################################
##SECURITY SETTINGS
PERMITTED_METHODS = ["getServerData","createUser","validateUser","resendPassword","testTornado", "getProjectsWithGrids"]
"""REST services that do not need authentication/authorisation."""
#role-based access control: each list names the REST services that role may NOT call
ROLE_UNAUTHORISED_METHODS = {
    "ReadOnly": ["createProject","createImportProject","upgradeProject","deleteProject","cloneProject","createProjectGroup","deleteProjects","renameProject","updateProjectParameters","getCountries","deletePlanningUnitGrid","createPlanningUnitGrid","uploadTilesetToMapBox","uploadFileToFolder","uploadFile","importPlanningUnitGrid","createFeaturePreprocessingFileFromImport","createUser","getUsers","updateUserParameters","getFeature","importFeatures","getPlanningUnitsData","updatePUFile","getSpeciesData","getSpeciesPreProcessingData","updateSpecFile","getProtectedAreaIntersectionsData","getMarxanLog","getBestSolution","getOutputSummary","getSummedSolution","getMissingValues","preprocessFeature","preprocessPlanningUnits","preprocessProtectedAreas","runMarxan","stopProcess","testRoleAuthorisation","deleteFeature","deleteUser","getRunLogs","clearRunLogs","updateWDPA","unzipShapefile","getShapefileFieldnames","createFeatureFromLinestring","runGapAnalysis","toggleEnableGuestUser","importGBIFData","deleteGapAnalysis","shutdown","addParameter","block", "resetDatabase","cleanup","exportProject","importProject",'getCosts','updateCosts','deleteCost','runSQLFile','exportPlanningUnitGrid','exportFeature'],
    "User": ["testRoleAuthorisation","deleteFeature","getUsers","deleteUser","deletePlanningUnitGrid","clearRunLogs","updateWDPA","toggleEnableGuestUser","shutdown","addParameter","block", "resetDatabase","cleanup",'runSQLFile'],
    "Admin": []
}
"""Dict that controls access to REST services using role-based authentication. Add REST services that you want to lock down to specific roles - a class added to an array will make that method unavailable for that role"""
MARXAN_SERVER_VERSION = "v1.0.9"
"""The version of marxan-server."""
MARXAN_REGISTRY = "https://marxanweb.github.io/general/registry/marxan.json"
"""The url of the Marxan Registry which contains information on hosted Marxan Web servers, base maps and other global level variables"""
GUEST_USERNAME = "guest"
"""The name of the guest user"""
#error messages returned to the client on failed authentication / missing CORS referer
NOT_AUTHENTICATED_ERROR = "Request could not be authenticated. No secure cookie found."
NO_REFERER_ERROR = "The request header does not specify a referer and this is required for CORS access."
MAPBOX_USER = "andrewcottam"
"""The default name for the Mapbox user account to store Vector tiles"""
#filenames
SERVER_CONFIG_FILENAME = "server.dat"
"""The name of the server configuration file."""
MARXAN_LOG_FILENAME = 'marxan-server.log'
"""The name of the server configuration file."""
RUN_LOG_FILENAME = "runlog.dat"
"""The name of the server configuration file."""
USER_DATA_FILENAME = "user.dat"
"""The name of the server configuration file."""
#standard Marxan input/output file names used inside each project folder
PROJECT_DATA_FILENAME = "input.dat"
OUTPUT_LOG_FILENAME = "output_log.dat"
PLANNING_UNITS_FILENAME ="pu.dat"
PUVSPR_FILENAME = "puvspr.dat"
SPEC_FILENAME ="spec.dat"
BOUNDARY_LENGTH_FILENAME = "bounds.dat"
BEST_SOLUTION_FILENAME = "output_mvbest"
OUTPUT_SUMMARY_FILENAME = "output_sum"
SUMMED_SOLUTION_FILENAME = "output_ssoln"
FEATURE_PREPROCESSING_FILENAME = "feature_preprocessing.dat"
"""The name of the feature preprocessing file which contains summary information on the total number of planning units and total area for a feature intersecting a planning grid."""
PROTECTED_AREA_INTERSECTIONS_FILENAME = "protected_area_intersections.dat"
"""The name of the protected area intersection file which contains information on the intersections between planning units and the WDPA."""
NOTIFICATIONS_FILENAME = "notifications.dat"
SHUTDOWN_FILENAME = "shutdown.dat"
WDPA_DOWNLOAD_FILENAME = "wdpa.zip"
#file prefixes
SOLUTION_FILE_PREFIX = "output_r"
MISSING_VALUES_FILE_PREFIX = "output_mv"
#docs settings
DOCS_FOLDER = "/marxan-server/docs/build/html"
#export settings
EXPORT_F_SHP_FOLDER = "f_shps"
"""The name of the folder where feature shapefiles are exported to during a project export."""
EXPORT_PU_SHP_FOLDER = "pu_shps"
"""The name of the folder where planning grid shapefiles are exported to during a project export."""
EXPORT_F_METADATA = 'features.csv'
"""The name of the file that contains the feature metadata data during a project export."""
EXPORT_PU_METADATA = 'planning_grid.csv'
"""The name of the file that contains the planning grid metadata data during a project export."""
#gbif constants
GBIF_API_ROOT = "https://api.gbif.org/v1/"
"""The GBIF API root url"""
GBIF_CONCURRENCY = 10
"""How many concurrent download processes to do for GBIF."""
GBIF_PAGE_SIZE = 300
"""The page size for occurrence records for GBIF requests"""
GBIF_POINT_BUFFER_RADIUS = 1000
"""The radius in meters to buffer all lat/lng coordinates for GBIF occurrence data"""
GBIF_OCCURRENCE_LIMIT = 200000
"""From the GBIF docs here: https://www.gbif.org/developer/occurrence#search"""
UNIFORM_COST_NAME = "Equal area"
"""The name of the cost profile that is equal area."""
DOCS_ROOT = "https://docs.marxanweb.org/"
"""The url for the documentation root."""
ERRORS_PAGE = DOCS_ROOT + "errors.html"
"""The errors HTML page url."""
#NOTE(review): tornado.locks is not imported explicitly - presumably pulled in via 'from tornado import gen, queues' above; confirm
SHUTDOWN_EVENT = tornado.locks.Event()
"""A Tornado event to allow it to exit gracefully."""
PING_INTERVAL = 30000
"""Interval between regular pings to keep a connection alive when using websockets."""
SHOW_START_LOG = True
"""To disable the start logging from unit tests."""
DICT_PAD = 25
"""Text is right padded this much in dictionary outputs."""
LOGGING_LEVEL = logging.INFO
"""Tornado logging level that controls what is logged to the console - options are logging.INFO, logging.DEBUG, logging.WARNING, logging.ERROR, logging.CRITICAL. All SQL statements can be logged by setting this to logging.DEBUG."""
#pdoc3 dict to whitelist private members for the documentation
__pdoc__ = {}
privateMembers = ['getGeometryType','_addParameter', '_authenticate', '_authoriseRole', '_authoriseUser', '_checkCORS', '_checkZippedShapefile', '_cleanup', '_cloneProject', '_copyDirectory', '_createFeaturePreprocessingFileFromImport', '_createProject', '_createPuFile', '_createUser', '_createZipfile', '_dataFrameContainsValue', '_debugSQLStatement', '_deleteAllFiles', '_deleteArchiveFiles', '_deleteCost', '_deleteFeature', '_deleteFeatureClass', '_deletePlanningUnitGrid', '_deleteProject', '_deleteRecordsInTextFile', '_deleteShutdownFile', '_deleteTileset', '_deleteZippedShapefile', '_dismissNotification', '_estimatePlanningUnitCount', '_exportAndZipShapefile', '_finishCreatingFeature', '_finishImportingFeature', '_getAllProjects', '_getAllSpeciesData', '_getBestSolution', '_getCosts', '_getDictValue', '_getEndOfLine', '_getExceptionLastLine', '_getFeature', '_getFilesInFolderRecursive', '_getGML', '_getIntArrayFromArg', '_getKeyValue', '_getKeyValuesFromFile', '_getKeys', '_getMBAT', '_getMarxanLog', '_getMissingValues', '_getNotificationsData', '_getNumberOfRunsCompleted', '_getNumberOfRunsRequired', '_getOutputFilename', '_getOutputSummary', '_getPlanningUnitGrids', '_getPlanningUnitsCostData', '_getPlanningUnitsData', '_getProjectData', '_getProjectInputData', '_getProjectInputFilename', '_getProjects', '_getProjectsForFeature', '_getProjectsForPlanningGrid', '_getProjectsForUser', '_getProtectedAreaIntersectionsData', '_getPuvsprStats', '_getRESTMethod', '_getRunLogs', '_getSafeProjectName', '_getServerData', '_getShapefileFieldNames', '_getSimpleArguments', '_getSolution', '_getSpeciesData', '_getSpeciesPreProcessingData', '_getSummedSolution', '_getUniqueFeatureclassName', '_getUserData', '_getUsers', '_getUsersData', '_get_free_space_mb', '_guestUserEnabled', '_importDataFrame', '_importPlanningUnitGrid', '_invalidateProtectedAreaIntersections', '_isProjectRunning', '_loadCSV', '_normaliseDataFrame', '_padDict', '_preprocessProtectedAreas', 
'_puidsArrayToPuDatFormat', '_raiseError', '_readFile', '_readFileUnicode', '_reprocessProtectedAreas', '_requestIsWebSocket', '_resetNotifications', '_runCmd', '_setCORS', '_setFolderPaths', '_setGlobalVariables', '_shapefileHasField', '_tilesetExists', '_txtIntsToList', '_unzipFile', '_unzipShapefile', '_updateCosts', '_updateDataFrame', '_updateParameters', '_updatePuFile', '_updateRunLog', '_updateSpeciesFile', '_uploadTileset', '_uploadTilesetToMapbox', '_validateArguments', '_writeCSV', '_writeFile', '_writeFileUnicode', '_writeToDatFile', '_zipfolder']
#whitelist each private member so pdoc3 includes it in the generated documentation
for m in privateMembers:
    __pdoc__.update({m: True})
####################################################################################################################################################################################################################################################################
## generic functions that dont belong to a class so can be called by subclasses of tornado.web.RequestHandler and tornado.websocket.WebSocketHandler equally - underscores are used so they dont mask the equivalent url endpoints
####################################################################################################################################################################################################################################################################
async def _setGlobalVariables():
    """Run when the server starts to read the server configuration from the server.dat file and set all of the global path variables

    Also initialises colorama, the global run-log lock, the PostGIS connection
    pool and logs the startup banner to the console/log file.

    Args:
        None
    Returns:
        None
    """
    global MBAT
    global GLOBAL_LOCK
    global MARXAN_FOLDER
    global MARXAN_USERS_FOLDER
    global MARXAN_CLIENT_BUILD_FOLDER
    global CLUMP_FOLDER
    global EXPORT_FOLDER
    global IMPORT_FOLDER
    global MARXAN_EXECUTABLE
    global MARXAN_WEB_RESOURCES_FOLDER
    global CASE_STUDIES_FOLDER
    global EMPTY_PROJECT_TEMPLATE_FOLDER
    global OGR2OGR_EXECUTABLE
    global GDAL_DATA_ENVIRONMENT_VARIABLE
    global CONDA_DEFAULT_ENV_ENVIRONMENT_VARIABLE
    global MARXAN_CLIENT_VERSION
    global CONNECTION_STRING
    global COOKIE_RANDOM_VALUE
    global PERMITTED_DOMAINS
    global SERVER_NAME
    global SERVER_DESCRIPTION
    global DATABASE_NAME
    global DATABASE_HOST
    global DATABASE_USER
    global DATABASE_PASSWORD
    global DATABASE_VERSION_POSTGRESQL
    global DATABASE_VERSION_POSTGIS
    global PORT
    global CERTFILE
    global KEYFILE
    global PLANNING_GRID_UNITS_LIMIT
    global DISABLE_SECURITY
    global DISABLE_FILE_LOGGING
    global ENABLE_RESET
    global pg
    #get data from the marxan registry
    MBAT = _getMBAT()
    #initialise colorama to be able to show log messages on windows in color
    colorama.init()
    #create a global lock for writing to the run log file in a thread-safe way
    GLOBAL_LOCK = threading.Lock()
    #register numpy int64 with psycopg2
    psycopg2.extensions.register_adapter(numpy.int64, psycopg2._psycopg.AsIs)
    #get the folder from this files path
    MARXAN_FOLDER = os.path.dirname(os.path.realpath(__file__)) + os.sep
    #get the data in the server configuration file
    serverData = _getKeyValuesFromFile(MARXAN_FOLDER + SERVER_CONFIG_FILENAME)
    #get the database connection string
    SERVER_NAME = _getDictValue(serverData,'SERVER_NAME')
    SERVER_DESCRIPTION = _getDictValue(serverData,'SERVER_DESCRIPTION')
    DATABASE_NAME = _getDictValue(serverData,'DATABASE_NAME')
    DATABASE_HOST = _getDictValue(serverData,'DATABASE_HOST')
    DATABASE_USER = _getDictValue(serverData,'DATABASE_USER')
    DATABASE_PASSWORD = _getDictValue(serverData,'DATABASE_PASSWORD')
    #NOTE(review): duplicate read - DATABASE_NAME is already set a few lines above
    DATABASE_NAME = _getDictValue(serverData,'DATABASE_NAME')
    PORT = str(_getDictValue(serverData, 'PORT'))
    CERTFILE = _getDictValue(serverData,'CERTFILE')
    KEYFILE = _getDictValue(serverData,'KEYFILE')
    DISABLE_SECURITY = _getDictValue(serverData,'DISABLE_SECURITY')
    DISABLE_FILE_LOGGING = _getDictValue(serverData,'DISABLE_FILE_LOGGING')
    ENABLE_RESET = _getDictValue(serverData,'ENABLE_RESET')
    #NOTE(review): the connection string embeds the password in plain text and is logged below under "Database:"
    CONNECTION_STRING = "host='" + DATABASE_HOST + "' dbname='" + DATABASE_NAME + "' user='" + DATABASE_USER + "' password='" + DATABASE_PASSWORD + "'"
    #initialise the connection pool
    pg = PostGIS()
    await pg.initialise()
    #get the database version
    results = await pg.execute("SELECT version(), PostGIS_Version();", returnFormat="Array")
    DATABASE_VERSION_POSTGRESQL, DATABASE_VERSION_POSTGIS = results[0]
    COOKIE_RANDOM_VALUE = _getDictValue(serverData,'COOKIE_RANDOM_VALUE')
    PERMITTED_DOMAINS = _getDictValue(serverData,'PERMITTED_DOMAINS').split(",")
    PLANNING_GRID_UNITS_LIMIT = int(_getDictValue(serverData,'PLANNING_GRID_UNITS_LIMIT'))
    #get the GDAL_DATA environment variable
    if ('GDAL_DATA' in os.environ.keys()):
        GDAL_DATA_ENVIRONMENT_VARIABLE = os.environ['GDAL_DATA']
    else:
        GDAL_DATA_ENVIRONMENT_VARIABLE = "Not set"
    #get the name of the current conda environment
    if ('CONDA_DEFAULT_ENV' in os.environ.keys()):
        CONDA_DEFAULT_ENV_ENVIRONMENT_VARIABLE = os.environ['CONDA_DEFAULT_ENV']
    else:
        CONDA_DEFAULT_ENV_ENVIRONMENT_VARIABLE = "Not set"
    #OUTPUT THE INFORMATION ABOUT THE MARXAN-SERVER SOFTWARE
    log("Starting marxan-server " + MARXAN_SERVER_VERSION + " listening on port " + PORT + " ..", Fore.GREEN)
    #print out which operating system is being used
    log(_padDict("Operating system:", platform.system(), DICT_PAD))
    log(_padDict("Tornado version:", tornado.version, DICT_PAD))
    log(_padDict("Permitted domains:" , _getDictValue(serverData,'PERMITTED_DOMAINS'), DICT_PAD))
    #output the ssl information if it is being used
    if CERTFILE != "None":
        log(_padDict("SSL certificate file:", CERTFILE,DICT_PAD))
        testUrl = "https://"
    else:
        log(_padDict("SSL certificate file:", "None", DICT_PAD))
        testUrl = "http://"
    #omit the port from the test url when running on the default http port
    testUrl = testUrl + "<host>:" + PORT + "/marxan-server/testTornado" if (PORT != '80') else testUrl + "<host>/marxan-server/testTornado"
    if KEYFILE != "None":
        log(_padDict("Private key file:", KEYFILE, DICT_PAD))
    else:
        log(_padDict("Private key file:", "None", DICT_PAD))
    log(_padDict("Database:", CONNECTION_STRING, DICT_PAD))
    log(_padDict("PostgreSQL:", DATABASE_VERSION_POSTGRESQL, DICT_PAD))
    log(_padDict("PostGIS:", DATABASE_VERSION_POSTGIS, DICT_PAD))
    log(_padDict("WDPA Version:", _getDictValue(serverData,'WDPA_VERSION'), DICT_PAD))
    log(_padDict("Planning grid limit:", str(PLANNING_GRID_UNITS_LIMIT), DICT_PAD))
    log(_padDict("Disable security:", str(DISABLE_SECURITY), DICT_PAD))
    log(_padDict("Disable file logging:", str(DISABLE_FILE_LOGGING), DICT_PAD))
    log(_padDict("Enable reset:", str(ENABLE_RESET), DICT_PAD))
    log(_padDict("Conda environment:", CONDA_DEFAULT_ENV_ENVIRONMENT_VARIABLE, DICT_PAD))
    log(_padDict("Python executable:", sys.executable, DICT_PAD))
    #get the path to the ogr2ogr file - it should be in the miniconda bin folder
    if platform.system() == "Windows":
        ogr2ogr_executable = "ogr2ogr.exe"
        OGR2OGR_PATH = os.path.dirname(sys.executable) + os.sep + "library" + os.sep + "bin" + os.sep # sys.executable is the Python.exe file and will likely be in C:\Users\a_cottam\Miniconda2 folder - ogr2ogr is then in /library/bin on windows
        marxan_executable = "Marxan.exe" #TODO Use Marxan_x64.exe for 64 bit processors
        stopCmd = "Press CTRL+C or CTRL+Fn+Pause to stop the server\n"
    else:
        ogr2ogr_executable = "ogr2ogr"
        OGR2OGR_PATH = os.path.dirname(sys.executable) + os.sep # sys.executable is the Python.exe file and will likely be in /home/ubuntu//miniconda2/bin/ - the same place as ogr2ogr
        marxan_executable = "MarOpt_v243_Linux64"
        stopCmd = "Press CTRL+C to stop the server\n"
    #if the ogr2ogr executable path is not in the miniconda bin directory, then hard-code it here and uncomment the line
    #OGR2OGR_PATH = ""
    OGR2OGR_EXECUTABLE = OGR2OGR_PATH + ogr2ogr_executable
    if not os.path.exists(OGR2OGR_EXECUTABLE):
        raise MarxanServicesError(" ogr2ogr executable:\t'" + OGR2OGR_EXECUTABLE + "' could not be found. Set it manually in the marxan-server.py file.")
    else:
        log(_padDict("ogr2ogr executable:", OGR2OGR_EXECUTABLE, DICT_PAD))
    #set the various folder paths
    MARXAN_USERS_FOLDER = MARXAN_FOLDER + "users" + os.sep
    CLUMP_FOLDER = MARXAN_USERS_FOLDER + "_clumping" + os.sep
    EXPORT_FOLDER = MARXAN_FOLDER + "exports" + os.sep
    IMPORT_FOLDER = MARXAN_FOLDER + "imports" + os.sep
    MARXAN_EXECUTABLE = MARXAN_FOLDER + marxan_executable
    MARXAN_WEB_RESOURCES_FOLDER = MARXAN_FOLDER + "_marxan_web_resources" + os.sep
    CASE_STUDIES_FOLDER = MARXAN_WEB_RESOURCES_FOLDER + "case_studies" + os.sep
    EMPTY_PROJECT_TEMPLATE_FOLDER = MARXAN_WEB_RESOURCES_FOLDER + "empty_project" + os.sep
    log(_padDict("GDAL_DATA path:", GDAL_DATA_ENVIRONMENT_VARIABLE, DICT_PAD))
    log(_padDict("Marxan executable:", MARXAN_EXECUTABLE, DICT_PAD))
    log("\nTo test marxan-server goto " + testUrl, Fore.GREEN)
    log(stopCmd, Fore.RED)
    #get the parent folder
    PARENT_FOLDER = MARXAN_FOLDER[:MARXAN_FOLDER[:-1].rindex(os.sep)] + os.sep
    #OUTPUT THE INFORMATION ABOUT THE MARXAN-CLIENT SOFTWARE IF PRESENT
    packageJson = PARENT_FOLDER + "marxan-client" + os.sep + "package.json"
    if os.path.exists(packageJson):
        MARXAN_CLIENT_BUILD_FOLDER = PARENT_FOLDER + "marxan-client" + os.sep + "build"
        #open the node.js package.json file for the marxan-client app to read the version of the software
        f = open(packageJson)
        MARXAN_CLIENT_VERSION = json.load(f)['version']
        f.close()
        log("marxan-client " + MARXAN_CLIENT_VERSION + " installed", Fore.GREEN)
    else:
        MARXAN_CLIENT_BUILD_FOLDER = ""
        MARXAN_CLIENT_VERSION = "Not installed"
        log("marxan-client is not installed\n", Fore.GREEN)
def _padDict(k, v, w):
    """Outputs a key: value from a dictionary into 2 columns, the first of width w.

    Args:
        k (string): The dictionary key
        v (string): The dictionary value
        w (int): The width of the key column - the key will be padded to this width
    Returns:
        string: The padded key/value pair as a single string
    """
    #ljust is a no-op when len(k) >= w, matching the previous manual padding
    return k.ljust(w) + v
def log(_str, _color = Fore.RESET):
    """Logs the string to the logging handlers using the passed colorama color

    Args:
        _str (string): The string to log
        _color (int): The color to use. The default is Fore.RESET.
    Returns:
        None
    """
    #logging is suppressed entirely during unit tests
    if not SHOW_START_LOG:
        return
    #echo to the console in the requested color
    print(_color + _str)
    #append to the server log file unless file logging has been disabled
    if not DISABLE_FILE_LOGGING:
        _writeFileUnicode(MARXAN_FOLDER + MARXAN_LOG_FILENAME, _str + "\n", "a")
def _raiseError(obj, msg):
    """Generic function to send an error response and close the connection.

    Used in all MarxanRESTHandler descendent classes.

    Args:
        obj (MarxanRESTHandler): The request handler instance
        msg (string): The error message to send
    Returns:
        None
    """
    #only respond when the object actually supports sending a response
    can_respond = hasattr(obj, "send_response")
    if can_respond:
        obj.send_response({"error": msg})
        obj.finish()
    #always record the error with the logging handlers
    logging.warning(msg)
def _getRESTMethod(path):
    """Gets the method part of the REST service path, e.g. /marxan-server/validateUser will return validateUser. Returns an empty string if the method is not found.

    Args:
        path (string): The request path
    Returns:
        string: The method name
    """
    #everything after the last slash is the method; no slash means no method
    _, sep, method = path.rpartition("/")
    return method if sep else ""
def _createUser(obj, user, fullname, email, password):
    """Creates a new user in the file system and stores the users metadata in the user.dat file.

    Args:
        obj (MarxanRESTHandler): The request handler instance
        user (string): The user to create. This will be the name of the folder created in the MARXAN_USERS_FOLDER folder
        fullname (string): The fullname of the user
        email (string): The email address of the user
        password (string): The password of the user. CAUTION: This is stored in plain text in the user.dat file.
    Returns:
        None
    Raises:
        MarxanServicesError: If the user already exists.
    """
    #fail early if the username is already taken
    if user in _getUsers():
        raise MarxanServicesError("User '" + user + "' already exists")
    #the users folder name doubles as their account identifier
    obj.folder_user = MARXAN_USERS_FOLDER + user + os.sep
    os.mkdir(obj.folder_user)
    #seed the new account with the template user.dat and notifications.dat files
    for template in (USER_DATA_FILENAME, NOTIFICATIONS_FILENAME):
        shutil.copyfile(MARXAN_WEB_RESOURCES_FOLDER + template, obj.folder_user + template)
    #record the personal details and the creation timestamp in the user.dat file
    _updateParameters(obj.folder_user + USER_DATA_FILENAME, {'NAME': fullname,'EMAIL': email,'PASSWORD': password, 'CREATEDATE': datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")})
def _getUsers():
    """Gets a list of all registered users.

    Args:
        None
    Returns:
        string[]: List of all registered users
    """
    #get a list of folders underneath the marxan users folder
    user_folders = glob.glob(MARXAN_USERS_FOLDER + "*/")
    #convert the folder paths into bare folder names
    users = [user[:-1][user[:-1].rfind(os.sep)+1:] for user in user_folders]
    #system folders that live alongside the user folders and are not users
    excluded = {"input", "output", "MarxanData", "MarxanData_unix"}
    #dont include the system folders or any users with an underscore (e.g. the _clumping user)
    return [u for u in users if u not in excluded and u[:1] != "_"]
def _getUsersData(users):
    """Gets all the users data for the passed users.

    Args:
        users (string[]): The user names to get the data for.
    Returns:
        dict[]: The users data as an array of dict, in sorted user name order.
    """
    usersData = []
    #a reusable carrier object - _getUserData reads folder_user and sets userData on it
    tmpObj = ExtendableObject()
    #iterate in sorted order without mutating the caller's list (the previous
    #in-place users.sort() changed the argument as a side effect)
    for user in sorted(users):
        tmpObj.folder_user = MARXAN_USERS_FOLDER + user + os.sep
        #get the data for the user from their user.dat file
        _getUserData(tmpObj)
        #create a dict to save the data
        combinedDict = tmpObj.userData.copy() # pylint:disable=no-member
        combinedDict.update({'user': user})
        usersData.append(combinedDict)
    return usersData
def _getNotificationsData(obj):
    """Gets the notification data for a user.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        string[]: The users notification data
    """
    #the notifications file holds a comma-separated list of dismissed notification ids
    contents = _readFile(obj.folder_user + NOTIFICATIONS_FILENAME)
    #an empty file means no notifications have been dismissed
    return contents.split(",") if contents != "" else []
def _dismissNotification(obj, notificationid):
    """Appends the notificationid in the users NOTIFICATIONS_FILENAME to dismiss the notification.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
        notificationid (int): The notification id
    Returns:
        None
    """
    #get the data from the notifications file
    ids = _getNotificationsData(obj)
    #coerce to str - the id may arrive as an int and str.join requires strings
    ids.append(str(notificationid))
    _writeFileUnicode(obj.folder_user + NOTIFICATIONS_FILENAME, ",".join(ids))
def _resetNotifications(obj):
    """Resets all notification for the user by clearing the NOTIFICATIONS_FILENAME.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #truncating the file un-dismisses every notification for this user
    notifications_file = obj.folder_user + NOTIFICATIONS_FILENAME
    _writeFileUnicode(notifications_file, "")
def _getSafeProjectName(project_name):
    """Returns a safe name that can be used as a folder name by replacing spaces with underscores

    Args:
        project_name (string): Unsafe project name
    Returns:
        string: A safe project name
    """
    #trim surrounding whitespace first, then replace internal spaces
    trimmed = project_name.strip()
    return trimmed.replace(" ", "_")
async def _getProjectsForUser(user):
    """Gets the projects for the specified user.

    Args:
        user (string): The name of the user
    Returns:
        dict[]: A list of dict containing each of the projects data
    """
    user_folder = MARXAN_USERS_FOLDER + user + os.sep
    #each subfolder of the users home folder is a candidate project - sorted for a stable order
    project_folders = sorted(glob.glob(user_folder + "*/"))
    projects = []
    #a reusable carrier object - _getProjectData reads/writes attributes on it
    tmpObj = ExtendableObject()
    for folder in project_folders:
        #get the bare folder name from the full path
        project = folder[:-1][folder[:-1].rfind(os.sep)+1:]
        #folders beginning with __ are system folders
        if project[:2] == "__":
            continue
        #get the data from the input file for this project
        tmpObj.project = project
        tmpObj.folder_project = user_folder + project + os.sep
        await _getProjectData(tmpObj)
        #create a dict to save the data
        metadata = tmpObj.projectData["metadata"] # pylint:disable=no-member
        projects.append({'user': user, 'name': project,'description': metadata["DESCRIPTION"],'createdate': metadata["CREATEDATE"],'oldVersion': metadata["OLDVERSION"],'private': metadata["PRIVATE"]})
    return projects
async def _getAllProjects():
    """Gets data for all projects.

    Args:
        None
    Returns:
        dict[]: A list of dict containing each of the projects data. See ``_getProjectData``.
    """
    allProjects = []
    #collect the projects of every registered user
    for user in _getUsers():
        allProjects += await _getProjectsForUser(user)
    return allProjects
async def _getProjects(obj):
    """Gets the projects for the currently logged on user

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        dict[]: A list of dict containing each of the projects data
    """
    #guests see everything; the role cookie is only read for non-guest users
    if (obj.user == GUEST_USERNAME):
        obj.projects = await _getAllProjects()
    elif (obj.get_secure_cookie("role").decode("utf-8") == "Admin"):
        #admins also see every project
        obj.projects = await _getAllProjects()
    else:
        obj.projects = await _getProjectsForUser(obj.user)
def _createProject(obj, name):
    """Creates a new empty project with the passed parameters.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
        name (string): The name of the project to create.
    Returns:
        None
    Raises:
        MarxanServicesError: If the project already exists.
    """
    project_folder = obj.folder_user + name
    #refuse to overwrite an existing project
    if os.path.exists(project_folder):
        raise MarxanServicesError("The project '" + name + "' already exists")
    #seed the new project by copying the empty project template folder
    _copyDirectory(EMPTY_PROJECT_TEMPLATE_FOLDER, project_folder)
    #set the paths to this project in the passed object - the arguments are normally passed as lists in tornado.get_argument - and the _setFolderPaths expects bytes not strings as they normally come from self.request.arguments
    _setFolderPaths(obj, {'user': [obj.user.encode("utf-8")], 'project': [name.encode("utf-8")]})
def _deleteProject(obj):
"""Deletes a project.
Args:
obj (MarxanRESTHandler): The request handler instance.
Returns:
None
"""
#delete the folder and all of its contents
try:
shutil.rmtree(obj.folder_project)
except (WindowsError) as e: # pylint:disable=undefined-variable
raise MarxanServicesError(e.strerror)
def _cloneProject(source_folder, destination_folder):
    """Clones a project from the source_folder to the destination_folder.

    Args:
        source_folder (string): Full folder path to the source folder.
        destination_folder (string): Full folder path to the destination folder.
    Returns:
        string: The name of the cloned project
    """
    #the project name is the last path segment of the source folder
    original_project_name = source_folder[:-1].split(os.sep)[-1]
    new_project_folder = destination_folder + original_project_name + os.sep
    #append '_copy' to the folder name until it no longer clashes with an existing folder
    while os.path.exists(new_project_folder):
        new_project_folder = new_project_folder[:-1] + "_copy" + os.sep
    #copy the whole project tree
    shutil.copytree(source_folder, new_project_folder)
    #stamp the clone with a new description and creation date
    _updateParameters(new_project_folder + PROJECT_DATA_FILENAME, {'DESCRIPTION': "Clone of project '" + original_project_name + "'", 'CREATEDATE': datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")})
    #the clone may have been renamed with '_copy' suffixes - return its final name
    return new_project_folder[:-1].split(os.sep)[-1]
def _setFolderPaths(obj, arguments):
    """Sets the various paths to the users folder and project folders using the request arguments in the passed object.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
        arguments (dict): See https://www.tornadoweb.org/en/stable/httputil.html#tornado.httputil.HTTPServerRequest.arguments
    Returns:
        None
    """
    #tornado request argument values arrive as lists of bytes
    if "user" in arguments:
        user = arguments["user"][0].decode("utf-8")
        obj.user = user
        obj.folder_user = MARXAN_USERS_FOLDER + user + os.sep
        #a project argument additionally sets the project folder and its input/output subfolders
        if "project" in arguments:
            project = arguments["project"][0].decode("utf-8").strip()
            obj.folder_project = obj.folder_user + project + os.sep
            obj.folder_input = obj.folder_project + "input" + os.sep
            obj.folder_output = obj.folder_project + "output" + os.sep
            obj.project = obj.get_argument("project")
async def _getProjectData(obj):
    """Gets the project data from the input.dat file as a categorised list of settings (project, metadata, files, runParameters and renderer). These are set on the passed obj in the projectData attribute.
    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #accumulators for the four categories of settings found in the input.dat file
    paramsArray = []   #run parameters as a list of {'key':..,'value':..} dicts
    filesDict = {}     #Input Files section
    metadataDict = {}  #metadata section
    rendererDict = {}  #renderer section
    #get the file contents
    s = _readFileUnicode(obj.folder_project + PROJECT_DATA_FILENAME)
    #get the keys from the file
    keys = _getKeys(s)
    #iterate through the keys and get their values
    for k in keys:
        #some parameters we do not need to return
        if k in ["PUNAME","SPECNAME","PUVSPRNAME","BOUNDNAME","BLOCKDEF"]: # Input Files section of input.dat file
            key, value = _getKeyValue(s, k)
            filesDict.update({ key: value})
        elif k in ['BLM', 'PROP', 'RANDSEED', 'NUMREPS', 'NUMITNS', 'STARTTEMP', 'NUMTEMP', 'COSTTHRESH', 'THRESHPEN1', 'THRESHPEN2', 'SAVERUN', 'SAVEBEST', 'SAVESUMMARY', 'SAVESCEN', 'SAVETARGMET', 'SAVESUMSOLN', 'SAVEPENALTY', 'SAVELOG', 'RUNMODE', 'MISSLEVEL', 'ITIMPTYPE', 'HEURTYPE', 'CLUMPTYPE', 'VERBOSITY', 'SAVESOLUTIONSMATRIX']:
            key, value = _getKeyValue(s, k) #run parameters
            paramsArray.append({'key': key, 'value': value})
        elif k in ['DESCRIPTION','CREATEDATE','PLANNING_UNIT_NAME','OLDVERSION','IUCN_CATEGORY','PRIVATE','COSTS']: # metadata section of the input.dat file
            key, value = _getKeyValue(s, k)
            metadataDict.update({key: value})
            if k=='PLANNING_UNIT_NAME':
                #enrich the metadata with details of the planning grid from the database
                df2 = await pg.execute("select * from marxan.get_planning_units_metadata(%s)", data=[value], returnFormat="DataFrame")
                if (df2.shape[0] == 0):
                    #no metadata record exists for this planning grid - use placeholder values
                    metadataDict.update({'pu_alias': value,'pu_description': 'No description','pu_domain': 'Unknown domain','pu_area': 'Unknown area','pu_creation_date': 'Unknown date','pu_created_by':'Unknown','pu_country':'Unknown'})
                else:
                    #get the data from the metadata_planning_units table
                    metadataDict.update({'pu_alias': df2.iloc[0]['alias'],'pu_country': df2.iloc[0]['country'],'pu_description': df2.iloc[0]['description'],'pu_domain': df2.iloc[0]['domain'],'pu_area': df2.iloc[0]['area'],'pu_creation_date': df2.iloc[0]['creation_date'],'pu_created_by':df2.iloc[0]['created_by']})
        elif k in ['CLASSIFICATION', 'NUMCLASSES','COLORCODE', 'TOPCLASSES','OPACITY']: # renderer section of the input.dat file
            key, value = _getKeyValue(s, k)
            rendererDict.update({key: value})
    #set the project data
    obj.projectData = {}
    obj.projectData.update({'project': obj.project, 'metadata': metadataDict, 'files': filesDict, 'runParameters': paramsArray, 'renderer': rendererDict})
async def _getProjectInputFilename(obj, fileToGet):
"""Gets the filename of the Marxan input file from the projects input.dat file.
Args:
obj (MarxanRESTHandler): The request handler instance.
fileToGet (string): The name of the input file as specified in the Input Files section of the input.dat file, e.g. one of INPUTDIR, PUNAME, SPECNAME, PUVSPRNAME or BOUNDNAME.
Returns:
The filename to the input file.
"""
if not hasattr(obj, "projectData"):
await _getProjectData(obj)
return obj.projectData["files"][fileToGet]
async def _getProjectInputData(obj, fileToGet, errorIfNotExists = False):
    """Gets the projects input data using the fileToGet, e.g. SPECNAME will return the data from the file corresponding to the input.dat file SPECNAME setting.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
        fileToGet (string): The name of the input file as specified in the Input Files section of the input.dat file, e.g. one of INPUTDIR, PUNAME, SPECNAME, PUVSPRNAME or BOUNDNAME.
        errorIfNotExists (bool): Optional. If True, raises an exception if the file does not exist. Defaults to False.
    Returns:
        dict: The data from the input file.
    """
    #obj.folder_input already ends with a path separator (see _setFolderPaths) -
    #os.path.join avoids the doubled separator the previous concatenation produced
    filename = os.path.join(obj.folder_input, await _getProjectInputFilename(obj, fileToGet))
    return _loadCSV(filename, errorIfNotExists)
def _getKeyValuesFromFile(filename):
    """Gets the key/value pairs from a text file as a dictionary.

    Args:
        filename (string): Full path to the file that will be read.
    Returns:
        dict: The key/value pairs as a dict.
    Raises:
        MarxanServicesError: If the file does not exist.
    """
    #guard clause - the file must exist
    if not os.path.exists(filename):
        raise MarxanServicesError("The file '" + filename + "' does not exist")
    #read the whole file and extract the keys it defines
    contents = _readFileUnicode(filename)
    data = {}
    for k in _getKeys(contents):
        key, value = _getKeyValue(contents, k)
        #convert 'true'/'false' strings to real booleans
        if value == "true":
            value = True
        elif value == "false":
            value = False
        data[key] = value
    return data
def _get_free_space_mb():
    """Gets the drive free space in gigabytes.

    Args:
        None
    Returns:
        string: The free space in Gb, e.g. 1.2 Gb
    """
    #shutil.disk_usage is cross-platform: it wraps GetDiskFreeSpaceExW on Windows
    #and statvfs (f_bavail * f_frsize) elsewhere, replacing the previous
    #ctypes/statvfs platform branches with a single stdlib call
    space = shutil.disk_usage(MARXAN_FOLDER).free / 1024 / 1024
    #NOTE: historical formatting kept for backwards compatibility - bytes are
    #converted to MiB (1024-based) and then divided by 1000 for the 'Gb' display
    return (str("{:.1f}".format(space/1000)) + " Gb")
def _getServerData(obj):
    """Gets all of the data about the server including from the server configuration file and the free space, processors and memory. These are set on the passed obj in the serverData attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #start with the settings from the server configuration file
    obj.serverData = _getKeyValuesFromFile(MARXAN_FOLDER + SERVER_CONFIG_FILENAME)
    #hardware: free disk space, processor count and total memory
    hardware = {"RAM": (str("{:.1f}".format(psutil.virtual_memory().total/1000000000)) + " Gb"), "PROCESSOR_COUNT": psutil.cpu_count(), "DISK_FREE_SPACE": _get_free_space_mb()}
    #operating system details
    os_info = {"SYSTEM": platform.system(), "NODE": platform.node(), "RELEASE": platform.release(), "VERSION": platform.version(), "MACHINE": platform.machine(), "PROCESSOR": platform.processor()}
    #software versions and server identity (set in this Python module)
    software = {"DATABASE_VERSION_POSTGIS": DATABASE_VERSION_POSTGIS, "DATABASE_VERSION_POSTGRESQL": DATABASE_VERSION_POSTGRESQL, "MARXAN_SERVER_VERSION": MARXAN_SERVER_VERSION, "MARXAN_CLIENT_VERSION": MARXAN_CLIENT_VERSION, "SERVER_NAME": SERVER_NAME, "SERVER_DESCRIPTION": SERVER_DESCRIPTION}
    obj.serverData.update({**hardware, **os_info, **software})
def _getUserData(obj, returnPassword = False):
    """Gets the data on the user from the user.dat file. These are set on the passed obj in the userData attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
        returnPassword (bool): Optional. Set to True to return the users password. Default value is False.
    Returns:
        None
    """
    data = _getKeyValuesFromFile(obj.folder_user + USER_DATA_FILENAME)
    #strip the PASSWORD entry unless the caller explicitly asked for it
    if not returnPassword:
        data = {key: val for key, val in data.items() if key != 'PASSWORD'}
    obj.userData = data
async def _getSpeciesData(obj):
    """Gets the species data for a project from the Marxan SPECNAME file as a DataFrame and joins it to the data from the PostGIS database if the project is a Marxan Web project. These are set on the passed obj in the speciesData attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #get the values from the spec.dat file - speciesDataFilename will be empty if it doesn't exist yet
    df = await _getProjectInputData(obj, "SPECNAME")
    #create the output data frame using the id field as an index
    output_df = df.set_index("id")
    #add the index as a column
    output_df['oid'] = output_df.index
    #see if the version of marxan is the old version
    if obj.projectData["metadata"]["OLDVERSION"]:
        #return the data from the spec.dat file with additional fields manually added
        output_df['tmp'] = 'Unique identifer: '
        #if the spec.dat file has a field called 'name' then this will be used as the alias
        if ('name' in output_df.columns):
            output_df['alias'] = output_df['name']
        else:
            output_df['alias'] = output_df['tmp'].str.cat((output_df['oid']).apply(str)) # returns: 'Unique identifer: 4702435'
        output_df['feature_class_name'] = output_df['oid']
        output_df['description'] = "No description"
        output_df['creation_date'] = "Unknown"
        output_df['area'] = -1
        output_df['tilesetid'] = ''
        output_df['created_by'] = 'Unknown'
        try:
            output_df = output_df[["alias", "feature_class_name", "description", "creation_date", "area", "tilesetid", "prop", "spf", "oid", "created_by"]]
        except KeyError as e:
            #bug fix: a KeyError carries a single message argument, so e.args[1] raised an
            #IndexError, and .encode() returns bytes which cannot be concatenated to str -
            #use args[0] and decode back to str (encode/decode escapes any tab characters to \\t)
            raise MarxanServicesError("Unable to load spec.dat data. " + str(e.args[0]) + ". Column names: " + ",".join(df.columns.to_list()).encode('unicode_escape').decode("utf-8"))
    else:
        #get the postgis feature data - this doesnt use _getAllSpeciesData because we have to join on the oid column
        df2 = await pg.execute("select * from marxan.get_features()", returnFormat="DataFrame")
        #join the species data to the PostGIS data
        output_df = output_df.join(df2.set_index("oid"))
    #rename the columns that are sent back to the client as the names of various properties are different in Marxan compared to the web client
    output_df = output_df.rename(index=str, columns={'prop': 'target_value', 'oid':'id'})
    #get the target as an integer - Marxan has it as a percentage, i.e. convert 0.17 -> 17
    output_df['target_value'] = (output_df['target_value'] * 100).astype(int)
    obj.speciesData = output_df
async def _getFeature(obj, oid):
    """Gets data for a single feature from PostGIS in a Marxan Web project (this does not apply to imported projects as they have no data in PostGIS). These are set on the passed obj in the data attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
        oid (string): The feature oid in PostGIS.
    Returns:
        None
    """
    #fetch the single matching row from the feature metadata table
    sql = "SELECT oid::integer id,feature_class_name,alias,description,_area area,extent, to_char(creation_date, 'DD/MM/YY HH24:MI:SS')::text AS creation_date, tilesetid, source, created_by FROM marxan.metadata_interest_features WHERE oid=%s;"
    obj.data = await pg.execute(sql, data=[oid], returnFormat="DataFrame")
async def _getAllSpeciesData(obj):
    """Gets all feature information from the PostGIS database. These are set on the passed obj in the allSpeciesData attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #fetch every row from the feature metadata table, ordered case-insensitively by alias
    sql = "SELECT oid::integer id,feature_class_name , alias , description , _area area, extent, to_char(creation_date, 'DD/MM/YY HH24:MI:SS')::text AS creation_date, tilesetid, source, created_by FROM marxan.metadata_interest_features ORDER BY lower(alias);"
    obj.allSpeciesData = await pg.execute(sql, returnFormat="DataFrame")
def _getSpeciesPreProcessingData(obj):
    """Get the information about which species have already been preprocessed. These are set on the passed obj in the speciesPreProcessingData attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #the preprocessing results are persisted as a csv file in the projects input folder
    filename = obj.folder_input + FEATURE_PREPROCESSING_FILENAME
    obj.speciesPreProcessingData = _loadCSV(filename)
async def _getPlanningUnitsData(obj):
    """Get the planning units status information from the PUNAME file as a list of lists. The data is normalised to reduce bandwidth and are set on the passed obj in the planningUnitsData attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #read the pu.dat file and normalise the status data to make the payload smaller
    puData = await _getProjectInputData(obj, "PUNAME")
    obj.planningUnitsData = _normaliseDataFrame(puData, "status", "id")
async def _getPlanningUnitsCostData(obj):
    """Get the planning units cost information from the PUNAME file as a list of lists. The data is categorised and normalised into 9 classes to reduce bandwidth.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        list[]: The categorised and normalised cost data.
    """
    #read the pu.dat file and bin the cost data into 9 classes to make the payload smaller
    puData = await _getProjectInputData(obj, "PUNAME")
    return _normaliseDataFrame(puData, "cost", "id", 9)
def _getCosts(obj):
    """Gets a list of the custom cost profiles for a project - these are defined in the input/*.cost files. These are set on the passed obj in the costNames attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #each *.cost file in the input folder defines one custom cost profile -
    #strip the folder and the .cost extension to get the profile name
    names = [os.path.splitext(os.path.basename(f))[0] for f in glob.glob(obj.folder_input + "*.cost")]
    #the uniform cost profile is always available as the default
    names.append(UNIFORM_COST_NAME)
    obj.costNames = sorted(names)
async def _updateCosts(obj, costname):
    """Updates the costs in the Marxan PUNAME file using the costname file and saves the setting in the input.dat file.
    Args:
        obj (MarxanRESTHandler): The request handler instance.
        costname (string): The name of the costname file to use without the .cost extension.
    Returns:
        None
    Raises:
        MarxanServicesError: If the costname file does not exist.
    """
    #full path to the named cost profile, e.g. <project>/input/<costname>.cost
    filename = obj.folder_input + costname + ".cost"
    #load the pu.dat file
    df = await _getProjectInputData(obj, "PUNAME")
    #default costs are uniform
    if costname==UNIFORM_COST_NAME:
        df['cost'] = 1
    else:
        #check the cost file exists
        if not os.path.exists(filename):
            raise MarxanServicesError("The cost file '" + costname + "' does not exist")
        #load the costs file
        df2 = _loadCSV(filename)
        #join the costs file (which has id,cost) to the pu.dat file (which has status)
        #NOTE(review): DataFrame.join aligns on the index, not on the id column - this
        #assumes both files list the planning units in the same row order; confirm
        df = df2.join(df[['status']])
    #update the input.dat file
    _updateParameters(obj.folder_project + PROJECT_DATA_FILENAME, {'COSTS': costname})
    #persist the updated costs back to the PUNAME file
    await _writeCSV(obj, "PUNAME", df)
def _deleteCost(obj, costname):
"""Deletes a cost profile.
Args:
obj (MarxanRESTHandler): The request handler instance.
costname (string): The name of the costname file to delete without the .cost extension.
Returns:
None
Raises:
MarxanServicesError: If the costname file does not exist.
"""
filename = obj.folder_input + costname + ".cost"
#check the cost file exists
if not os.path.exists(filename):
raise MarxanServicesError("The cost file '" + costname + "' does not exist")
else:
os.remove(filename)
async def _getPlanningUnitGrids():
    """Gets the data for all of the planning grids.

    Args:
        None
    Returns:
        dict[]: The planning grids data.
    """
    #join the planning grid metadata to the country table to resolve the country name
    sql = "SELECT feature_class_name ,alias ,description ,to_char(creation_date, 'DD/MM/YY HH24:MI:SS')::text AS creation_date ,country_id ,aoi_id,domain,_area,ST_AsText(envelope) envelope, pu.source, original_n country, created_by,tilesetid, planning_unit_count FROM marxan.metadata_planning_units pu LEFT OUTER JOIN marxan.gaul_2015_simplified_1km ON id_country = country_id order by lower(alias);"
    return await pg.execute(sql, returnFormat="Dict")
async def _estimatePlanningUnitCount(areakm2, iso3, domain):
    """Estimates the number of planning grid units in the passed country, area and domain.

    Args:
        areakm2 (string): The area of the planning grid in Km2.
        iso3 (string): The country iso3 3-letter code.
        domain (string): The domain for the planning grid. One of marine or terrestrial.
    Returns:
        int: The number of planning grid units.
    """
    #terrestrial estimates use the simplified GAUL country boundaries, marine estimates use the simplified EEZs
    table = "marxan.gaul_2015_simplified_1km" if (domain == 'Terrestrial') else "marxan.eez_simplified_1km"
    sql = "SELECT ST_Area(ST_Transform(wkb_geometry, 3410))/(%s*1000000) FROM " + table + " WHERE iso3 = %s;"
    unitCount = await pg.execute(sql, data=[areakm2,iso3], returnFormat="Array")
    return unitCount[0][0]
def _getProtectedAreaIntersectionsData(obj):
    """Gets the protected area intersections information for a project. These are set on the passed obj in the protectedAreaIntersectionsData attribute.

    Args:
        obj (MarxanRESTHandler): The request handler instance.
    Returns:
        None
    """
    #load the intersections file and normalise it by iucn category to make the payload smaller
    intersections = _loadCSV(obj.folder_input + PROTECTED_AREA_INTERSECTIONS_FILENAME)
    obj.protectedAreaIntersectionsData = _normaliseDataFrame(intersections, "iucn_cat", "puid")
def _invalidateProtectedAreaIntersections():
    """Resets all of the protected area intersections information for all projects - for example when a new version of the wdpa is installed.

    Args:
        None
    Returns:
        None
    """
    #an empty intersections file ships with the empty project template
    template = EMPTY_PROJECT_TEMPLATE_FOLDER + "input" + os.sep + PROTECTED_AREA_INTERSECTIONS_FILENAME
    #overwrite every existing intersections file with the empty template - this
    #includes projects in the /_marxan_web_resources/case_studies folder
    for existing in _getFilesInFolderRecursive(MARXAN_USERS_FOLDER, PROTECTED_AREA_INTERSECTIONS_FILENAME):
        shutil.copyfile(template, existing)