-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathadd_features2.py
More file actions
312 lines (243 loc) · 10.8 KB
/
add_features2.py
File metadata and controls
312 lines (243 loc) · 10.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
# Created by Billy Henshaw
# Last updated on 08.11.2021
# To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %%
import arcpy # not installed on local, but is on Grapery VM
import pandas as pd
import numpy as np
from arcgis.gis import GIS
from arcgis import features, geometry
from copy import deepcopy
import os
import sys
import uuid
# import getpass
from time import sleep
from arcgis.features import GeoAccessor, GeoSeriesAccessor
import Updates_to_Crewtable
# %%
def main():
    """Import crew internal assignments from the ptIntAssgn spreadsheet
    into the Job_IntAssgn hosted feature layers.

    Workflow:
      1. Log in to the GIS portal (aborts on failure).
      2. Pull the IntAssgn, Job, ptCrew layers and the master field list
         (RF_no_edit), computing a centroid for every field polygon.
      3. Read and clean the 'ptIntAssgn' sheet: zero-pad crew codes to
         3 digits, convert placeholder zeros to nulls, drop empty rows.
      4. Add any crew codes not yet present in the ptCrew layer.
      5. Left-merge with the Job layer to pick up each job's GlobalID,
         normalize field names, and add one point feature per assignment
         at the matching field's centroid.
      6. Replace the sentinel GUID written for unmatched jobs with a
         real null after the features have been added.

    Credentials, portal URL, working directory and spreadsheet name are
    intentionally blanked in this copy and must be filled in before use.
    """
    # change this to Grapery dir or shared drive
    dr = ''
    # dr = os.path.dirname(sys.executable)
    spreadsheet = ''
    try:
        username = ""
        password = ''
        # username = input('Username: ')
        # password = getpass.getpass()
        gis = GIS("", username, password)
        print("Logged in as " + gis.properties.user.username +
              ' to ' + gis.properties.name)
    except Exception:
        print('Failed to login. Check credentials and network connection and try again.')
        # BUGFIX: execution previously fell through with `gis` undefined,
        # crashing with a NameError on the next line. Abort instead.
        return

    # pull in Internal Assignment layer
    Job_IntAssgn_prod = gis.content.search(
        'Job_IntAssgn')[0]  # production dataset
    # Job_IntAssgn = gis.content.get('')  # test dataset
    IntAssgn = Job_IntAssgn_prod.layers[1]  # production dataset
    # IntAssgn = Job_IntAssgn.layers[0]  # test dataset
    Job = Job_IntAssgn_prod.layers[5]  # pull the jobs layer

    # how many features before the append?
    pre_cnt = len(IntAssgn.query().sdf.values)
    print(f'Number of features before append: {pre_cnt}')

    # Pull in field dataset for master field list
    RF_no_edit = gis.content.search('title:RF_no_edit')[0]
    Field = RF_no_edit.layers[0]
    Field_db = Field.query().sdf
    # calculate centroid of each field polygon (used as the point geometry
    # for each assignment feature)
    centroids = []
    for _, field_row in Field_db.iterrows():
        centroids.append(field_row['SHAPE'].centroid)
    Field_db['Centroid'] = centroids

    # only need GlobalID, job code, and field name for the merge
    job = Job.query().sdf[['job_code', 'field_name', 'GlobalID']]

    # read in ptIntAssgn spreadsheet and normalize column types
    df = pd.read_excel(dr + spreadsheet, sheet_name='ptIntAssgn')
    df['Crew Code'] = df['Crew Code'].astype(str)
    df['Field Name'] = df['Field Name'].astype(str)
    df['Job Code'] = df['Job Code'].astype('float').astype('Int32')
    # zero-pad crew codes to 3 digits (e.g. '7' -> '007')
    df['Crew Code'] = df['Crew Code'].str.zfill(3)
    # placeholder zeros mean "no value" — convert them to real nulls
    null_value = float('NaN')
    df.replace(0, null_value, inplace=True)
    df.replace('0', null_value, inplace=True)
    df.replace('000', null_value, inplace=True)
    df.replace('000000', null_value, inplace=True)
    # drop all-null columns, then rows with fewer than 4 populated cells
    df = df.dropna(how='all', axis=1).dropna(thresh=4, axis=0)

    # convert Job Codes in Job to nullable int so the merge keys match
    job['job_code'] = job['job_code'].astype('float').astype('Int32')

    # --- check for new crew codes and add them to the ptCrew layer ---
    ptCrew = Job_IntAssgn_prod.layers[0]
    crewcodes_list = list(df['Crew Code'].values)
    ptCrew_crewcodes_list = list(ptCrew.query().sdf['CrewNumber'].values)
    in_ptCrew = [a in ptCrew_crewcodes_list for a in crewcodes_list]
    notin_ptCrew = np.array([not a for a in in_ptCrew])
    # first spreadsheet row for each previously-unseen crew code
    dfindx = []
    newcrewnames = list(
        set([i for (i, v) in zip(crewcodes_list, notin_ptCrew) if v]))
    for name in newcrewnames:
        dfindx.append((df['Crew Code'].values == name).argmax())
    df_newcrews = df.iloc[dfindx]
    if len(newcrewnames) > 0:
        print('Adding new crews to domain...')
        sleep(2)
        crews_to_add = []
        # clone an existing feature as a template so attribute/geometry
        # keys match the layer schema
        feature = deepcopy(ptCrew.query().features[0])
        pt_geometry = {'x': -13266834.274, 'y': 4233952.943}
        for i in range(len(newcrewnames)):
            feature_copy = deepcopy(feature)
            feature_copy.attributes['CrewLead'] = df_newcrews.iloc[i].loc['CrewLeadFirst'] + \
                ' ' + df_newcrews.iloc[i].loc['CrewLeadLast']
            feature_copy.attributes['CrewName'] = df_newcrews.iloc[i].loc['Crew Code'] + \
                ' - ' + feature_copy.attributes['CrewLead']
            feature_copy.attributes['CrewNumber'] = df_newcrews.iloc[i].loc['Crew Code']
            feature_copy.attributes['CrewLeadFirst'] = df_newcrews.iloc[i].loc['CrewLeadFirst']
            feature_copy.attributes['CrewLeadLast'] = df_newcrews.iloc[i].loc['CrewLeadLast']
            feature_copy.attributes['CrewLeadPhone'] = df_newcrews.iloc[i].loc['PhoneNumber']
            feature_copy.attributes['DomainXfer'] = 0
            feature_copy.attributes['Zone'] = None
            feature_copy.attributes['UName'] = None
            # offset each new point slightly east so they don't stack
            feature_copy.geometry['x'] = pt_geometry['x'] + (10 * i)
            feature_copy.geometry['y'] = pt_geometry['y']
            crews_to_add.append(feature_copy)
        ptCrew.edit_features(adds=crews_to_add)

    # merge df with Job by "field_name" and "job_code" to get the Job
    # GlobalID, which is appended to the JobID field of IntAssgn below.
    dfJobmerge = pd.merge(df, job,
                          left_on=['Field Name', 'Job Code'],
                          right_on=['field_name', 'job_code'],
                          how='left')
    # sentinel GUID written where the merge found no Job GlobalID;
    # swapped back to a real null after the feature adds
    nullID = '{' + str(uuid.uuid4()).upper() + '}'

    def adjust(fieldname):
        """Normalize a user-entered field name to the master-list format.

        Strips a single leading letter, left-pads the numeric prefix to
        3 digits, and appends a block number ('1' assumed) when missing.
        Returns None for non-string input (e.g. NaN from the merge).
        May be unnecessary once the spreadsheet gains a field dropdown.
        """
        try:
            nums = sum(c.isdigit() for c in fieldname[:3])
            last_nums = sum(c.isdigit() for c in fieldname[-1])
            first_chars = sum(c.isalpha() for c in fieldname[0])
            if first_chars == 1:
                # BUGFIX: replace(fieldname[0], '') removed EVERY
                # occurrence of the first character, not just the leading
                # one, mangling names that repeat it. Drop only the first.
                fieldname = fieldname[1:]
            if nums < 3:
                fieldname = fieldname[:2].rjust(3, '0') + fieldname[2:]
            if last_nums < 1:
                # assume block '1'; correct field names are the
                # user's responsibility
                fieldname = fieldname[:-2] + fieldname[-2:].ljust(3, '1')
            return fieldname
        except TypeError:
            return None

    # deep copy so the adjustments below don't touch dfJobmerge
    new_rows = dfJobmerge.copy(deep=True)
    for g, merged_row in new_rows.iterrows():
        adj_field = adjust(merged_row['Field Name'])
        if adj_field is None:
            continue
        # BUGFIX: assigning into the iterrows() row mutated a copy and the
        # adjustment was silently lost; write back to the DataFrame itself.
        new_rows.loc[g, 'Field Name'] = adj_field

    # query all the features once; first one is the add template
    fset = IntAssgn.query()
    features_to_be_added = []
    template_hostedFeature = deepcopy(fset.features[0])

    # build one point feature per assignment row
    for index, row in new_rows.iterrows():
        # skip rows with no field name
        if pd.isnull(row['Field Name']):
            continue
        new_feature = deepcopy(template_hostedFeature)
        try:
            centroid = Field_db['Centroid'][Field_db['field_name']
                                            == row['Field Name']].iloc[0]
            x, y = centroid[0], centroid[1]
            print(
                f'Creating crew {row["Crew Code"]} with field code {row["Field Name"]}...')
            sleep(0.5)
        except IndexError:
            print(
                f'Crew {row["Crew Code"]} with field code {row["Field Name"]} does not have a geometry. Feature not added.')
            sleep(0.5)
            # BUGFIX: without this continue the feature was still added,
            # reusing the previous row's coordinates (or crashing with a
            # NameError on the first row).
            continue
        input_geometry = {'y': float(y), 'x': float(x)}
        current_date = row['Date']
        # adjusting for timezone (+50400 s offset), then epoch milliseconds
        # as the feature service expects
        current_timestamp = int(current_date.timestamp() + 50400) * 1000
        new_feature.geometry['x'] = input_geometry['x']
        new_feature.geometry['y'] = input_geometry['y']
        new_feature.attributes['date'] = current_timestamp
        new_feature.attributes['crew_number'] = row['Crew Code']
        new_feature.attributes['jobcode'] = row['Job Code']
        new_feature.attributes['field_name'] = row['Field Name']
        # rows whose merge found no Job get the sentinel, nulled out below
        if pd.isna(row['GlobalID']):
            new_feature.attributes['JobID'] = nullID
        else:
            new_feature.attributes['JobID'] = row['GlobalID']
        new_feature.attributes['WFXfer'] = 0
        features_to_be_added.append(new_feature)

    add_cnt = len(features_to_be_added)
    # add new features to IntAssgn feature layer
    if add_cnt > 0:
        IntAssgn.edit_features(adds=features_to_be_added)
    print('Features successfully added to feature layer.')
    sleep(1)
    print('Checking for nulls...')
    sleep(3)
    # how many features in the database after the append?
    post_cnt = len(IntAssgn.query().sdf.values)
    # replace the sentinel GUID with a real null in the new features only.
    # BUGFIX: guarded — when add_cnt was 0 the old slice [-0:] selected
    # EVERY feature in the layer for update.
    if add_cnt > 0:
        features_to_be_updated = IntAssgn.query().features[-add_cnt:]
        for feat in features_to_be_updated:
            if feat.attributes['JobID'] == nullID:
                feat.attributes['JobID'] = None
        IntAssgn.edit_features(updates=features_to_be_updated)
    print(f'Number of features after append: {post_cnt}\n')
    print(f'Number of features added: {add_cnt}\n\n')
    sleep(3)
    print('Assignments added without error!')
    print('\n')
    sleep(1)
    print('Updating domain with new crews...')
    print('\n')
    sleep(2)
    # END
if __name__ == '__main__':
    # Import the spreadsheet assignments first, then sync the crew table.
    main()
    Updates_to_Crewtable.main()