Skip to content

Commit 6f43a18

Browse files
committed
Trim remote helper comments
1 parent 52ab1f9 commit 6f43a18

File tree

2 files changed

+1
-34
lines changed

2 files changed

+1
-34
lines changed

graphistry/compute/chain_remote.py

Lines changed: 0 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,6 @@ def chain_remote_generic(
5252
if not dataset_id:
5353
raise ValueError("Missing dataset_id; either pass in, or call on g2=g1.plot(render='g') in api=3 mode ahead of time")
5454

55-
# Resolve engine: auto -> pandas/cudf based on graph DataFrame type
5655
engine_resolved = resolve_engine(engine, self)
5756
if engine_resolved not in [Engine.PANDAS, Engine.CUDF]:
5857
raise ValueError(f"Remote GFQL only supports 'pandas' or 'cudf' engines (or 'auto' which resolves to one of them). "
@@ -66,7 +65,6 @@ def chain_remote_generic(
6665
else:
6766
format = "parquet"
6867

69-
# Validate persist compatibility early
7068
if persist and output_type in ["nodes", "edges"]:
7169
raise ValueError(f"persist=True is not supported with output_type='{output_type}'. "
7270
f"Use output_type='all' for persistence support.")
@@ -97,13 +95,11 @@ def chain_remote_generic(
9795
if persist:
9896
request_body["persist"] = persist
9997

100-
# Include privacy settings for persisted dataset
10198
if hasattr(self, '_privacy') and self._privacy is not None:
10299
request_body["privacy"] = dict(self._privacy)
103100

104101
url = f"{self.base_url_server()}/api/v2/etl/datasets/{dataset_id}/gfql/{output_type}"
105102

106-
# Prepare headers
107103
headers = {
108104
"Authorization": f"Bearer {api_token}",
109105
"Content-Type": "application/json",
@@ -112,27 +108,19 @@ def chain_remote_generic(
112108

113109
response = requests.post(url, headers=headers, json=request_body, verify=self.session.certificate_validation)
114110

115-
# Enhanced error handling for GFQL validation errors
116111
if not response.ok:
117112
try:
118-
# Try to parse JSON error response for more details
119113
if response.headers.get('content-type', '').startswith('application/json'):
120114
error_data = response.json()
121115
error_msg = error_data.get('error', str(error_data))
122116
raise ValueError(f"GFQL remote operation failed: {error_msg} (HTTP {response.status_code})")
123117
else:
124-
# Fallback to generic error with response text
125118
raise ValueError(f"GFQL remote operation failed: {response.text[:500]} (HTTP {response.status_code})")
126119
except (ValueError,) as ve:
127-
# Re-raise our custom ValueError
128120
raise ve
129121
except Exception:
130-
# If JSON parsing fails, re-raise the original HTTP error
131122
response.raise_for_status()
132123

133-
# deserialize based on output_type & format
134-
135-
# Determine DataFrame library by checking both edges and nodes
136124
edges_is_cudf = self._edges is not None and 'cudf.core.dataframe' in str(getmodule(self._edges))
137125
nodes_is_cudf = self._nodes is not None and 'cudf.core.dataframe' in str(getmodule(self._nodes))
138126

@@ -180,18 +168,15 @@ def chain_remote_generic(
180168

181169
result = self.edges(edges_df).nodes(nodes_df)
182170

183-
# Check for metadata.json in zip (both persist and GFQL metadata)
184171
if 'metadata.json' in zip_ref.namelist():
185172
try:
186173
metadata_content = zip_ref.read('metadata.json')
187174
metadata = json.loads(metadata_content.decode('utf-8'))
188175

189176
if persist:
190-
# Extract dataset_id for URL generation
191177
if 'dataset_id' in metadata:
192178
result._dataset_id = metadata['dataset_id']
193179

194-
# Generate URL using existing infrastructure
195180
if result._dataset_id: # Type guard
196181
info: DatasetInfo = {
197182
'name': result._dataset_id,
@@ -201,7 +186,6 @@ def chain_remote_generic(
201186

202187
result._url = result._pygraphistry._viz_url(info, result._url_params)
203188

204-
# Optionally restore privacy settings
205189
if 'privacy' in metadata:
206190
result._privacy = metadata['privacy']
207191

@@ -223,18 +207,14 @@ def chain_remote_generic(
223207

224208
return result
225209
except zipfile.BadZipFile as e:
226-
# Server likely returned an error response instead of zip data
227-
# Try to parse the response as JSON for a better error message
228210
try:
229211
if response.headers.get('content-type', '').startswith('application/json'):
230212
error_data = response.json()
231213
error_msg = error_data.get('error', str(error_data))
232214
raise ValueError(f"GFQL remote operation failed with validation error: {error_msg}")
233215
else:
234-
# Show the response text for debugging
235216
raise ValueError(f"GFQL remote operation failed - server returned non-zip response: {response.text[:500]}")
236217
except Exception:
237-
# If all else fails, re-raise the original BadZipFile error with context
238218
raise ValueError(f"GFQL remote operation failed - server response is not a valid zip file. "
239219
f"This usually indicates a server validation error. Response status: {response.status_code}") from e
240220
elif output_type in ["nodes", "edges"] and format in ["csv", "parquet"]:
@@ -265,12 +245,10 @@ def chain_remote_generic(
265245
else:
266246
raise ValueError(f"JSON format read with unexpected output_type: {output_type}")
267247

268-
# Handle persist response - set dataset_id if provided
269248
if persist:
270249
if 'dataset_id' in o:
271250
result._dataset_id = o['dataset_id']
272251

273-
# Generate URL using existing infrastructure
274252
if result._dataset_id: # Type guard
275253
dataset_info: DatasetInfo = {
276254
'name': result._dataset_id,

graphistry/compute/python_remote.py

Lines changed: 1 addition & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,6 @@ def task(g: Plottable) -> Dict[str, Any]:
125125

126126
assert format in ["json", "csv", "parquet"], f"format should be 'json', 'csv', or 'parquet', got: {format}"
127127

128-
# Resolve engine: auto -> pandas/cudf based on graph DataFrame type
129128
engine_resolved = resolve_engine(engine, self)
130129
if engine_resolved not in [Engine.PANDAS, Engine.CUDF]:
131130
raise ValueError(f"Remote Python execution only supports 'pandas' or 'cudf' engines (or 'auto' which resolves to one of them). "
@@ -134,7 +133,6 @@ def task(g: Plottable) -> Dict[str, Any]:
134133
engine_str = engine_resolved.value
135134

136135
# TODO remove auto-indent when server updated
137-
# workaround parsing bug by indenting each line by 4 spaces
138136
code_indented = "\n".join([" " + line for line in code.split("\n")])
139137

140138
request_body = {
@@ -147,7 +145,6 @@ def task(g: Plottable) -> Dict[str, Any]:
147145

148146
url = f"{self.base_url_server()}/api/v2/datasets/{dataset_id}/python"
149147

150-
# Prepare headers
151148
headers = {
152149
"Authorization": f"Bearer {api_token}",
153150
"Content-Type": "application/json",
@@ -156,19 +153,15 @@ def task(g: Plottable) -> Dict[str, Any]:
156153

157154
response = requests.post(url, headers=headers, json=request_body, verify=self.session.certificate_validation)
158155

159-
# Enhanced error handling for GFQL validation errors
160156
if not response.ok:
161157
try:
162-
# Try to parse JSON error response for more details
163158
if response.headers.get('content-type', '').startswith('application/json'):
164159
error_data = response.json()
165160
error_msg = error_data.get('error', str(error_data))
166161
raise ValueError(f"GFQL remote operation failed: {error_msg} (HTTP {response.status_code})")
167162
except ValueError:
168-
# Re-raise ValueError (which includes our custom message)
169163
raise
170164
except Exception:
171-
# Fall back to default error handling for other JSON parsing errors
172165
pass
173166
response.raise_for_status()
174167

@@ -215,22 +208,18 @@ def task(g: Plottable) -> Dict[str, Any]:
215208

216209
return self.edges(edges_df).nodes(nodes_df)
217210
except zipfile.BadZipFile as e:
218-
# Handle case where response is not a zip file (e.g., error response)
219211
try:
220-
# Try to parse as JSON error response
221212
if response.headers.get('content-type', '').startswith('application/json'):
222213
error_data = response.json()
223214
error_msg = error_data.get('error', str(error_data))
224215
raise ValueError(f"GFQL remote operation failed: {error_msg} (Expected zip file but got JSON error)")
225216
else:
226-
# Try to decode as text for better error context
227217
try:
228-
error_text = response.content.decode('utf-8')[:500] # First 500 chars
218+
error_text = response.content.decode('utf-8')[:500]
229219
raise ValueError(f"GFQL remote operation failed: Expected zip file but received: {error_text}")
230220
except UnicodeDecodeError:
231221
raise ValueError(f"GFQL remote operation failed: Expected zip file but received invalid data (HTTP {response.status_code})")
232222
except Exception:
233-
# Fallback: re-raise original BadZipFile with more context
234223
raise ValueError(f"GFQL remote operation failed: {str(e)} - Response may be an error message instead of expected zip file")
235224
elif output_type in ["nodes", "edges", "table"] and format in ["csv", "parquet"]:
236225
data = BytesIO(response.content)

0 commit comments

Comments (0)