Skip to content

Commit e48bb18

Browse files
authored
feat: add --dump-html option to dump raw HTML to stdout (#7)
1 parent fb3dea1 commit e48bb18

File tree

2 files changed

+831
-756
lines changed

2 files changed

+831
-756
lines changed

peakbagger/cli.py

Lines changed: 111 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
from typing import TYPE_CHECKING
44

55
import click
6+
from rich.console import Console
67

78
from peakbagger import __version__
89
from peakbagger.client import PeakBaggerClient
@@ -12,6 +13,42 @@
1213
if TYPE_CHECKING:
1314
from peakbagger.models import Peak
1415

# Module-level console for status messages (stderr to keep stdout clean for data)
_console = Console(stderr=True)
20+
def _status(ctx: click.Context, message: str, style: str | None = None) -> None:
    """
    Emit an informational message on stderr via Rich.

    Honors the global --quiet and --dump-html flags stored on the Click
    context: when either is set, the message is suppressed so stdout
    stays clean for piped data and stderr stays quiet.

    Args:
        ctx: Click context whose ``obj`` dict carries the global options
        message: Status message to display
        style: Optional Rich style string (e.g., "bold yellow", "red")
    """
    # Global flags live in ctx.obj (populated by the top-level `main` group).
    flags = ctx.obj
    if flags.get("quiet") or flags.get("dump_html"):
        return

    if style:
        _console.print(message, style=style)
        return
    _console.print(message)
40+
def _error(message: str) -> None:
    """
    Report an error on stderr via Rich.

    Unlike ``_status``, errors are never suppressed — they are shown even
    when --quiet or --dump-html is in effect.

    Args:
        message: Error message to display
    """
    _console.print(f"[bold red]Error:[/bold red] {message}")
1552

1653
@click.group()
1754
@click.version_option(version=__version__)
@@ -21,11 +58,17 @@
2158
is_flag=True,
2259
help="Suppress informational messages",
2360
)
61+
@click.option(
62+
"--dump-html",
63+
is_flag=True,
64+
help="Dump raw HTML to stdout instead of parsing",
65+
)
2466
@click.pass_context
25-
def main(ctx: click.Context, quiet: bool) -> None:
67+
def main(ctx: click.Context, quiet: bool, dump_html: bool) -> None:
    """PeakBagger CLI - Search and retrieve mountain peak data from PeakBagger.com"""
    # Stash the global flags on ctx.obj so subcommands (and _status) can read them.
    ctx.ensure_object(dict)
    ctx.obj.update({"quiet": quiet, "dump_html": dump_html})
2972

3073

3174
@main.group()
@@ -60,7 +103,10 @@ def ascent() -> None:
60103
default=2.0,
61104
help="Seconds between requests (default: 2.0)",
62105
)
63-
def search(query: str, output_format: str, full: bool, rate_limit: float) -> None:
106+
@click.pass_context
107+
def search(
108+
ctx: click.Context, query: str, output_format: str, full: bool, rate_limit: float
109+
) -> None:
64110
"""
65111
Search for peaks by name.
66112
@@ -80,19 +126,24 @@ def search(query: str, output_format: str, full: bool, rate_limit: float) -> Non
80126

81127
try:
82128
# Fetch search results
83-
click.echo(f"Searching for '{query}'...")
129+
_status(ctx, f"Searching for '{query}'...")
84130
html = client.get("/search.aspx", params={"ss": query, "tid": "M"})
85131

132+
# If dump-html flag is set, print HTML and exit
133+
if ctx.obj.get("dump_html"):
134+
click.echo(html)
135+
return
136+
86137
# Parse results
87138
results = scraper.parse_search_results(html)
88139

89140
if not results:
90-
click.echo(f"No results found for '{query}'")
141+
_status(ctx, f"No results found for '{query}'")
91142
return
92143

93144
# If --full flag, fetch details for each peak
94145
if full:
95-
click.echo(f"Fetching details for {len(results)} peak(s)...\n")
146+
_status(ctx, f"Fetching details for {len(results)} peak(s)...\n")
96147
peaks: list[Peak] = []
97148
for result in results:
98149
detail_html = client.get(f"/{result.url}")
@@ -106,7 +157,7 @@ def search(query: str, output_format: str, full: bool, rate_limit: float) -> Non
106157
formatter.format_search_results(results, output_format)
107158

108159
except Exception as e:
109-
click.echo(f"Error: {e}", err=True)
160+
_error(str(e))
110161
raise click.Abort() from e
111162
finally:
112163
client.close()
@@ -127,7 +178,8 @@ def search(query: str, output_format: str, full: bool, rate_limit: float) -> Non
127178
default=2.0,
128179
help="Seconds between requests (default: 2.0)",
129180
)
130-
def show(peak_id: str, output_format: str, rate_limit: float) -> None:
181+
@click.pass_context
182+
def show(ctx: click.Context, peak_id: str, output_format: str, rate_limit: float) -> None:
131183
"""
132184
Get detailed information about a specific peak.
133185
@@ -145,21 +197,26 @@ def show(peak_id: str, output_format: str, rate_limit: float) -> None:
145197

146198
try:
147199
# Fetch peak detail page
148-
click.echo(f"Fetching peak {peak_id}...")
200+
_status(ctx, f"Fetching peak {peak_id}...")
149201
html = client.get("/peak.aspx", params={"pid": peak_id})
150202

203+
# If dump-html flag is set, print HTML and exit
204+
if ctx.obj.get("dump_html"):
205+
click.echo(html)
206+
return
207+
151208
# Parse peak data
152209
peak_obj = scraper.parse_peak_detail(html, peak_id)
153210

154211
if not peak_obj:
155-
click.echo(f"Failed to parse peak data for ID {peak_id}", err=True)
212+
_error(f"Failed to parse peak data for ID {peak_id}")
156213
raise click.Abort()
157214

158215
# Display results
159216
formatter.format_peak_detail(peak_obj, output_format)
160217

161218
except Exception as e:
162-
click.echo(f"Error: {e}", err=True)
219+
_error(str(e))
163220
raise click.Abort() from e
164221
finally:
165222
client.close()
@@ -211,7 +268,9 @@ def show(peak_id: str, output_format: str, rate_limit: float) -> None:
211268
default=2.0,
212269
help="Seconds between requests (default: 2.0)",
213270
)
271+
@click.pass_context
214272
def ascents(
273+
ctx: click.Context,
215274
peak_id: str,
216275
output_format: str,
217276
after: str | None,
@@ -245,7 +304,7 @@ def ascents(
245304

246305
# Validate mutually exclusive date filters
247306
if within and (after or before):
248-
click.echo("Error: --within cannot be combined with --after/--before", err=True)
307+
_error("--within cannot be combined with --after/--before")
249308
raise click.Abort()
250309

251310
client: PeakBaggerClient = PeakBaggerClient(rate_limit_seconds=rate_limit)
@@ -255,19 +314,24 @@ def ascents(
255314

256315
try:
257316
# Fetch ascent list page
258-
click.echo(f"Fetching ascents for peak {peak_id}...")
317+
_status(ctx, f"Fetching ascents for peak {peak_id}...")
259318
url = "/climber/PeakAscents.aspx"
260319
params = {"pid": peak_id, "sort": "ascentdate", "u": "ft", "y": "9999"}
261320
html = client.get(url, params=params)
262321

322+
# If dump-html flag is set, print HTML and exit
323+
if ctx.obj.get("dump_html"):
324+
click.echo(html)
325+
return
326+
263327
# Parse ascents
264328
ascent_list = scraper.parse_peak_ascents(html)
265329

266330
if not ascent_list:
267-
click.echo(f"No ascents found for peak ID {peak_id}", err=True)
331+
_error(f"No ascents found for peak ID {peak_id}")
268332
return
269333

270-
click.echo(f"Found {len(ascent_list)} ascents\n")
334+
_status(ctx, f"Found {len(ascent_list)} ascents\n")
271335

272336
# Apply date filters
273337
filtered_ascents = ascent_list
@@ -276,26 +340,26 @@ def ascents(
276340
period = analyzer.parse_within_period(within)
277341
after_date = datetime.now() - period
278342
filtered_ascents = analyzer.filter_by_date_range(filtered_ascents, after=after_date)
279-
click.echo(f"Filtered to {len(filtered_ascents)} ascents within {within}\n")
343+
_status(ctx, f"Filtered to {len(filtered_ascents)} ascents within {within}\n")
280344
except ValueError as e:
281-
click.echo(f"Error: {e}", err=True)
345+
_error(str(e))
282346
raise click.Abort() from e
283347
elif after or before:
284348
after_date = datetime.strptime(after, "%Y-%m-%d") if after else None
285349
before_date = datetime.strptime(before, "%Y-%m-%d") if before else None
286350
filtered_ascents = analyzer.filter_by_date_range(
287351
filtered_ascents, after=after_date, before=before_date
288352
)
289-
click.echo(f"Filtered to {len(filtered_ascents)} ascents\n")
353+
_status(ctx, f"Filtered to {len(filtered_ascents)} ascents\n")
290354

291355
# Apply metadata filters
292356
if with_gpx:
293357
filtered_ascents = [a for a in filtered_ascents if a.has_gpx]
294-
click.echo(f"Filtered to {len(filtered_ascents)} ascents with GPX tracks\n")
358+
_status(ctx, f"Filtered to {len(filtered_ascents)} ascents with GPX tracks\n")
295359

296360
if with_tr:
297361
filtered_ascents = [a for a in filtered_ascents if a.has_trip_report]
298-
click.echo(f"Filtered to {len(filtered_ascents)} ascents with trip reports\n")
362+
_status(ctx, f"Filtered to {len(filtered_ascents)} ascents with trip reports\n")
299363

300364
# Display ascent list (not statistics)
301365
# Create a simple statistics object just for formatting the list
@@ -310,10 +374,10 @@ def ascents(
310374
)
311375

312376
if len(filtered_ascents) > limit:
313-
click.echo(f"\nShowing first {limit} of {len(filtered_ascents)} ascents")
377+
_status(ctx, f"\nShowing first {limit} of {len(filtered_ascents)} ascents")
314378

315379
except Exception as e:
316-
click.echo(f"Error: {e}", err=True)
380+
_error(str(e))
317381
raise click.Abort() from e
318382
finally:
319383
client.close()
@@ -360,7 +424,9 @@ def ascents(
360424
default=2.0,
361425
help="Seconds between requests (default: 2.0)",
362426
)
427+
@click.pass_context
363428
def stats(
429+
ctx: click.Context,
364430
peak_id: str,
365431
output_format: str,
366432
after: str | None,
@@ -393,7 +459,7 @@ def stats(
393459

394460
# Validate mutually exclusive date filters
395461
if within and (after or before):
396-
click.echo("Error: --within cannot be combined with --after/--before", err=True)
462+
_error("--within cannot be combined with --after/--before")
397463
raise click.Abort()
398464

399465
client: PeakBaggerClient = PeakBaggerClient(rate_limit_seconds=rate_limit)
@@ -403,19 +469,24 @@ def stats(
403469

404470
try:
405471
# Fetch ascent list page
406-
click.echo(f"Fetching ascents for peak {peak_id}...")
472+
_status(ctx, f"Fetching ascents for peak {peak_id}...")
407473
url = "/climber/PeakAscents.aspx"
408474
params = {"pid": peak_id, "sort": "ascentdate", "u": "ft", "y": "9999"}
409475
html = client.get(url, params=params)
410476

477+
# If dump-html flag is set, print HTML and exit
478+
if ctx.obj.get("dump_html"):
479+
click.echo(html)
480+
return
481+
411482
# Parse ascents
412483
ascent_list = scraper.parse_peak_ascents(html)
413484

414485
if not ascent_list:
415-
click.echo(f"No ascents found for peak ID {peak_id}", err=True)
486+
_error(f"No ascents found for peak ID {peak_id}")
416487
return
417488

418-
click.echo(f"Found {len(ascent_list)} ascents\n")
489+
_status(ctx, f"Found {len(ascent_list)} ascents\n")
419490

420491
# Apply date filters
421492
filtered_ascents = ascent_list
@@ -424,25 +495,25 @@ def stats(
424495
period = analyzer.parse_within_period(within)
425496
after_date = datetime.now() - period
426497
filtered_ascents = analyzer.filter_by_date_range(filtered_ascents, after=after_date)
427-
click.echo(f"Analyzing {len(filtered_ascents)} ascents within {within}\n")
498+
_status(ctx, f"Analyzing {len(filtered_ascents)} ascents within {within}\n")
428499
except ValueError as e:
429-
click.echo(f"Error: {e}", err=True)
500+
_error(str(e))
430501
raise click.Abort() from e
431502
elif after or before:
432503
after_date = datetime.strptime(after, "%Y-%m-%d") if after else None
433504
before_date = datetime.strptime(before, "%Y-%m-%d") if before else None
434505
filtered_ascents = analyzer.filter_by_date_range(
435506
filtered_ascents, after=after_date, before=before_date
436507
)
437-
click.echo(f"Analyzing {len(filtered_ascents)} ascents\n")
508+
_status(ctx, f"Analyzing {len(filtered_ascents)} ascents\n")
438509

439510
# Parse reference date for seasonal analysis
440511
ref_date = None
441512
if reference_date:
442513
try:
443514
ref_date = datetime.strptime(reference_date, "%Y-%m-%d")
444515
except ValueError as e:
445-
click.echo(f"Error: Invalid reference date format: {reference_date}", err=True)
516+
_error(f"Invalid reference date format: {reference_date}")
446517
raise click.Abort() from e
447518

448519
# Calculate statistics
@@ -461,7 +532,7 @@ def stats(
461532
)
462533

463534
except Exception as e:
464-
click.echo(f"Error: {e}", err=True)
535+
_error(str(e))
465536
raise click.Abort() from e
466537
finally:
467538
client.close()
@@ -482,7 +553,8 @@ def stats(
482553
default=2.0,
483554
help="Seconds between requests (default: 2.0)",
484555
)
485-
def show_ascent(ascent_id: str, output_format: str, rate_limit: float) -> None:
556+
@click.pass_context
557+
def show_ascent(ctx: click.Context, ascent_id: str, output_format: str, rate_limit: float) -> None:
486558
"""
487559
Get detailed information about a specific ascent.
488560
@@ -500,21 +572,26 @@ def show_ascent(ascent_id: str, output_format: str, rate_limit: float) -> None:
500572

501573
try:
502574
# Fetch ascent detail page
503-
click.echo(f"Fetching ascent {ascent_id}...")
575+
_status(ctx, f"Fetching ascent {ascent_id}...")
504576
html = client.get("/climber/ascent.aspx", params={"aid": ascent_id})
505577

578+
# If dump-html flag is set, print HTML and exit
579+
if ctx.obj.get("dump_html"):
580+
click.echo(html)
581+
return
582+
506583
# Parse ascent data
507584
ascent_obj = scraper.parse_ascent_detail(html, ascent_id)
508585

509586
if not ascent_obj:
510-
click.echo(f"Failed to parse ascent data for ID {ascent_id}", err=True)
587+
_error(f"Failed to parse ascent data for ID {ascent_id}")
511588
raise click.Abort()
512589

513590
# Display results
514591
formatter.format_ascent_detail(ascent_obj, output_format)
515592

516593
except Exception as e:
517-
click.echo(f"Error: {e}", err=True)
594+
_error(str(e))
518595
raise click.Abort() from e
519596
finally:
520597
client.close()

0 commit comments

Comments (0)