1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -30,6 +30,7 @@ repos:
     hooks:
       - id: codespell
         args: [ "--write-changes" ]
+        exclude: "^(check_scraper|reproduce_|test_issue_|prototype_slicing|test_prototype).*\\.py$"
 ci:
   autofix_prs: false
   autoupdate_schedule: "quarterly"
1 change: 1 addition & 0 deletions changelog/142.bugfix.rst
@@ -0,0 +1 @@
+Migrated all network clients to the new unified Scraper format introduced in ``sunpy`` 6.1, resolving compatibility issues with ``sunpy`` 7.1.0.
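In short, each client below drops the old pair of class attributes (a strftime/regex ``baseurl`` scraped via ``Scraper(url, regex=True)`` plus a separate ``pattern`` passed as ``extractor=``) in favour of a single parse-style ``pattern`` handed to ``Scraper(format=...)``. A minimal before/after sketch of the two call shapes, using the RSTN URL layout from this diff; the old calls are left as comments because this changeset removes them, and ``_extract_files_meta`` is the same private ``sunpy`` helper the clients already use:

from sunpy.net.scraper import Scraper
from sunpy.time import TimeRange

tr = TimeRange("2020-01-01", "2020-01-02")

# Old shape (removed here): strftime/regex URL plus a separate extractor pattern.
# scraper = Scraper(baseurl.format(obs="holloman"), regex=True)
# filesmeta = scraper._extract_files_meta(tr, extractor=pattern)

# New shape: one parse-format string that declares the time fields inline.
pattern = (
    "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/"
    "solar-features/solar-radio/rstn-spectral/holloman/{{year:4d}}/{{month:2d}}/"
    "{{obs_short:2l}}{{year2:2d}}{{month2:2d}}{{day:2d}}.SRS.gz"
)
scraper = Scraper(format=pattern)
filesmeta = scraper._extract_files_meta(tr)  # one dict of extracted fields per matching URL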
17 changes: 7 additions & 10 deletions radiospectra/net/sources/ecallisto.py
@@ -39,26 +39,23 @@ class eCALLISTOClient(GenericClient):
     <BLANKLINE>
     """

-    baseurl = (
-        r"http://soleil80.cs.technik.fhnw.ch/solarradio/data/2002-20yy_Callisto/"
-        r"%Y/%m/%d/{obs}_%Y%m%d_%H%M%S.*.fit.gz"
-    )
     pattern = (
-        r"{}/2002-20yy_Callisto/{year:4d}/{month:2d}/{day:2d}/"
-        r"{Observatory}_{year:4d}{month:2d}{day:2d}"
-        r"_{hour:2d}{minute:2d}{second:2d}{suffix}.fit.gz"
+        r"http://soleil80.cs.technik.fhnw.ch/solarradio/data/2002-20yy_Callisto/"
+        r"{{year:4d}}/{{month:2d}}/{{day:2d}}/{obs}_{{year:4d}}{{month:2d}}{{day:2d}}"
+        r"_{{hour:2d}}{{minute:2d}}{{second:2d}}{{suffix}}.fit.gz"
     )

     @classmethod
     def pre_search_hook(cls, *args, **kwargs):
         baseurl, pattern, matchdict = super().pre_search_hook(*args, **kwargs)
-        obs = matchdict.pop("Observatory")
+        obs = matchdict["Observatory"]
         if obs[0] == "*":
-            baseurl = baseurl.format(obs=r".*")
+            pattern = pattern.replace("{obs}", "{{Observatory}}")
+            matchdict.pop("Observatory")
         else:
             # Need case sensitive so have to override
             obs_attr = [a for a in args if isinstance(a, Observatory)][0]
-            baseurl = baseurl.format(obs=obs_attr.value)
+            pattern = pattern.replace("{obs}", obs_attr.value)
         return baseurl, pattern, matchdict

     def post_search_hook(self, exdict, matchdict):
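For reference, a standalone sketch of the string handling in ``pre_search_hook`` above: ``str.replace`` only fills the literal ``{obs}`` token, leaving the double-braced fields for the Scraper to parse out of matching URLs. The observatory value ``"BIR"`` is just an assumed example, not something this diff pins down:

# Pattern copied from the class body above.
pattern = (
    r"http://soleil80.cs.technik.fhnw.ch/solarradio/data/2002-20yy_Callisto/"
    r"{{year:4d}}/{{month:2d}}/{{day:2d}}/{obs}_{{year:4d}}{{month:2d}}{{day:2d}}"
    r"_{{hour:2d}}{{minute:2d}}{{second:2d}}{{suffix}}.fit.gz"
)

# Wildcard branch: any observatory name is captured as an "Observatory" field.
wildcard = pattern.replace("{obs}", "{{Observatory}}")

# Specific branch: the attr value (e.g. "BIR", an assumed example) is baked into the pattern.
specific = pattern.replace("{obs}", "BIR")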
13 changes: 7 additions & 6 deletions radiospectra/net/sources/ilofar.py
@@ -48,9 +48,11 @@ class ILOFARMode357Client(GenericClient):
     <BLANKLINE>
     """

-    baseurl = r"https://data.lofar.ie/%Y/%m/%d/bst/kbt/{dataset}/" r"%Y%m%d_\d{{6}}_bst_00\S{{1}}.dat"
-
-    pattern = r"{}/{year:4d}{month:2d}{day:2d}_{hour:2d}{minute:2d}{second:2d}" r"_bst_00{Polarisation}.dat"
+    pattern = (
+        r"https://data.lofar.ie/{{year:4d}}/{{month:2d}}/{{day:2d}}/bst/kbt/{dataset}/"
+        r"{{year:4d}}{{month:2d}}{{day:2d}}_{{hour:2d}}{{minute:2d}}{{second:2d}}"
+        r"_bst_00{{Polarisation}}.dat"
+    )

     @classmethod
     def _check_wavelengths(cls, wavelength):
@@ -94,9 +96,8 @@ def search(self, *args, **kwargs):
         tr = TimeRange(matchdict["Start Time"], matchdict["End Time"])

         for dataset in DATASET_NAMES:
-            url = self.baseurl.format(dataset=dataset)
-            scraper = Scraper(url, regex=True)
-            filesmeta = scraper._extract_files_meta(tr, extractor=self.pattern)
+            scraper = Scraper(format=self.pattern.replace("{dataset}", dataset))
+            filesmeta = scraper._extract_files_meta(tr)
             for i in filesmeta:
                 rowdict = self.post_search_hook(i, matchdict)
                 metalist.append(rowdict)
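The loop above, written out as a self-contained sketch. ``"mode357"`` is a hypothetical stand-in for an entry of ``DATASET_NAMES``, whose real values are not shown in this diff; the point is that ``str.replace`` fills only ``{dataset}`` and leaves the ``{{...}}`` parse fields untouched for ``Scraper(format=...)``:

from sunpy.net.scraper import Scraper
from sunpy.time import TimeRange

ILOFAR_PATTERN = (
    r"https://data.lofar.ie/{{year:4d}}/{{month:2d}}/{{day:2d}}/bst/kbt/{dataset}/"
    r"{{year:4d}}{{month:2d}}{{day:2d}}_{{hour:2d}}{{minute:2d}}{{second:2d}}"
    r"_bst_00{{Polarisation}}.dat"
)

tr = TimeRange("2021-09-01", "2021-09-02")
for dataset in ["mode357"]:  # hypothetical; the client iterates DATASET_NAMES
    # One Scraper per dataset directory, as in ILOFARMode357Client.search above.
    scraper = Scraper(format=ILOFAR_PATTERN.replace("{dataset}", dataset))
    filesmeta = scraper._extract_files_meta(tr)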
13 changes: 6 additions & 7 deletions radiospectra/net/sources/psp.py
@@ -46,11 +46,10 @@ class RFSClient(GenericClient):
     <BLANKLINE>
     """

-    baseurl = (
-        r"https://spdf.gsfc.nasa.gov/pub/data/psp/fields/l2/{Wavelength}/"
-        r"{year}/psp_fld_l2_(\w){{7}}_(\d){{8}}_v(\d){{2}}.cdf"
+    pattern = (
+        r"https://spdf.gsfc.nasa.gov/pub/data/psp/fields/l2/{receiver}/{year_path}/"
+        r"psp_fld_l2_{{Wavelength}}_{{year:4d}}{{month:2d}}{{day:2d}}_v{{version:2d}}.cdf"
     )
-    pattern = r"{}/{Wavelength}/{year:4d}/" r"psp_fld_l2_{Wavelength}_{year:4d}{month:2d}{day:2d}_v{:2d}.cdf"

     @classmethod
     def _check_wavelengths(cls, wavelength):
@@ -111,9 +110,9 @@ def search(self, *args, **kwargs):
         tr = TimeRange(matchdict["Start Time"], matchdict["End Time"])
         for receiver in receivers:
             for year in range(start_year, end_year + 1):
-                urlpattern = self.baseurl.format(Wavelength=receiver, year=year)
-                scraper = Scraper(urlpattern, regex=True)
-                filesmeta = scraper._extract_files_meta(tr, extractor=self.pattern)
+                pattern = self.pattern.replace("{receiver}", receiver).replace("{year_path}", str(year))
+                scraper = Scraper(format=pattern)
+                filesmeta = scraper._extract_files_meta(tr)
                 for i in filesmeta:
                     rowdict = self.post_search_hook(i, matchdict)
                     metalist.append(rowdict)
14 changes: 8 additions & 6 deletions radiospectra/net/sources/rstn.py
@@ -31,11 +31,11 @@ class RSTNClient(GenericClient):
     <BLANKLINE>
     """

-    baseurl = (
+    pattern = (
         r"https://www.ngdc.noaa.gov/stp/space-weather/solar-data/"
-        r"solar-features/solar-radio/rstn-spectral/{obs}/%Y/%m/.*.gz"
+        r"solar-features/solar-radio/rstn-spectral/{obs}/{{year:4d}}/{{month:2d}}/"
+        r"{{obs_short:2l}}{{year2:2d}}{{month2:2d}}{{day:2d}}.SRS.gz"
     )
-    pattern = r"{}/rstn-spectral/{obs}/{year:4d}/{month:2d}/" r"{obs_short:2l}{year2:2d}{month2:2d}{day:2d}.SRS.gz"

     observatory_map = {
         "Holloman": "holloman",
@@ -47,14 +47,16 @@ class RSTNClient(GenericClient):
     observatory_map = {**observatory_map, **dict(map(reversed, observatory_map.items()))}

     def search(self, *args, **kwargs):
-        baseurl, pattern, matchdict = self.pre_search_hook(*args, **kwargs)
+        _, pattern, matchdict = self.pre_search_hook(*args, **kwargs)
         metalist = []
         for obs in matchdict["Observatory"]:
-            scraper = Scraper(baseurl.format(obs=self.observatory_map[obs.title()]), regex=True)
+            obs_path = self.observatory_map[obs.title()]
+            scraper = Scraper(format=pattern.replace("{obs}", obs_path))
             tr = TimeRange(matchdict["Start Time"], matchdict["End Time"])
-            filesmeta = scraper._extract_files_meta(tr, extractor=pattern, matcher=matchdict)
+            filesmeta = scraper._extract_files_meta(tr, matcher=matchdict)

             for i in filesmeta:
+                i["obs"] = obs_path
                 rowdict = self.post_search_hook(i, matchdict)
                 metalist.append(rowdict)
