56 changes: 56 additions & 0 deletions .zenodo.json
@@ -0,0 +1,56 @@
{
"title": "Ag Forecasting API",
"upload_type": "software",
"description": "The purpose of this API is to serve as the backend of crop disease forecasting inferences based on real-time weather data from Wisconet weather stations.",
"creators": [
{
"name": "Oros, Maria",
"affiliation": "UW-Madison",
"orcid": "0000-0009-0006-2067-4063",
"type": "ProjectMember"
},
{
"name": "Smith, Damon",
"affiliation": "UW-Madison",
"orcid": "0000-0003-3436-3718",
"type": "ProjectMember"
},
{
"name": "McConell, Iain",
"affiliation": "UW-Madison",
"orcid": "0000-0002-9982-7088",
"type": "ProjectMember"
}
],
"contributors": [
{
"name": "University of Wisconsin - Data Science Institute",
"affiliation": "University of Wisconsin - Madison",
"type": "HostingInstitution"
}
],
"access_right": "open",
"license": "mit",
"keywords": [
"data science",
"forecasting",
"open source software",
"agriculture",
"crop disease",
"Wisconet",
"UW-DSI"
],
"version": "1.0.0",
"language": "eng",
"communities": [
{ "identifier": "university-of-wisconsin-data-science-institute" }
],
"related_identifiers": [
{
"identifier": "https://github.com/UW-Madison-DSI/ag_forecasting_api",
"relation": "isSupplementTo",
"resource_type": "software"
}
],
"grants": []
}
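Zenodo picks this file up when a GitHub release is archived, so it is worth confirming locally that it parses before tagging a release. A minimal sanity check, using only the Python standard library and assuming the file sits at the repository root, might look like:

```python
# Illustrative sanity check for .zenodo.json; not part of this PR.
import json

with open(".zenodo.json") as fh:
    meta = json.load(fh)  # raises json.JSONDecodeError if the JSON is malformed

# Spot-check a few fields Zenodo expects for a software deposit.
assert meta["upload_type"] == "software"
assert all("name" in creator for creator in meta["creators"])
print(f"OK: {meta['title']} v{meta['version']}, {len(meta['creators'])} creators")
```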
28 changes: 28 additions & 0 deletions CITATION.cff
@@ -0,0 +1,28 @@
cff-version: 1.2.0
message: "Please cite this work when using the software."
type: software
title: "Soybean ROI tool"
authors:
- family-names: "Oros"
given-names: "Maria"
orcid: "https://orcid.org/0000-0009-0006-2067-4063"
affiliation: "UW-Madison"
- family-names: "Smith"
given-names: "Damon"
orcid: "https://orcid.org/0000-0003-3436-3718"
affiliation: "UW-Madison"
- family-names: "McConnel"
given-names: "Iain"
orcid: "https://orcid.org/0000-0002-9982-7088"
affiliation: "UW-Madison"
repository-code: "https://github.com/UW-Madison-DSI/ag_forecasting_api"
url: "https://github.com/UW-Madison-DSI/ag_forecasting_api"
license: "MIT"
keywords:
- open source
- forecasting-tools
- fungicide
- agriculture
# recommended:
version: "0.1.0"
date-released: "2025-12-15"
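Since the same author list appears in both .zenodo.json and CITATION.cff, a small script can confirm the two files stay in sync. This is a sketch only, and it assumes PyYAML is available; it is not a dependency declared anywhere in this PR:

```python
# Illustrative consistency check between CITATION.cff and .zenodo.json.
# Assumes PyYAML is installed; read-only, no side effects.
import json
import yaml

with open("CITATION.cff") as fh:
    cff = yaml.safe_load(fh)
with open(".zenodo.json") as fh:
    zenodo = json.load(fh)

cff_names = {f"{a['family-names']}, {a['given-names']}" for a in cff["authors"]}
zenodo_names = {c["name"] for c in zenodo["creators"]}
print("Author lists match:", cff_names == zenodo_names)
```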
7 changes: 6 additions & 1 deletion README.md
@@ -1,8 +1,13 @@
# Open Source Agricultural Forecasting API

## Citation

[![DOI](https://zenodo.org/badge/837313087.svg)](https://doi.org/10.5281/zenodo.17459876)

## About

This API provides access to crop disease models developed by plant pathology experts at the University of Wisconsin–Madison. It uses a FastAPI-based backend and integrates weather station data from public and private sources, including mesonet weather stations and IBM Environmental Intelligence. It also supports querying Wisconet through a custom wrapper built on top of the Wisconet API.

[API](https://ag-forecasting-api.services.dsi.wisc.edu/) | [Interactive API docs](https://connect.doit.wisc.edu/pywisconet_wrapper/docs#/default/all_data_from_wisconet_query_ag_models_wrappers_wisconet_get)
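A minimal example of querying the Wisconet wrapper from Python is sketched below. The endpoint path is inferred from the interactive docs link above, and the query parameter name and response shape are assumptions, so consult the interactive docs for the authoritative schema.

```python
# Illustrative only: endpoint path inferred from the docs link above; the
# "forecasting_date" parameter name is an assumption, not the documented schema.
import requests

BASE_URL = "https://connect.doit.wisc.edu/pywisconet_wrapper"

resp = requests.get(
    f"{BASE_URL}/ag_models_wrappers/wisconet",
    params={"forecasting_date": "2025-07-01"},
    timeout=60,
)
resp.raise_for_status()
data = resp.json()
print(type(data))
```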

Table of Contents

12 changes: 6 additions & 6 deletions ag_models_wrappers/process_wisconet.py
@@ -8,7 +8,7 @@
import pickle
import traceback
from datetime import datetime, timedelta
from concurrent.futures import ProcessPoolExecutor, as_completed
from concurrent.futures import ThreadPoolExecutor, as_completed
from ag_models_wrappers.forecasting_models import (
calculate_tarspot_risk_function,
calculate_gray_leaf_spot_risk_function,
@@ -25,10 +25,10 @@
BASE_URL = "https://wisconet.wisc.edu/api/v1"
MIN_DAYS_ACTIVE = 38

MEASUREMENTS_CACHE_DIR = "station_measurements_cache"
MEASUREMENTS_CACHE_DIR = os.getenv("MEASUREMENTS_CACHE_DIR", "station_measurements_cache")
os.makedirs(MEASUREMENTS_CACHE_DIR, exist_ok=True)

STATIONS_CACHE_FILE = "wisconsin_stations_cache.csv"
STATIONS_CACHE_FILE = os.getenv("STATIONS_CACHE_FILE", "wisconsin_stations_cache.csv")
CACHE_EXPIRY_DAYS = 7
STATIONS_TO_EXCLUDE = ['MITEST1', 'WNTEST1']

@@ -286,7 +286,7 @@ async def retrieve_tarspot_all_stations_async(input_date, input_station_id=None,
# 1) Load or fetch station list, with caching
stations = None

if today.day == 1:
if today.day == 1 or not os.path.exists(STATIONS_CACHE_FILE):
url = f"https://api.wisconet.wisc.edu/api/v1/stations/"
async with session.get(url) as resp:
if resp.status == 200:
@@ -303,7 +303,7 @@ async def retrieve_tarspot_all_stations_async(input_date, input_station_id=None,
stations.to_csv(STATIONS_CACHE_FILE, index=False)
else:
stations= None
if today.day!=1 or stations==None:
if stations is None:
stations = pd.read_csv(STATIONS_CACHE_FILE)

station_ids = stations['station_id'].tolist()
@@ -325,7 +325,7 @@ async def retrieve_tarspot_all_stations_async(input_date, input_station_id=None,

# 5) Compute risks in parallel
chunks = chunk_dataframe(merged, os.cpu_count() or 1)
with ProcessPoolExecutor() as exe:
with ThreadPoolExecutor() as exe:
futures = [exe.submit(compute_risks, c) for c in chunks]
processed = [f.result() for f in as_completed(futures)]
final = pd.concat(processed, ignore_index=True)
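Both cache locations are now read from the environment when the module is imported, so a deployment can redirect them to a writable path before import. A minimal sketch, assuming the import path matches the file path shown in this diff:

```python
# Minimal sketch: set the cache locations before importing the module, since
# MEASUREMENTS_CACHE_DIR and STATIONS_CACHE_FILE are resolved at import time.
# The import path below is assumed from the file path in this diff.
import os

os.environ["MEASUREMENTS_CACHE_DIR"] = "/tmp/station_measurements_cache"
os.environ["STATIONS_CACHE_FILE"] = "/tmp/wisconsin_stations_cache.csv"

from ag_models_wrappers import process_wisconet  # env vars take effect at import
```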