Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- `filter_spatial`: Clarified that masking is applied using the given geometries. [#469](https://github.com/Open-EO/openeo-processes/issues/469)
- `load_stac`: Clarify handling of the `properties` parameter in the context of STAC APIs and static catalogs. [#536](https://github.com/Open-EO/openeo-processes/issues/536)
- `load_collection` and `load_stac`: Clarified that scale and offset are not applied automatically when loading the data. [#503](https://github.com/Open-EO/openeo-processes/issues/503)
- `load_uploaded_files` and `run_udf`: Clarified handling of file paths and added a `FileNotFound` exception. [#461](https://github.com/Open-EO/openeo-processes/issues/461)
- `mask`: Add missing exception `IncompatibleDataCubes` [#538](https://github.com/Open-EO/openeo-processes/issues/538)
- `mod`: Clarified behavior for y = 0
- `run_udf`: Simplified and clarified the schema for `data` - no functional change.
Expand Down
8 changes: 4 additions & 4 deletions proposals/export_workspace.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"id": "export_workspace",
"summary": "Export data to a cloud user workspace",
"description": "Exports the given processing results made available through a STAC resource (e.g., a STAC Collection) to the given user workspace. The STAC resource itself is exported with all STAC resources and assets underneath.",
"summary": "Export data to a cloud workspace",
"description": "Exports the given processing results made available through a STAC resource (e.g., a STAC Collection) to the given cloud workspace. The STAC resource itself is exported with all STAC resources and assets underneath.",
"categories": [
"export",
"stac"
Expand All @@ -10,15 +10,15 @@
"parameters": [
{
"name": "data",
"description": "The data to export to the user workspace as a STAC resource.",
"description": "The data to export to the cloud workspace as a STAC resource.",
"schema": {
"type": "object",
"subtype": "stac"
}
},
{
"name": "workspace",
"description": "The identifier of the workspace to export to.",
"description": "The identifier of the cloud workspace to export to.",
"schema": {
"type": "string",
"pattern": "^[\\w\\-\\.~]+$",
Expand Down
114 changes: 59 additions & 55 deletions proposals/load_uploaded_files.json
Original file line number Diff line number Diff line change
@@ -1,55 +1,59 @@
{
"id": "load_uploaded_files",
"summary": "Load files from the user workspace",
"description": "Loads one or more user-uploaded files from the server-side workspace of the authenticated user and returns them as a single data cube. The files must have been stored by the authenticated user on the back-end currently connected to.",
"categories": [
"cubes",
"import"
],
"experimental": true,
"parameters": [
{
"name": "paths",
"description": "The files to read. Folders can't be specified, specify all files instead. An exception is thrown if a file can't be read.",
"schema": {
"type": "array",
"subtype": "file-paths",
"items": {
"type": "string",
"subtype": "file-path",
"pattern": "^[^\r\n\\:'\"]+$"
}
}
},
{
"name": "format",
"description": "The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*.",
"schema": {
"type": "string",
"subtype": "input-format"
}
},
{
"name": "options",
"description": "The file format parameters to be used to read the files. Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options.",
"schema": {
"type": "object",
"subtype": "input-format-options"
},
"default": {},
"optional": true
}
],
"returns": {
"description": "A data cube for further processing.",
"schema": {
"type": "object",
"subtype": "datacube"
}
},
"exceptions": {
"FormatUnsuitable": {
"message": "Data can't be loaded with the requested input format."
}
}
}
{
"id": "load_uploaded_files",
"summary": "Load files from the user workspace",
"description": "Loads one or more user-uploaded files from the server-side workspace of the authenticated user and returns them as a single data cube. The files must have been stored by the authenticated user on the back-end currently connected to.",
"categories": [
"cubes",
"import"
],
"experimental": true,
"parameters": [
{
"name": "paths",
"description": "The files to read. Folders can't be specified, specify all files instead. An exception is thrown if a file can't be read.\n\nAs the workspace acts as an isolated root folder, the absolute path `/folder/file.txt` and relative paths `folder/file.txt` and `./folder/file.txt` are all equivalent. Likewise, specifying a path outside of the workspace results in a `FileNotFound` exception.",
"schema": {
"type": "array",
"subtype": "file-paths",
"items": {
"type": "string",
"subtype": "file-path",
"pattern": "^[^\r\n\\:'\"]+$"
}
}
},
{
"name": "format",
"description": "The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*.",
"schema": {
"type": "string",
"subtype": "input-format"
}
},
{
"name": "options",
"description": "The file format parameters to be used to read the files. Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options.",
"schema": {
"type": "object",
"subtype": "input-format-options"
},
"default": {},
"optional": true
}
],
"returns": {
"description": "A data cube for further processing.",
"schema": {
"type": "object",
"subtype": "datacube"
}
},
"exceptions": {
"FormatUnsuitable": {
"message": "Data can't be loaded with the requested input format."
},
"FileNotFound": {
"message": "The specified file does not exist."
}
}
}
5 changes: 4 additions & 1 deletion run_udf.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
"pattern": "^https?://"
},
{
"description": "Path to a UDF uploaded to the server.",
"description": "Path to a UDF uploaded to the user workspace.\n\nAs the workspace acts as an isolated root folder, the absolute path `/folder/file.txt` and relative paths `folder/file.txt` and `./folder/file.txt` are all equivalent. Likewise, specifying a path outside of the workspace results in a `FileNotFound` exception.",
"type": "string",
"subtype": "file-path",
"pattern": "^[^\r\n\\:'\"]+$"
Expand Down Expand Up @@ -80,6 +80,9 @@
},
"InvalidVersion": {
"message": "The specified UDF runtime version is not supported."
},
"FileNotFound": {
"message": "The specified file does not exist."
}
},
"returns": {
Expand Down
Loading