|
3 | 3 | from typing import List |
4 | 4 | from databusclient import client |
5 | 5 |
|
| 6 | +from nextcloudclient import upload |
6 | 7 |
|
7 | 8 | @click.group() |
8 | 9 | def app(): |
@@ -57,5 +58,115 @@ def download(databusuris: List[str], localdir, databus, token, authurl, clientid |
57 | 58 | ) |
58 | 59 |
|
59 | 60 |
|
@app.command(help="Upload files to Nextcloud and deploy to DBpedia Databus.")
@click.option(
    "--webdav-url", "webdav_url",
    help="WebDAV URL (e.g., https://cloud.example.com/remote.php/webdav)",
)
@click.option(
    "--remote",
    help="rclone remote name (e.g., 'nextcloud')",
)
@click.option(
    "--path",
    help="Remote path on Nextcloud (e.g., 'datasets/mydataset')",
)
@click.option(
    "--no-upload", "no_upload",
    is_flag=True,
    help="Skip file upload and use existing metadata",
)
@click.option(
    "--metadata",
    type=click.Path(exists=True),
    help="Path to metadata JSON file (required if --no-upload is used)",
)
@click.option(
    "--version-id", "version_id",
    required=True,
    help="Target databus version/dataset identifier of the form "
         "<https://databus.dbpedia.org/$ACCOUNT/$GROUP/$ARTIFACT/$VERSION>",
)
@click.option("--title", required=True, help="Dataset title")
@click.option("--abstract", required=True, help="Dataset abstract max 200 chars")
@click.option("--description", required=True, help="Dataset description")
@click.option("--license", "license_url", required=True, help="License (see dalicc.net)")
@click.option("--apikey", required=True, help="API key")
@click.argument(
    "files",
    nargs=-1,
    type=click.Path(exists=True),
)
def upload_and_deploy(webdav_url, remote, path, no_upload, metadata, version_id, title, abstract, description, license_url, apikey, files: List[str]):
    """Upload FILES to Nextcloud (unless --no-upload) and deploy them as a
    new dataset version on the DBpedia Databus.

    After the upload step, ``metadata`` is a sequence of
    ``(filename, sha256_checksum, byte_size, url)`` entries — either loaded
    from the --metadata JSON file or returned by
    ``upload.upload_to_nextcloud``.  One Databus distribution is created per
    entry, then the dataset is deployed with the given API key.

    Exits with status 1 on invalid option combinations; raises ValueError on
    a malformed checksum.
    """
    if no_upload:
        # Reuse previously produced upload metadata instead of transferring files.
        if not metadata:
            click.echo(click.style("Error: --metadata is required when using --no-upload", fg="red"))
            sys.exit(1)
        if not os.path.isfile(metadata):
            click.echo(click.style(f"Error: Metadata file not found: {metadata}", fg="red"))
            sys.exit(1)
        # NOTE: `metadata` is rebound from a path string to the parsed entries.
        with open(metadata, 'r') as f:
            metadata = json.load(f)
    else:
        if not (webdav_url and remote and path):
            click.echo(click.style("Error: --webdav-url, --remote, and --path are required unless --no-upload is used", fg="red"))
            sys.exit(1)

        click.echo(f"Uploading data to nextcloud: {remote}")
        metadata = upload.upload_to_nextcloud(files, remote, path, webdav_url)

    click.echo(f"Creating {len(metadata)} distributions")
    distributions = []
    # Idiom: enumerate replaces the manual `counter = 0` / `counter += 1` loop.
    for counter, (filename, checksum, size, url) in enumerate(metadata):
        # Expect a SHA-256 hex digest (64 chars). Reject others.
        if not isinstance(checksum, str) or len(checksum) != 64:
            # BUG FIX: report the offending filename instead of the literal "(unknown)".
            raise ValueError(f"Invalid checksum for {filename}: expected SHA-256 hex (64 chars), got '{checksum}'")
        # Derive format/compression from the filename suffixes:
        #   name         -> format none, compression none
        #   name.ttl     -> format ttl, compression none
        #   name.ttl.gz  -> format ttl, compression gz
        parts = filename.split(".")
        if len(parts) == 1:
            file_format = "none"
            compression = "none"
        elif len(parts) == 2:
            file_format = parts[-1]
            compression = "none"
        else:
            file_format = parts[-2]
            compression = parts[-1]

        distributions.append(
            create_distribution(
                url=url,
                cvs={"count": f"{counter}"},
                file_format=file_format,
                compression=compression,
                sha256_length_tuple=(checksum, size)
            )
        )

    dataset = create_dataset(
        version_id=version_id,
        title=title,
        abstract=abstract,
        description=description,
        license_url=license_url,
        distributions=distributions
    )

    click.echo(f"Deploying dataset version: {version_id}")

    # BUG FIX: the CLI parameter is `apikey`; the original `deploy(dataset, api_key)`
    # referenced an undefined name and raised NameError at deploy time.
    deploy(dataset, apikey)
    # Last element of each entry is the distribution URL.
    metadata_string = ",\n".join([entry[-1] for entry in metadata])

    click.echo(f"Successfully deployed\n{metadata_string}\nto databus {version_id}")
| 170 | + |
# Script entry point: dispatch to the click command group defined above.
if __name__ == "__main__":
    app()
0 commit comments