diff --git a/CHANGELOG.md b/CHANGELOG.md index 4c6edc2..316a2a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,13 @@ # Changelog -## Version 13.2.3 - 2024-10-09 +## Version 13.3.0 - 2024-12-03 - Bugfix `transitions update-execution` now supports additional keyword arguments +- Added optional parameter `--async` to `predictions create` +- Added `predictions get` +- Added optional parameter `--statistics-last-n-days` to `models get-training` +- Added optional parameter `--max-results` to `datasets get-documents` +- Removed optional parameters `--in-schema` and `--out-schema` from `transitions create` and `transitions update` ## Version 13.2.2 - 2024-06-13 diff --git a/lascli/__version__.py b/lascli/__version__.py index b855d7a..458dd32 100644 --- a/lascli/__version__.py +++ b/lascli/__version__.py @@ -7,4 +7,4 @@ __maintainer_email__ = 'magnus@lucidtech.ai' __title__ = 'lucidtech-las-cli' __url__ = 'https://github.com/LucidtechAI/las-cli' -__version__ = '13.2.3' +__version__ = '13.3.0' diff --git a/lascli/parser/datasets.py b/lascli/parser/datasets.py index bb3cf0a..daa2560 100644 --- a/lascli/parser/datasets.py +++ b/lascli/parser/datasets.py @@ -321,7 +321,7 @@ def create_documents( return dict(counter) -def get_documents(las_client: Client, dataset_id, output_dir, num_threads, chunk_size): +def get_documents(las_client: Client, dataset_id, output_dir, num_threads, chunk_size, max_results): already_downloaded = set() if output_dir.exists(): for path in output_dir.iterdir(): @@ -337,6 +337,8 @@ def get_documents(las_client: Client, dataset_id, output_dir, num_threads, chunk already_downloaded_from_dataset.add(document['documentId']) else: documents.append(document) + if max_results and max_results <= len(documents): + break print(f'Found {len(already_downloaded_from_dataset)} documents already downloaded') start_time = time() @@ -468,6 +470,7 @@ def create_datasets_parser(subparsers): get_documents_parser.add_argument('output_dir', type=Path, help='Path to 
download directory') get_documents_parser.add_argument('--num-threads', default=32, type=int, help='Number of threads to use') get_documents_parser.add_argument('--chunk-size', default=100, type=int) + get_documents_parser.add_argument('--max-results', default=0, type=int) get_documents_parser.set_defaults(cmd=get_documents) create_transformation_parser = subparsers.add_parser('create-transformation') @@ -480,7 +483,7 @@ def create_datasets_parser(subparsers): "options": {} (optional) }, ... - ] + ] Examples: [{"type": "remove-duplicates", "options": {}}] ''')) diff --git a/lascli/parser/models.py b/lascli/parser/models.py index 5a35204..e52d4b0 100644 --- a/lascli/parser/models.py +++ b/lascli/parser/models.py @@ -71,8 +71,12 @@ def create_training(las_client: Client, model_id, data_bundle_ids, data_scientis ) -def get_training(las_client: Client, model_id, training_id): - return las_client.get_training(model_id=model_id, training_id=training_id) +def get_training(las_client: Client, model_id, training_id, statistics_last_n_days): + return las_client.get_training( + model_id=model_id, + training_id=training_id, + statistics_last_n_days=statistics_last_n_days, + ) def list_trainings(las_client: Client, model_id, max_results, next_token): @@ -254,6 +258,7 @@ def create_models_parser(subparsers): get_training_parser = subparsers.add_parser('get-training') get_training_parser.add_argument('model_id') get_training_parser.add_argument('training_id') + get_training_parser.add_argument('--statistics-last-n-days', type=int_range(1, 30)) get_training_parser.set_defaults(cmd=get_training) list_trainings_parser = subparsers.add_parser('list-trainings') diff --git a/lascli/parser/predictions.py b/lascli/parser/predictions.py index fb3626f..c39d2d7 100644 --- a/lascli/parser/predictions.py +++ b/lascli/parser/predictions.py @@ -8,9 +8,14 @@ def create_prediction(las_client: Client, document_id, model_id, **optional_args): + optional_args['run_async'] = 
optional_args.pop('async', None) return las_client.create_prediction(document_id, model_id, **optional_args) +def get_prediction(las_client: Client, prediction_id, **optional_args): + return las_client.get_prediction(prediction_id, **optional_args) + + def list_predictions(las_client: Client, **optional_args): return las_client.list_predictions(**optional_args) @@ -51,6 +56,7 @@ def create_predictions_parser(subparsers): {"strategy": "BEST_N_PAGES", "parameters": {"n": 3}} {"strategy": "BEST_N_PAGES", "parameters": {"n": 3, "collapse": true}} ''')) + create_predicton_parser.add_argument('--async', action='store_true', help='Create prediction async') create_predicton_parser.set_defaults(cmd=create_prediction) list_predictions_parser = subparsers.add_parser('list') @@ -61,4 +67,8 @@ def create_predictions_parser(subparsers): list_predictions_parser.add_argument('--model-id') list_predictions_parser.set_defaults(cmd=list_predictions) + get_prediction_parser = subparsers.add_parser('get') + get_prediction_parser.add_argument('prediction_id') + get_prediction_parser.set_defaults(cmd=get_prediction) + return parser diff --git a/lascli/parser/transitions.py b/lascli/parser/transitions.py index 1d8bb38..7543712 100644 --- a/lascli/parser/transitions.py +++ b/lascli/parser/transitions.py @@ -67,8 +67,6 @@ def create_transitions_parser(subparsers): create_parser = subparsers.add_parser('create') create_parser.add_argument('transition_type', choices=["docker", "manual"]) create_parser.add_argument('--parameters', '-p', type=json_path, help='path to parameters to the docker image') - create_parser.add_argument('--in-schema', type=json_path, help='path to input jsonschema') - create_parser.add_argument('--out-schema', type=json_path, help='path to output jsonschema') create_parser.add_argument('--name') create_parser.add_argument('--description') create_parser.set_defaults(cmd=create_transition) @@ -87,8 +85,6 @@ def create_transitions_parser(subparsers): 
update_parser.add_argument('transition_id') update_parser.add_argument('--name', type=nullable(str), default=NotProvided) update_parser.add_argument('--description', type=nullable(str), default=NotProvided) - update_parser.add_argument('--in-schema', type=json_path, help='Path to input jsonschema') - update_parser.add_argument('--out-schema', type=json_path, help='Path to output jsonschema') update_parser.add_argument( '--assets', type=json_path, diff --git a/requirements.txt b/requirements.txt index a35f2ec..fdcf93c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,4 @@ PyYAML>=6.0.0 argcomplete>=2.0.0 dateparser>=1.1.1 filetype>=1.0.13 -lucidtech-las~=11.2 +lucidtech-las~=11.4 diff --git a/tests/test_transitions.py b/tests/test_transitions.py index a1adc8a..d101736 100644 --- a/tests/test_transitions.py +++ b/tests/test_transitions.py @@ -6,36 +6,26 @@ ('docker', ('--parameters', str(util.transition_parameters_path()))), ('manual', ()), ]) -@pytest.mark.parametrize('in_schema', [('--in-schema', str(util.schema_path())), ()]) -@pytest.mark.parametrize('out_schema', [('--out-schema', str(util.schema_path())), ()]) -def test_transitions_create(parser, client, transition_type, in_schema, out_schema, name_and_description, parameters): +def test_transitions_create(parser, client, transition_type, name_and_description, parameters): args = [ 'transitions', 'create', transition_type, - *in_schema, - *out_schema, *name_and_description, ] util.main_parser(parser, client, args) -@pytest.mark.parametrize('in_schema', [('--in-schema', str(util.schema_path())), ()]) -@pytest.mark.parametrize('out_schema', [('--out-schema', str(util.schema_path())), ()]) def test_transitions_update( parser, client, - in_schema, - out_schema, name_and_description, ): args = [ 'transitions', 'update', service.create_transition_id(), - *in_schema, - *out_schema, *name_and_description, ] @@ -64,7 +54,7 @@ def test_transitions_update_manual( util.main_parser(parser, client, args) else: 
util.main_parser(parser, client, args) - + @pytest.mark.parametrize('image_url', [('--image-url', 'image:url'), ()]) @pytest.mark.parametrize('secret_id', [