diff --git a/ocli/aikp/cluster/__init__.py b/ocli/aikp/cluster/__init__.py index 24979b6..d385eba 100644 --- a/ocli/aikp/cluster/__init__.py +++ b/ocli/aikp/cluster/__init__.py @@ -30,10 +30,10 @@ 'eodata': None, 'ai_results': '/optoss/out', 'stack_results': '/optoss/stack', - 'slave': None, - 'slave_path': None, - 'master': None, - 'master_path': None, + 'subordinate': None, + 'subordinate_path': None, + 'main': None, + 'main_path': None, } RECIPE_CLUSTER_TPL = { @@ -282,9 +282,9 @@ def validate_task(task, key)->(bool,List[str]): errors.append('Not found') elif not os.access(value, os.W_OK): errors.append('Not writable') - elif key in ['master', 'slave'] and not value: + elif key in ['main', 'subordinate'] and not value: errors.append(REQUIRED) - elif key in ['master_path', 'slave_path']: + elif key in ['main_path', 'subordinate_path']: if value is None: errors.append(REQUIRED) elif _eodata is None: @@ -302,7 +302,7 @@ def task_set(task: Task, d: Dict): if k in d: value = d[k] # type: string """set new value""" - if k in ['master', 'slave']: + if k in ['main', 'subordinate']: cache_file_name = _cache_pairs_file_name(task) try: diff --git a/ocli/aikp/cluster/sentinel_1/__init__.py b/ocli/aikp/cluster/sentinel_1/__init__.py index b15c538..91f2a05 100644 --- a/ocli/aikp/cluster/sentinel_1/__init__.py +++ b/ocli/aikp/cluster/sentinel_1/__init__.py @@ -75,14 +75,14 @@ def get_stack_path(task:Task,full=False): if _e: e.append("stack_results: "+",".join(_e)) - for k in ['master', 'slave', 'swath', 'firstBurstIndex', 'lastBurstIndex']: + for k in ['main', 'subordinate', 'swath', 'firstBurstIndex', 'lastBurstIndex']: _e = sentinel_1.validate_task(task, k)[1] if _e: e.append(f"{k} "+",".join(_e)) if e: raise AssertionError(','.join(e)) - master_id = s1_prod_id(task.config['master']) - slave_id = s1_prod_id(task.config['slave']) - snap_name = f"{master_id}_{slave_id}_{task.config['swath']}" + \ + main_id = s1_prod_id(task.config['main']) + subordinate_id = s1_prod_id(task.config['subordinate']) + snap_name = f"{main_id}_{subordinate_id}_{task.config['swath']}" + \ f"_{task.config['firstBurstIndex']}_{task.config['lastBurstIndex']}" # noqa return str(Path(task.config['stack_results'], snap_name).absolute()) if full else snap_name \ No newline at end of file diff --git a/ocli/cli/ai_options.py b/ocli/cli/ai_options.py index 2c848b8..db5acbb 100644 --- a/ocli/cli/ai_options.py +++ b/ocli/cli/ai_options.py @@ -199,7 +199,7 @@ def option_stack_vis(f): --------------------------- --------- ------------ ---------- -------------------- raw 3 b1 b2 b3 usable for histograms (--hist) composite 3 b1 b2 b3 b1-coh, b2-VV, b3-VH (same as raw) - sar 3 b1 (b2+b3)/2 b2-b3 b1-coh, b2-master, b3-slave (use the same polarization) + sar 3 b1 (b2+b3)/2 b2-b3 b1-coh, b2-main, b3-subordinate (use the same polarization) simple 2 b1 b2 b1/b2 (VV, VH, VV/VH) rgb-ratio 2 b1 2*b2 (b1/b2)/100 (VV, 2VH, VV/VH/100) rgb-diff 2 b1 b2 b1-b2 (VH, VV, VH-VV) @@ -217,9 +217,9 @@ def option_stack_vis(f): --vis: bands R G B comment --------------------------- --------- ------------ ---------- -------------------- raw 3 b1 b2 b3 useful for histograms (--hist) - sar 3 b1 (lg(b2)+lg(b3))/2 lg(b2)-lg(b3) b1-coh, b2-master, b3-slave (use the same polarization) - composite-u 3 lg(b1) lg(b2) b3 b1-master, b2-slave, b3-coh (use the same polarization) - composite 3 b1 lg(b2) lg(b3) b1-coh, b2-slave, b3-master (use the same polarization) + sar 3 b1 (lg(b2)+lg(b3))/2 lg(b2)-lg(b3) b1-coh, b2-main, b3-subordinate (use the same polarization) + 
composite-u 3 lg(b1) lg(b2) b3 b1-main, b2-subordinate, b3-coh (use the same polarization) + composite 3 b1 lg(b2) lg(b3) b1-coh, b2-subordinate, b3-main (use the same polarization) false-color 2 b1=VH b2=VV false-color-enhanced - same params as for false-color diff --git a/ocli/cli/bucket.py b/ocli/cli/bucket.py index 3b41bbb..7cfd9c4 100644 --- a/ocli/cli/bucket.py +++ b/ocli/cli/bucket.py @@ -23,9 +23,9 @@ def _task_ms(task: Task) -> (str, str): try: task.resolve() - _, _m = task.get_valid_key('master') + _, _m = task.get_valid_key('main') if task.kind == 'cluster': - _, _s = task.get_valid_key('slave') + _, _s = task.get_valid_key('subordinate') else: _s = None return _m, _s @@ -34,7 +34,7 @@ def _task_ms(task: Task) -> (str, str): return None, None -def _bkt_list(repo: Repo, master: str, slave: str, geometry: Polygon, fit: int) -> (GeoDataFrame, list): +def _bkt_list(repo: Repo, main: str, subordinate: str, geometry: Polygon, fit: int) -> (GeoDataFrame, list): """ list avaliable buckets""" _df = pairs.load_from_cache(cache_file_name=(_cache_pairs_file_name(repo))) try: @@ -53,7 +53,7 @@ def _bkt_list(repo: Repo, master: str, slave: str, geometry: Polygon, fit: int) raise RuntimeError(e) headers = ['#', 'bucket', 'mean fit', 'from', 'to', 'Cnt'] - if master or slave: + if main or subordinate: def _get_bucket_mytitle(t: str): _m = _bk.loc[_bk['title'] == t] @@ -61,8 +61,8 @@ def _get_bucket_mytitle(t: str): return _m.iloc[0]['bucket'] return None - _m = _get_bucket_mytitle(master) - _s = _get_bucket_mytitle(slave) + _m = _get_bucket_mytitle(main) + _s = _get_bucket_mytitle(subordinate) def _ms(b): _x = 'm' if _m == b else ' ' @@ -153,7 +153,7 @@ def bucket_cli(): @option_locate_task @option_roi @click.option('--check', 'check', is_flag=True, required=False, default=False, - help='Check master-slave data exists') + help='Check main-subordinate data exists') @click.option('--update', '-u', 'reload', is_flag=True, required=False, default=False, help='force products load') @click.argument('bucket_name', metavar='') @products_list_options(def_col=None, def_sort=['+startDate']) @@ -200,7 +200,7 @@ def bkt_info(ctx, repo, task: Task, roi_id, bucket_name, reload, less, sort, lim if task.loaded: output.comment(f"Task: {task.name}") if 'task' in cols: - output.comment(f"INFO: 'task' column: 'm' - used as master in task, 's' - used as slave in task ") + output.comment(f"INFO: 'task' column: 'm' - used as main in task, 's' - used as subordinate in task ") if 'exists' in cols: output.comment(f"INFO: 'exists' column: '+' - full data loaded, '~' - metadata only loaded") @@ -215,7 +215,7 @@ def bkt_info(ctx, repo, task: Task, roi_id, bucket_name, reload, less, sort, lim @option_locate_task @option_roi @click.option('--check', 'check', is_flag=True, required=False, default=False, - help='Check master-slave data exists') + help='Check main-subordinate data exists') # @products_list_options(def_col=['productId', 'startDate', 'title'], def_sort=['+startDate']) @option_less @click.argument('product_id', metavar="PRODUCT_ID") @@ -235,7 +235,7 @@ def bkt_info(repo: Repo, task: Task, roi_id, less, _id, _roi = resolve_roi(roi_id, repo) _m, _s = _task_ms(task) geometry = _roi['geometry'] - output.comment(f"active task master: {_m}") + output.comment(f"active task main: {_m}") _df = pairs.load_from_cache(cache_file_name=(_cache_pairs_file_name(repo))) _df = _df.set_index('productId') @@ -272,11 +272,11 @@ def bkt_info(repo: Repo, task: Task, roi_id, less, if _m in _df.index: _df.loc[_m, 'task'] = 'm' else: - 
output.warning('Current task master not found in bucket') + output.warning('Current task main not found in bucket') if _s in _df.index: _df.loc[_s, 'task'] = 's' else: - output.warning('Current task slave not found in bucket') + output.warning('Current task subordinate not found in bucket') _df = _df.reset_index() _e, eodata = task.get_valid_key('eodata') @@ -301,7 +301,7 @@ def _ch_fs(b): headers = ['#'] + cols output.table(_df, headers=headers, ) - # if master or slave: + # if main or subordinate: # # def _get_bucket_mytitle(t: str): # _m = _bk.loc[_bk['title'] == t] @@ -309,8 +309,8 @@ def _ch_fs(b): # return _m.iloc[0]['bucket'] # return None # - # _m = _get_bucket_mytitle(master) - # _s = _get_bucket_mytitle(slave) + # _m = _get_bucket_mytitle(main) + # _s = _get_bucket_mytitle(subordinate) # # def _ms(b): # _x = 'm' if _m == b else ' ' @@ -345,8 +345,8 @@ def bkt_list(ctx: click.Context, repo: Repo, task: Task, roi_id, reload, fit): _t, headers = _bkt_list(repo, geometry=_roi['geometry'], fit=fit, - master=_m, - slave=_s, + main=_m, + subordinate=_s, ) except RuntimeError as e: raise click.UsageError(str(e)) diff --git a/ocli/cli/state.py b/ocli/cli/state.py index e521f01..67756d3 100644 --- a/ocli/cli/state.py +++ b/ocli/cli/state.py @@ -625,7 +625,7 @@ def resolve(self): @ensure_task_loaded def validate(self, key): config = self.config - # TODO validate master-slave is in + # TODO validate main-subordinate is in if key not in config: raise click.BadArgumentUsage(f'Could not validate Key {key}: key not found') value = config[key] @@ -687,10 +687,10 @@ def get_validation_data_frame(self) -> 'gpd.pd.DataFrame': return _l @ensure_task_loaded - def get_geometry_fit_data_frame(self, geometry, key='master') -> 'gpd.pd.DataFrame': + def get_geometry_fit_data_frame(self, geometry, key='main') -> 'gpd.pd.DataFrame': # TODO use validate_all - if key not in ['master', 'slave']: - raise AssertionError("key: only 'master' or 'salve are supported'") + if key not in ['main', 'subordinate']: + raise AssertionError("key: only 'main' or 'salve are supported'") for k in ['eodata', key + '_path']: e = self.validate(k) if e: @@ -707,8 +707,8 @@ def get_stack_path(self, full=False): format is : - cluster Sentinel-1: ____ - rvi Sentinel-1: ___ + cluster Sentinel-1: ____ + rvi Sentinel-1: ___ if full==True os.join with with """ @@ -730,19 +730,19 @@ def get_stack_path(self, full=False): # kind = self.config['kind'] # source = self.config['source'] # if kind == 'cluster' and source == 'Sentinel-1': - # e = self.validate_all(['stack_results', 'master', 'slave', 'swath', 'firstBurstIndex', 'lastBurstIndex']) + # e = self.validate_all(['stack_results', 'main', 'subordinate', 'swath', 'firstBurstIndex', 'lastBurstIndex']) # if e: # raise AssertionError(','.join(e)) - # master_id = s1_prod_id(self.config['master']) - # slave_id = s1_prod_id(self.config['slave']) - # snap_name = f"{master_id}_{slave_id}_{self.config['swath']}" + \ + # main_id = s1_prod_id(self.config['main']) + # subordinate_id = s1_prod_id(self.config['subordinate']) + # snap_name = f"{main_id}_{subordinate_id}_{self.config['swath']}" + \ # f"_{self.config['firstBurstIndex']}_{self.config['lastBurstIndex']}" # noqa # elif kind == 'rvi' and source == 'Sentinel-1': - # e = self.validate_all(['stack_results', 'master', 'swath', 'firstBurstIndex', 'lastBurstIndex']) + # e = self.validate_all(['stack_results', 'main', 'swath', 'firstBurstIndex', 'lastBurstIndex']) # if e: # raise AssertionError(','.join(e)) - # master_id = 
s1_prod_id(self.config['master']) - # snap_name = f"{master_id}_{self.config['swath']}" + \ + # main_id = s1_prod_id(self.config['main']) + # snap_name = f"{main_id}_{self.config['swath']}" + \ # f"_{self.config['firstBurstIndex']}_{self.config['lastBurstIndex']}" # noqa # else: # raise AssertionError(f'Could not build path for task config kind "{kind}" and source {source} ') @@ -750,23 +750,23 @@ def get_stack_path(self, full=False): @ensure_task_loaded def _compose_friendly_keys(self, roi_name): - e, master = self.get_valid_key('master') - prod_fields = {'m_' + k: v for (k, v) in parse_title(master).items()} - prod_fields = {**prod_fields, **{'m_' + k: v for (k, v) in parse_title(master).items()}} - prod_fields['m_id'] = s1_prod_id(master) + e, main = self.get_valid_key('main') + prod_fields = {'m_' + k: v for (k, v) in parse_title(main).items()} + prod_fields = {**prod_fields, **{'m_' + k: v for (k, v) in parse_title(main).items()}} + prod_fields['m_id'] = s1_prod_id(main) prod_fields['s_id'] = '' if self.kind == 'cluster': - e, slave = self.get_valid_key('slave') - prod_fields = {**prod_fields, **{'s_' + k: v for (k, v) in parse_title(slave).items()}} - prod_fields['s_id'] = s1_prod_id(slave) + e, subordinate = self.get_valid_key('subordinate') + prod_fields = {**prod_fields, **{'s_' + k: v for (k, v) in parse_title(subordinate).items()}} + prod_fields['s_id'] = s1_prod_id(subordinate) fields = {**self.config, **prod_fields} fields['predictor'] = fields['predictor'].split('/')[-1] fields['roi'] = roi_name return fields def format_pattern(self, key, roi_name): - ms = ['master', 'slave'] if self.kind == 'cluster' else ['master'] + ms = ['main', 'subordinate'] if self.kind == 'cluster' else ['main'] e = self.validate_all(ms) if e: raise AssertionError(','.join(e)) diff --git a/ocli/cli/task.py b/ocli/cli/task.py index 9d9045c..2dd255a 100644 --- a/ocli/cli/task.py +++ b/ocli/cli/task.py @@ -369,11 +369,11 @@ def task_preview(repo: Repo, task: Task, roi_id, swath, basemap, # show by product lists cache_file_name = _cache_pairs_file_name(repo) _df = pairs.load_from_cache(cache_file_name=cache_file_name) - _e, m = task.get_valid_key('master') + _e, m = task.get_valid_key('main') s = None title = f"M: {m}" - if task.config.get('slave') is not None: - _e, s = task.get_valid_key('slave') + if task.config.get('subordinate') is not None: + _e, s = task.get_valid_key('subordinate') title += f"\nS: {s}" df = _df[_df['title'].isin([m, s])] preview_roi(df, _roi, title=title, zoom=zoom_basemap, basemap=basemap) @@ -381,28 +381,28 @@ def task_preview(repo: Repo, task: Task, roi_id, swath, basemap, else: # thos requires metadata to be loaded try: - _e, m = task.get_valid_key('master') + _e, m = task.get_valid_key('main') title = f"M: {m}" - df_master = task.get_geometry_fit_data_frame(_roi.geometry, key='master') - df_master['master'] = True - fit = df_master[['IW1_fit', 'IW2_fit', 'IW3_fit']] + df_main = task.get_geometry_fit_data_frame(_roi.geometry, key='main') + df_main['main'] = True + fit = df_main[['IW1_fit', 'IW2_fit', 'IW3_fit']] output.comment(f"MASTER ROI '{_roi['name']}': coverage by Swath/Burst\n\n") output.table(fit, headers=['burst', 'IW1', 'IW2', 'IW3']) output.comment("\n\n") output.table(gpd.pd.DataFrame(fit.sum()), headers=['Swath', 'total fit']) - df = df_master - df.crs = df_master.crs - if task.config.get('slave') is not None: - _e, s = task.get_valid_key('slave') + df = df_main + df.crs = df_main.crs + if task.config.get('subordinate') is not None: + _e, s = 
task.get_valid_key('subordinate') title += f"\nS: {s}" if _e: - raise click.BadParameter(f'Task key "slave" is invalid: {_e}') - df_slave = task.get_geometry_fit_data_frame(_roi.geometry, key='slave') - df_slave['master'] = False - fit = df_slave[['IW1_fit', 'IW2_fit', 'IW3_fit']] + raise click.BadParameter(f'Task key "subordinate" is invalid: {_e}') + df_subordinate = task.get_geometry_fit_data_frame(_roi.geometry, key='subordinate') + df_subordinate['main'] = False + fit = df_subordinate[['IW1_fit', 'IW2_fit', 'IW3_fit']] output.comment(f"SLAVE ROI '{_roi['name']}': coverage by Swath/Burst\n\n") - df = gpd.pd.concat([df, df_slave]) - df.crs = df_master.crs + df = gpd.pd.concat([df, df_subordinate]) + df.crs = df_main.crs output.table(fit, headers=['burst', 'IW1', 'IW2', 'IW3']) output.comment("\n\n") @@ -451,8 +451,8 @@ def task_info(repo: Repo, task: Task, swath, roi_id, """ show task information \b - --preview requires master and slave to be in product list - --swath option requires master and slave metadata to be loaded + --preview requires main and subordinate to be in product list + --swath option requires main and subordinate metadata to be loaded """ # TODO -q option to return just valididty status @@ -492,17 +492,17 @@ def task_info(repo: Repo, task: Task, swath, roi_id, elif swath: _id, _roi = resolve_roi(roi_id, repo) try: - df_master = task.get_geometry_fit_data_frame(_roi.geometry, key='master') - df_master['master'] = True - fit = df_master[['IW1_fit', 'IW2_fit', 'IW3_fit']] + df_main = task.get_geometry_fit_data_frame(_roi.geometry, key='main') + df_main['main'] = True + fit = df_main[['IW1_fit', 'IW2_fit', 'IW3_fit']] output.comment(f"MASTER ROI '{_roi['name']}': coverage by Swath/Burst\n\n") output.table(fit, headers=['burst', 'IW1', 'IW2', 'IW3']) output.comment("\n\n") output.table(gpd.pd.DataFrame(fit.sum()), headers=['Swath', 'total fit']) - if task.config.get('slave') is not None: - df_slave = task.get_geometry_fit_data_frame(_roi.geometry, key='slave') - df_slave['master'] = False - fit = df_slave[['IW1_fit', 'IW2_fit', 'IW3_fit']] + if task.config.get('subordinate') is not None: + df_subordinate = task.get_geometry_fit_data_frame(_roi.geometry, key='subordinate') + df_subordinate['main'] = False + fit = df_subordinate[['IW1_fit', 'IW2_fit', 'IW3_fit']] output.comment(f"SLAVE ROI '{_roi['name']}': coverage by Swath/Burst\n\n") output.table(fit, headers=['burst', 'IW1', 'IW2', 'IW3']) output.comment("\n\n") @@ -541,19 +541,19 @@ def task_info(repo: Repo, task: Task, swath, roi_id, except AssertionError as e: output.error(f"ai_results {e}") if task.kind == 'cluster': - if not task.validate_all(['master', 'slave']): + if not task.validate_all(['main', 'subordinate']): S1_cycle_T = 24 * 3600 * 12 - m = parse_title(task.config['master'])['completionDate'] - s = parse_title(task.config['slave'])['completionDate'] + m = parse_title(task.config['main'])['completionDate'] + s = parse_title(task.config['subordinate'])['completionDate'] cycle_dt = abs((m - s) / timedelta(seconds=1)) % S1_cycle_T cycle_dt = cycle_dt if (cycle_dt <= S1_cycle_T / 2) else S1_cycle_T - cycle_dt if cycle_dt > 0.1: output.warning( - f'timedelta: master-slave timedelta {cycle_dt} > 0.1. Acquisitions could be misaligned') + f'timedelta: main-subordinate timedelta {cycle_dt} > 0.1. Acquisitions could be misaligned') else: output.comment( - f'timedelta: master-slave timedelta {cycle_dt} <= 0.1. Acquisitions could be aligned') + f'timedelta: main-subordinate timedelta {cycle_dt} <= 0.1. 
Acquisitions could be aligned') else: output.comment("bucket delta") output.comment(f"errors: {_l['error'].notna().sum()}") @@ -657,8 +657,8 @@ def task_get(repo: Repo, task: Task, key, raw, roi_id, info): returns parsed or extended values transformed keys: - master - returns master product ID - slave - returns master product ID + main - returns main product ID + subordinate - returns main product ID Example: ls -lastr $( ocli -v INFO t get-key ai_results --parsed ) outputs content of active task ai_results directory @@ -681,9 +681,9 @@ def task_get(repo: Repo, task: Task, key, raw, roi_id, info): value = task.get_ai_results_path(full=True) elif key == 'stack_results': value = task.get_stack_path(full=True) - elif key in ['master', 'slave']: + elif key in ['main', 'subordinate']: value = s1_prod_id(task.config[key]) - elif key in ['master_path', 'slave_path']: + elif key in ['main_path', 'subordinate_path']: value = _local_eodata_relative_path(task.config['eodata'], task.config[key]) elif key == 'friendly_name': _, roi = resolve_roi(roi_id, repo) @@ -761,21 +761,21 @@ def rsync_meta(options: List[str], remote_path: str, local_path: str): @cli_task.command('get-data') @option_locate_task @click.option('-d', '--data', 'data', is_flag=True, default=False, help='load meta-data and data') -@click.option('-m', '--master', is_flag=True, default=False, help='load master') -@click.option('-s', '--slave', is_flag=True, default=False, help='load slave') +@click.option('-m', '--main', is_flag=True, default=False, help='load main') +@click.option('-s', '--subordinate', is_flag=True, default=False, help='load subordinate') @click.option('--dry-run', is_flag=True, default=False, help='dry-run, do not perform actual download') @pass_task @ensure_task_resolved -def task_get(task: Task, data, master, slave, dry_run): +def task_get(task: Task, data, main, subordinate, dry_run): """ load satellite date into task.eodata directory""" # Zsh and other crazy shells extends patterns passed arguments, so be shure rsync runs in BASH!!! 
if task.config.get('source') != 'Sentinel-1': raise click.BadParameter(f"Only Sentinel-1 supported for now, task source is {task.get_valid_key('source')}") - if not master and not slave: - raise click.BadOptionUsage('master', "at least on of --master or --salve option is required") - ks = ['eodata', 'master'] + if not main and not subordinate: + raise click.BadOptionUsage('main', "at least on of --main or --salve option is required") + ks = ['eodata', 'main'] if task.kind == 'cluster': - ks.append('slave') + ks.append('subordinate') for k in ks: e = task.validate(k) if e is not None: @@ -815,32 +815,32 @@ def _rsync_meta(key, task: Task, options): opts.append("--exclude 'preview/*'") opts.append("--exclude 'annotation/calibration/*'") opts.append("--exclude '*.tiff'") - if master: - _rsync_meta('master', task, opts) - if slave: - _rsync_meta('slave', task, opts) + if main: + _rsync_meta('main', task, opts) + if subordinate: + _rsync_meta('subordinate', task, opts) # ######################### LS ############################################# @cli_task.command('ls') @option_locate_task -@click.option('-m', '--master', is_flag=True, default=False, help='list master directory') -@click.option('-s', '--slave', is_flag=True, default=False, help='list slave directory') +@click.option('-m', '--main', is_flag=True, default=False, help='list main directory') +@click.option('-s', '--subordinate', is_flag=True, default=False, help='list subordinate directory') @click.option('-a', '--list_all', is_flag=True, default=False, help='list all task directories') -@click.option('--ai', 'ai_results', is_flag=True, default=False, help='list slave directory') -@click.option('--stack', 'stack_results', is_flag=True, default=False, help='list slave directory') +@click.option('--ai', 'ai_results', is_flag=True, default=False, help='list subordinate directory') +@click.option('--stack', 'stack_results', is_flag=True, default=False, help='list subordinate directory') @click.option('-t', '--terse', is_flag=True, default=False, help='terse output') @pass_task @ensure_task_resolved -def task_ls(task: Task, master, slave, ai_results, stack_results, terse, list_all): - """ list content of task master or slave directory""" +def task_ls(task: Task, main, subordinate, ai_results, stack_results, terse, list_all): + """ list content of task main or subordinate directory""" def comment(str): if not terse: output.comment(str) e, eo_data = task.get_valid_key('eodata') - if not any([master, slave, ai_results, stack_results, list_all]): + if not any([main, subordinate, ai_results, stack_results, list_all]): list_all = terse = True if terse: cmd = ['du', '-shc'] @@ -850,20 +850,20 @@ def comment(str): raise click.BadArgumentUsage(f"Task config key 'eodata' is invalid, reason: {','.join(e)}") paths = [] _, kind = task.get_valid_key('kind') - if list_all or master: - e, _m = task.get_valid_key('master_path') + if list_all or main: + e, _m = task.get_valid_key('main_path') if e: - raise click.BadArgumentUsage(f"Task config key 'master_path' is invalid, reason: {','.join(e)}") + raise click.BadArgumentUsage(f"Task config key 'main_path' is invalid, reason: {','.join(e)}") _p = _local_eodata_relative_path(eo_data, _m) - comment(f"master path: {_p}\n\n") + comment(f"main path: {_p}\n\n") paths += [_p] - if kind in ['cluster'] and (list_all or slave): + if kind in ['cluster'] and (list_all or subordinate): - e, _s = task.get_valid_key('slave_path') + e, _s = task.get_valid_key('subordinate_path') if e: - raise click.BadArgumentUsage(f"Task 
config key 'slave_path' is invalid, reason: {','.join(e)}") + raise click.BadArgumentUsage(f"Task config key 'subordinate_path' is invalid, reason: {','.join(e)}") _p = _local_eodata_relative_path(eo_data, _s) - comment(f"master path: {_p}\n\n") + comment(f"main path: {_p}\n\n") paths += [_p] if list_all or ai_results: try: @@ -892,11 +892,11 @@ def comment(str): @cli_task.command('clear') @option_yes @click.option('-d', '--data', 'data', is_flag=True, default=False, help='clear meta-data and data') -@click.option('-m', '--master', is_flag=True, default=False, help='clear master') -@click.option('-s', '--slave', is_flag=True, default=False, help='clear slave') +@click.option('-m', '--main', is_flag=True, default=False, help='clear main') +@click.option('-s', '--subordinate', is_flag=True, default=False, help='clear subordinate') @pass_task @ensure_task_resolved -def task_clear(task: Task, master, slave, data, yes): +def task_clear(task: Task, main, subordinate, data, yes): """ delete task data and results""" e, eo_data = task.get_valid_key('eodata') @@ -914,11 +914,11 @@ def __clear_data(key): raise click.UsageError(f"{er}") if data & yes_or_confirm(yes, f'Remove all product data for task {task.name}?'): - if master: - __clear_data('master_path') - if slave: - __clear_data('slave_path') - if master or slave: + if main: + __clear_data('main_path') + if subordinate: + __clear_data('subordinate_path') + if main or subordinate: # TODO clean snap and AI out data output.comment("Not implemented - remove SNAP intermediate data") output.comment("Not implemented - remove AI out data") @@ -1071,14 +1071,14 @@ def task_run_stack_sarpy(repo: Repo, task: Task, yes, dry_run, decimation, no_cl if single: click.get_current_context().invoke( single_stack, - master=_local_eodata_relative_path(_eodata, task.config['master_path']), + main=_local_eodata_relative_path(_eodata, task.config['main_path']), **kw ) else: click.get_current_context().invoke( full_stack, - master=_local_eodata_relative_path(_eodata, task.config['master_path']), - slave=_local_eodata_relative_path(_eodata, task.config['slave_path']), + main=_local_eodata_relative_path(_eodata, task.config['main_path']), + subordinate=_local_eodata_relative_path(_eodata, task.config['subordinate_path']), **kw ) p0 = perf_counter() - p0 diff --git a/ocli/preview/__init__.py b/ocli/preview/__init__.py index 4e29956..a187e26 100644 --- a/ocli/preview/__init__.py +++ b/ocli/preview/__init__.py @@ -111,8 +111,8 @@ def preview_roi_swath(df, roi, title='', zoom=8, basemap=None, roi_crs={'init': continue df.set_geometry(_iw, inplace=True) iw = df[df[_iw].notna()].to_crs(crs_proj4) - for i, v in enumerate([[True, 'master'], [False, 'slave']]): - _d = iw[iw['master'] == v[0]] + for i, v in enumerate([[True, 'main'], [False, 'subordinate']]): + _d = iw[iw['main'] == v[0]] if burst_range: _d = _d.iloc[burst_range[0] - 1:burst_range[1]] diff --git a/ocli/project/stack.py b/ocli/project/stack.py index 12129d2..f866a33 100644 --- a/ocli/project/stack.py +++ b/ocli/project/stack.py @@ -6,7 +6,7 @@ def task_stack_snap(task: Task, dry_run, gpt_cache,cmd_dir,log): # TODO http://remote-sensing.eu/preprocessing-of-sentinel-1-sar-data-via-snappy-python-module/ - """ Run master-slave Stacking + """ Run main-subordinate Stacking """ snap_path = task.get_stack_path(full=True) @@ -24,8 +24,8 @@ def task_stack_snap(task: Task, dry_run, gpt_cache,cmd_dir,log): '--firstBurstIndex', task.config['firstBurstIndex'], '--lastBurstIndex', task.config['lastBurstIndex'], - '--master', 
_local_eodata_relative_path(_eodata, task.config['master_path']),
-        '--slave', _local_eodata_relative_path(_eodata, task.config['slave_path']),
+        '--main', _local_eodata_relative_path(_eodata, task.config['main_path']),
+        '--subordinate', _local_eodata_relative_path(_eodata, task.config['subordinate_path']),
     ]
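
The cluster recipe hunk at the top keeps the validation rules for the renamed keys: 'main'/'subordinate' must be set, and 'main_path'/'subordinate_path' additionally need a resolvable 'eodata' root. A loose sketch of that rule set, assuming a plain config dict; the REQUIRED constant and the "eodata is not set" message stand in for whatever validate_task actually reports.

from typing import Dict, List, Tuple

REQUIRED = 'Required'  # same meaning as the constant used by validate_task

def validate_pair_keys(config: Dict) -> List[Tuple[str, str]]:
    """Collect (key, reason) problems for the renamed pair keys, loosely mirroring validate_task."""
    problems = []
    for key in ('main', 'subordinate'):
        if not config.get(key):
            problems.append((key, REQUIRED))
    for key in ('main_path', 'subordinate_path'):
        if config.get(key) is None:
            problems.append((key, REQUIRED))
        elif config.get('eodata') is None:
            # without an eodata root the path cannot be resolved locally
            problems.append((key, 'eodata is not set'))
    return problems

print(validate_pair_keys({'main': 'S1A_EXAMPLE_TITLE', 'subordinate': None,
                          'main_path': None, 'subordinate_path': '/eodata/example', 'eodata': None}))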
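
The sentinel_1 get_stack_path hunk shows how the stack directory name is composed from the renamed keys: <main_id>_<subordinate_id>_<swath>_<firstBurstIndex>_<lastBurstIndex>, optionally joined onto stack_results. A self-contained sketch of that composition; prod_id below is a stand-in for ocli's s1_prod_id.

from pathlib import Path

def stack_name(config: dict, prod_id) -> str:
    """Compose '<main_id>_<subordinate_id>_<swath>_<first>_<last>' as get_stack_path does."""
    main_id = prod_id(config['main'])
    subordinate_id = prod_id(config['subordinate'])
    return (f"{main_id}_{subordinate_id}_{config['swath']}"
            f"_{config['firstBurstIndex']}_{config['lastBurstIndex']}")

def stack_path(config: dict, prod_id, full: bool = False) -> str:
    """Return the bare name, or the absolute path under stack_results when full=True."""
    name = stack_name(config, prod_id)
    return str(Path(config['stack_results'], name).absolute()) if full else name

# usage with a stand-in id function (the real id extraction lives in ocli)
cfg = {'main': 'S1A_MAIN_TITLE', 'subordinate': 'S1B_SUB_TITLE', 'swath': 'IW2',
       'firstBurstIndex': 3, 'lastBurstIndex': 7, 'stack_results': '/optoss/stack'}
print(stack_path(cfg, prod_id=lambda title: title[:8], full=True))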
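
The --vis help table above defines the 'sar' composite as R=b1 (coherence), G=(lg(b2)+lg(b3))/2, B=lg(b2)-lg(b3), where b2/b3 are the main and subordinate backscatter bands in the same polarization. A NumPy sketch of that band math, reading lg as log10 and leaving out the per-band stretching a real renderer applies.

import numpy as np

def sar_composite(coh: np.ndarray, main_band: np.ndarray, subordinate_band: np.ndarray) -> np.ndarray:
    """'sar' visualisation: R=coherence, G=mean of log backscatter, B=log ratio."""
    lg_m = np.log10(main_band)
    lg_s = np.log10(subordinate_band)
    r = coh
    g = (lg_m + lg_s) / 2.0
    b = lg_m - lg_s
    return np.dstack([r, g, b])  # HxWx3, still needs contrast stretching for display

rgb = sar_composite(np.random.rand(4, 4),
                    np.random.rand(4, 4) + 0.1,   # keep values positive for log10
                    np.random.rand(4, 4) + 0.1)
print(rgb.shape)  # (4, 4, 3)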
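
bkt info tags listed products with 'm'/'s' to show which ones the active task uses as main and subordinate. A pandas sketch of that marking, assuming a products frame indexed by productId; the frame and column names here are illustrative, not ocli's.

import pandas as pd

def mark_task_products(df: pd.DataFrame, main_id, subordinate_id) -> pd.DataFrame:
    """Add a 'task' column: 'm' for the task's main product, 's' for the subordinate."""
    out = df.copy()
    out['task'] = ''
    if main_id is not None and main_id in out.index:
        out.loc[main_id, 'task'] = 'm'
    if subordinate_id is not None and subordinate_id in out.index:
        out.loc[subordinate_id, 'task'] = 's'
    return out

products = pd.DataFrame({'title': ['S1A_EXAMPLE_A', 'S1B_EXAMPLE_B']}, index=['p1', 'p2'])
print(mark_task_products(products, 'p1', 'p2'))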
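
task preview/info report ROI coverage by summing the per-burst IW1_fit/IW2_fit/IW3_fit columns returned by get_geometry_fit_data_frame. A small pandas sketch of that aggregation with made-up fit values.

import pandas as pd

fit = pd.DataFrame({
    'IW1_fit': [0.0, 0.2, 0.9],
    'IW2_fit': [0.4, 1.0, 1.0],
    'IW3_fit': [0.1, 0.0, 0.0],
})  # one row per burst, values are fractional ROI coverage

print(fit)                                  # per-burst table (burst / IW1 / IW2 / IW3)
print(fit.sum().to_frame('total fit'))      # per-swath totals, as in the 'Swath, total fit' table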
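
task info keeps the acquisition-alignment check: the Sentinel-1 repeat cycle is 12 days (24*3600*12 s), the main/subordinate completion-time difference is folded to its distance from the nearest cycle boundary, and anything above 0.1 s is flagged as possibly misaligned. A standalone sketch of that arithmetic; the datetimes stand in for parse_title(...)['completionDate'].

from datetime import datetime, timedelta

S1_CYCLE_T = 24 * 3600 * 12  # Sentinel-1 repeat cycle, seconds (12 days)

def cycle_misalignment(main_dt: datetime, subordinate_dt: datetime) -> float:
    """Residual offset (seconds) of the pair relative to the 12-day repeat cycle."""
    dt = abs((main_dt - subordinate_dt) / timedelta(seconds=1)) % S1_CYCLE_T
    # fold onto the nearest cycle boundary, so 'one cycle minus a little' counts as a small offset
    return dt if dt <= S1_CYCLE_T / 2 else S1_CYCLE_T - dt

m = datetime(2019, 1, 1, 5, 30, 0)
s = datetime(2019, 1, 13, 5, 30, 0, 50000)   # one cycle later, 0.05 s off
print(cycle_misalignment(m, s))              # ~0.05 -> aligned (<= 0.1)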
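
task get-data pulls only metadata unless -d/--data is given, by excluding previews, calibration annotations and .tiff rasters from the rsync transfer (the --exclude strings appear in the hunk above). A hedged sketch of assembling such a call; the list-form command, the paths and the subprocess usage are assumptions, not ocli's rsync_meta.

import subprocess

def rsync_metadata_only(remote: str, local: str, dry_run: bool = False) -> list:
    """Build (and optionally run) an rsync that skips the heavy Sentinel-1 payload."""
    cmd = ['rsync', '-av',
           '--exclude=preview/*',
           '--exclude=annotation/calibration/*',
           '--exclude=*.tiff',
           remote, local]
    if not dry_run:
        subprocess.run(cmd, check=True)
    return cmd

print(rsync_metadata_only('/eodata/S1A_EXAMPLE.SAFE/', '/optoss/data/', dry_run=True))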
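
Finally, project/stack.py renames the flags handed to the stacking step, so the local main and subordinate product paths now travel as --main/--subordinate. A minimal sketch of that argument assembly with already-resolved paths (resolution via _local_eodata_relative_path is left out).

def stacking_args(config: dict, main_path: str, subordinate_path: str) -> list:
    """Argument vector handed to the stacking step, mirroring the renamed flags."""
    return [
        '--firstBurstIndex', str(config['firstBurstIndex']),
        '--lastBurstIndex', str(config['lastBurstIndex']),
        '--main', main_path,
        '--subordinate', subordinate_path,
    ]

print(stacking_args({'firstBurstIndex': 3, 'lastBurstIndex': 7},
                    '/optoss/data/MAIN_EXAMPLE.SAFE', '/optoss/data/SUB_EXAMPLE.SAFE'))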