14 changes: 7 additions & 7 deletions ocli/aikp/cluster/__init__.py
@@ -30,10 +30,10 @@
'eodata': None,
'ai_results': '/optoss/out',
'stack_results': '/optoss/stack',
'slave': None,
'slave_path': None,
'master': None,
'master_path': None,
'subordinate': None,
'subordinate_path': None,
'main': None,
'main_path': None,
}

RECIPE_CLUSTER_TPL = {
@@ -282,9 +282,9 @@ def validate_task(task, key)->(bool,List[str]):
errors.append('Not found')
elif not os.access(value, os.W_OK):
errors.append('Not writable')
elif key in ['master', 'slave'] and not value:
elif key in ['main', 'subordinate'] and not value:
errors.append(REQUIRED)
elif key in ['master_path', 'slave_path']:
elif key in ['main_path', 'subordinate_path']:
if value is None:
errors.append(REQUIRED)
elif _eodata is None:
@@ -302,7 +302,7 @@ def task_set(task: Task, d: Dict):
if k in d:
value = d[k] # type: str
"""set new value"""
if k in ['master', 'slave']:
if k in ['main', 'subordinate']:
cache_file_name = _cache_pairs_file_name(task)

try:
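Note on the key rename above: task configs saved before this change still carry the old 'master'/'slave' key names. Below is a minimal migration sketch, assuming a plain dict config; the helper and its name are hypothetical and not part of this PR.

```python
# Hypothetical helper -- not part of this PR. Maps legacy key names in a saved
# task config dict onto the renamed keys introduced above.
LEGACY_KEY_MAP = {
    'master': 'main',
    'master_path': 'main_path',
    'slave': 'subordinate',
    'slave_path': 'subordinate_path',
}

def migrate_task_config(config: dict) -> dict:
    """Return a copy of a task config dict with legacy key names replaced."""
    return {LEGACY_KEY_MAP.get(key, key): value for key, value in config.items()}

# migrate_task_config({'master': 'S1A_...', 'slave_path': None, 'eodata': None})
# -> {'main': 'S1A_...', 'subordinate_path': None, 'eodata': None}
```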
8 changes: 4 additions & 4 deletions ocli/aikp/cluster/sentinel_1/__init__.py
@@ -75,14 +75,14 @@ def get_stack_path(task:Task,full=False):
if _e:
e.append("stack_results: "+",".join(_e))

for k in ['master', 'slave', 'swath', 'firstBurstIndex', 'lastBurstIndex']:
for k in ['main', 'subordinate', 'swath', 'firstBurstIndex', 'lastBurstIndex']:
_e = sentinel_1.validate_task(task, k)[1]
if _e:
e.append(f"{k} "+",".join(_e))
if e:
raise AssertionError(','.join(e))
master_id = s1_prod_id(task.config['master'])
slave_id = s1_prod_id(task.config['slave'])
snap_name = f"{master_id}_{slave_id}_{task.config['swath']}" + \
main_id = s1_prod_id(task.config['main'])
subordinate_id = s1_prod_id(task.config['subordinate'])
snap_name = f"{main_id}_{subordinate_id}_{task.config['swath']}" + \
f"_{task.config['firstBurstIndex']}_{task.config['lastBurstIndex']}" # noqa
return str(Path(task.config['stack_results'], snap_name).absolute()) if full else snap_name
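The rename does not alter the stack naming scheme itself, only the variable names that build it. Here is a self-contained sketch of the resulting name, with invented product titles and a stubbed s1_prod_id; the real parsing logic is not shown in this diff, so the stub's behaviour is an assumption.

```python
# Standalone illustration of the snap_name format built in get_stack_path() above.
def s1_prod_id_stub(title: str) -> str:
    # Assumption: the ID is the trailing underscore-separated field of the title.
    return title.split('_')[-1]

config = {
    'main': 'S1A_IW_SLC__1SDV_20200101T050000_20200101T050027_030000_036FDA_1A2B',
    'subordinate': 'S1A_IW_SLC__1SDV_20200113T050000_20200113T050027_030175_037777_3C4D',
    'swath': 'IW2',
    'firstBurstIndex': 3,
    'lastBurstIndex': 6,
}

main_id = s1_prod_id_stub(config['main'])
subordinate_id = s1_prod_id_stub(config['subordinate'])
snap_name = (f"{main_id}_{subordinate_id}_{config['swath']}"
             f"_{config['firstBurstIndex']}_{config['lastBurstIndex']}")
print(snap_name)  # 1A2B_3C4D_IW2_3_6
```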
8 changes: 4 additions & 4 deletions ocli/cli/ai_options.py
@@ -199,7 +199,7 @@ def option_stack_vis(f):
--------------------------- --------- ------------ ---------- --------------------
raw 3 b1 b2 b3 usable for histograms (--hist)
composite 3 b1 b2 b3 b1-coh, b2-VV, b3-VH (same as raw)
sar 3 b1 (b2+b3)/2 b2-b3 b1-coh, b2-master, b3-slave (use the same polarization)
sar 3 b1 (b2+b3)/2 b2-b3 b1-coh, b2-main, b3-subordinate (use the same polarization)
simple 2 b1 b2 b1/b2 (VV, VH, VV/VH)
rgb-ratio 2 b1 2*b2 (b1/b2)/100 (VV, 2VH, VV/VH/100)
rgb-diff 2 b1 b2 b1-b2 (VH, VV, VH-VV)
@@ -217,9 +217,9 @@ def option_stack_vis(f):
--vis: bands R G B comment
--------------------------- --------- ------------ ---------- --------------------
raw 3 b1 b2 b3 useful for histograms (--hist)
sar 3 b1 (lg(b2)+lg(b3))/2 lg(b2)-lg(b3) b1-coh, b2-master, b3-slave (use the same polarization)
composite-u 3 lg(b1) lg(b2) b3 b1-master, b2-slave, b3-coh (use the same polarization)
composite 3 b1 lg(b2) lg(b3) b1-coh, b2-slave, b3-master (use the same polarization)
sar 3 b1 (lg(b2)+lg(b3))/2 lg(b2)-lg(b3) b1-coh, b2-main, b3-subordinate (use the same polarization)
composite-u 3 lg(b1) lg(b2) b3 b1-main, b2-subordinate, b3-coh (use the same polarization)
composite 3 b1 lg(b2) lg(b3) b1-coh, b2-subordinate, b3-main (use the same polarization)
false-color 2 b1=VH b2=VV
false-color-enhanced - same params as for false-color

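To make the --vis help tables above concrete, here is a small numpy sketch of the 'sar' band combination as the second table describes it. The log base, input scaling, and array shapes are assumptions; the actual rendering code is not part of this diff.

```python
import numpy as np

# 'sar' composite per the table: R = b1, G = (lg(b2) + lg(b3)) / 2, B = lg(b2) - lg(b3),
# with b1 = coherence, b2 = main backscatter, b3 = subordinate backscatter.
rng = np.random.default_rng(0)
b1 = rng.random((64, 64))                # coherence in [0, 1] (synthetic)
b2 = rng.random((64, 64)) * 0.5 + 1e-3   # main backscatter (synthetic, linear)
b3 = rng.random((64, 64)) * 0.5 + 1e-3   # subordinate backscatter (synthetic, linear)

r = b1
g = (np.log10(b2) + np.log10(b3)) / 2    # assuming lg() means log10
b = np.log10(b2) - np.log10(b3)

rgb = np.dstack([r, g, b])               # (64, 64, 3) composite, before any display scaling
```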
34 changes: 17 additions & 17 deletions ocli/cli/bucket.py
@@ -23,9 +23,9 @@
def _task_ms(task: Task) -> (str, str):
try:
task.resolve()
_, _m = task.get_valid_key('master')
_, _m = task.get_valid_key('main')
if task.kind == 'cluster':
_, _s = task.get_valid_key('slave')
_, _s = task.get_valid_key('subordinate')
else:
_s = None
return _m, _s
@@ -34,7 +34,7 @@ def _task_ms(task: Task) -> (str, str):
return None, None


def _bkt_list(repo: Repo, master: str, slave: str, geometry: Polygon, fit: int) -> (GeoDataFrame, list):
def _bkt_list(repo: Repo, main: str, subordinate: str, geometry: Polygon, fit: int) -> (GeoDataFrame, list):
""" list avaliable buckets"""
_df = pairs.load_from_cache(cache_file_name=(_cache_pairs_file_name(repo)))
try:
@@ -53,16 +53,16 @@ def _bkt_list(repo: Repo, master: str, slave: str, geometry: Polygon, fit: int)
raise RuntimeError(e)
headers = ['#', 'bucket', 'mean fit', 'from', 'to', 'Cnt']

if master or slave:
if main or subordinate:

def _get_bucket_mytitle(t: str):
_m = _bk.loc[_bk['title'] == t]
if not _m.empty:
return _m.iloc[0]['bucket']
return None

_m = _get_bucket_mytitle(master)
_s = _get_bucket_mytitle(slave)
_m = _get_bucket_mytitle(main)
_s = _get_bucket_mytitle(subordinate)

def _ms(b):
_x = 'm' if _m == b else ' '
@@ -153,7 +153,7 @@ def bucket_cli():
@option_locate_task
@option_roi
@click.option('--check', 'check', is_flag=True, required=False, default=False,
help='Check master-slave data exists')
help='Check main-subordinate data exists')
@click.option('--update', '-u', 'reload', is_flag=True, required=False, default=False, help='force products load')
@click.argument('bucket_name', metavar='<BUCKET_NAME | RECORD>')
@products_list_options(def_col=None, def_sort=['+startDate'])
@@ -200,7 +200,7 @@ def bkt_info(ctx, repo, task: Task, roi_id, bucket_name, reload, less, sort, lim
if task.loaded:
output.comment(f"Task: {task.name}")
if 'task' in cols:
output.comment(f"INFO: 'task' column: 'm' - used as master in task, 's' - used as slave in task ")
output.comment(f"INFO: 'task' column: 'm' - used as main in task, 's' - used as subordinate in task ")
if 'exists' in cols:
output.comment(f"INFO: 'exists' column: '+' - full data loaded, '~' - metadata only loaded")

@@ -215,7 +215,7 @@ def bkt_info(ctx, repo, task: Task, roi_id, bucket_name, reload, less, sort, lim
@option_locate_task
@option_roi
@click.option('--check', 'check', is_flag=True, required=False, default=False,
help='Check master-slave data exists')
help='Check main-subordinate data exists')
# @products_list_options(def_col=['productId', 'startDate', 'title'], def_sort=['+startDate'])
@option_less
@click.argument('product_id', metavar="PRODUCT_ID")
@@ -235,7 +235,7 @@ def bkt_info(repo: Repo, task: Task, roi_id, less,
_id, _roi = resolve_roi(roi_id, repo)
_m, _s = _task_ms(task)
geometry = _roi['geometry']
output.comment(f"active task master: {_m}")
output.comment(f"active task main: {_m}")

_df = pairs.load_from_cache(cache_file_name=(_cache_pairs_file_name(repo)))
_df = _df.set_index('productId')
@@ -272,11 +272,11 @@ def bkt_info(repo: Repo, task: Task, roi_id, less,
if _m in _df.index:
_df.loc[_m, 'task'] = 'm'
else:
output.warning('Current task master not found in bucket')
output.warning('Current task main not found in bucket')
if _s in _df.index:
_df.loc[_s, 'task'] = 's'
else:
output.warning('Current task slave not found in bucket')
output.warning('Current task subordinate not found in bucket')
_df = _df.reset_index()
_e, eodata = task.get_valid_key('eodata')

@@ -301,16 +301,16 @@ def _ch_fs(b):
headers = ['#'] + cols
output.table(_df, headers=headers, )

# if master or slave:
# if main or subordinate:
#
# def _get_bucket_mytitle(t: str):
# _m = _bk.loc[_bk['title'] == t]
# if not _m.empty:
# return _m.iloc[0]['bucket']
# return None
#
# _m = _get_bucket_mytitle(master)
# _s = _get_bucket_mytitle(slave)
# _m = _get_bucket_mytitle(main)
# _s = _get_bucket_mytitle(subordinate)
#
# def _ms(b):
# _x = 'm' if _m == b else ' '
@@ -345,8 +345,8 @@ def bkt_list(ctx: click.Context, repo: Repo, task: Task, roi_id, reload, fit):
_t, headers = _bkt_list(repo,
geometry=_roi['geometry'],
fit=fit,
master=_m,
slave=_s,
main=_m,
subordinate=_s,
)
except RuntimeError as e:
raise click.UsageError(str(e))
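For readers skimming the renamed bkt_info logic above, here is a reduced pandas sketch of how the 'task' column gets its 'm'/'s' markers; the product IDs are invented, and in ocli the table comes from the pairs cache.

```python
import pandas as pd

# 'm' marks the active task's main product, 's' its subordinate product.
df = pd.DataFrame({
    'productId': ['P001', 'P002', 'P003'],
    'title': ['t1', 't2', 't3'],
}).set_index('productId')
df['task'] = ' '

_m, _s = 'P002', 'P003'   # as returned by _task_ms() for the active task (invented IDs)
if _m in df.index:
    df.loc[_m, 'task'] = 'm'
if _s in df.index:
    df.loc[_s, 'task'] = 's'

df = df.reset_index()     # P002 now carries 'm', P003 carries 's'
```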
42 changes: 21 additions & 21 deletions ocli/cli/state.py
@@ -625,7 +625,7 @@ def resolve(self):
@ensure_task_loaded
def validate(self, key):
config = self.config
# TODO validate master-slave is in
# TODO validate main-subordinate is in
if key not in config:
raise click.BadArgumentUsage(f'Could not validate Key {key}: key not found')
value = config[key]
@@ -687,10 +687,10 @@ def get_validation_data_frame(self) -> 'gpd.pd.DataFrame':
return _l

@ensure_task_loaded
def get_geometry_fit_data_frame(self, geometry, key='master') -> 'gpd.pd.DataFrame':
def get_geometry_fit_data_frame(self, geometry, key='main') -> 'gpd.pd.DataFrame':
# TODO use validate_all
if key not in ['master', 'slave']:
raise AssertionError("key: only 'master' or 'salve are supported'")
if key not in ['main', 'subordinate']:
raise AssertionError("key: only 'main' or 'subordinate' are supported")
for k in ['eodata', key + '_path']:
e = self.validate(k)
if e:
@@ -707,8 +707,8 @@ def get_stack_path(self, full=False):

format is :

cluster Sentinel-1: <masterID>_<slaveID>_<swath>_<firstBurstIndex>_<lastBurstIndex>
rvi Sentinel-1: <masterID>_<swath>_<firstBurstIndex>_<lastBurstIndex>
cluster Sentinel-1: <mainID>_<subordinateID>_<swath>_<firstBurstIndex>_<lastBurstIndex>
rvi Sentinel-1: <mainID>_<swath>_<firstBurstIndex>_<lastBurstIndex>

if full==True, os.path.join with <task.config.stack_results>
"""
@@ -730,43 +730,43 @@ def get_stack_path(self, full=False):
# kind = self.config['kind']
# source = self.config['source']
# if kind == 'cluster' and source == 'Sentinel-1':
# e = self.validate_all(['stack_results', 'master', 'slave', 'swath', 'firstBurstIndex', 'lastBurstIndex'])
# e = self.validate_all(['stack_results', 'main', 'subordinate', 'swath', 'firstBurstIndex', 'lastBurstIndex'])
# if e:
# raise AssertionError(','.join(e))
# master_id = s1_prod_id(self.config['master'])
# slave_id = s1_prod_id(self.config['slave'])
# snap_name = f"{master_id}_{slave_id}_{self.config['swath']}" + \
# main_id = s1_prod_id(self.config['main'])
# subordinate_id = s1_prod_id(self.config['subordinate'])
# snap_name = f"{main_id}_{subordinate_id}_{self.config['swath']}" + \
# f"_{self.config['firstBurstIndex']}_{self.config['lastBurstIndex']}" # noqa
# elif kind == 'rvi' and source == 'Sentinel-1':
# e = self.validate_all(['stack_results', 'master', 'swath', 'firstBurstIndex', 'lastBurstIndex'])
# e = self.validate_all(['stack_results', 'main', 'swath', 'firstBurstIndex', 'lastBurstIndex'])
# if e:
# raise AssertionError(','.join(e))
# master_id = s1_prod_id(self.config['master'])
# snap_name = f"{master_id}_{self.config['swath']}" + \
# main_id = s1_prod_id(self.config['main'])
# snap_name = f"{main_id}_{self.config['swath']}" + \
# f"_{self.config['firstBurstIndex']}_{self.config['lastBurstIndex']}" # noqa
# else:
# raise AssertionError(f'Could not build path for task config kind "{kind}" and source {source} ')
# return os.path.join(self.config['stack_results'], snap_name) if full else snap_name

@ensure_task_loaded
def _compose_friendly_keys(self, roi_name):
e, master = self.get_valid_key('master')
prod_fields = {'m_' + k: v for (k, v) in parse_title(master).items()}
prod_fields = {**prod_fields, **{'m_' + k: v for (k, v) in parse_title(master).items()}}
prod_fields['m_id'] = s1_prod_id(master)
e, main = self.get_valid_key('main')
prod_fields = {'m_' + k: v for (k, v) in parse_title(main).items()}
prod_fields = {**prod_fields, **{'m_' + k: v for (k, v) in parse_title(main).items()}}
prod_fields['m_id'] = s1_prod_id(main)

prod_fields['s_id'] = ''
if self.kind == 'cluster':
e, slave = self.get_valid_key('slave')
prod_fields = {**prod_fields, **{'s_' + k: v for (k, v) in parse_title(slave).items()}}
prod_fields['s_id'] = s1_prod_id(slave)
e, subordinate = self.get_valid_key('subordinate')
prod_fields = {**prod_fields, **{'s_' + k: v for (k, v) in parse_title(subordinate).items()}}
prod_fields['s_id'] = s1_prod_id(subordinate)
fields = {**self.config, **prod_fields}
fields['predictor'] = fields['predictor'].split('/')[-1]
fields['roi'] = roi_name
return fields

def format_pattern(self, key, roi_name):
ms = ['master', 'slave'] if self.kind == 'cluster' else ['master']
ms = ['main', 'subordinate'] if self.kind == 'cluster' else ['main']
e = self.validate_all(ms)
if e:
raise AssertionError(','.join(e))
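A closing note on the renamed state helpers: _compose_friendly_keys prefixes parsed title fields with 'm_' and 's_'. Below is a reduced sketch of that prefixing; parse_title is stubbed here, and the field names it returns are assumptions.

```python
# Reduced sketch of the m_/s_ prefixing in _compose_friendly_keys above.
def parse_title_stub(title: str) -> dict:
    parts = title.split('_')
    return {'mission': parts[0], 'startDate': parts[5]}  # assumed fields only

main = 'S1A_IW_SLC__1SDV_20200101T050000_20200101T050027_030000_036FDA_1A2B'
subordinate = 'S1A_IW_SLC__1SDV_20200113T050000_20200113T050027_030175_037777_3C4D'

fields = {'m_' + k: v for k, v in parse_title_stub(main).items()}
fields.update({'s_' + k: v for k, v in parse_title_stub(subordinate).items()})
# fields == {'m_mission': 'S1A', 'm_startDate': '20200101T050000',
#            's_mission': 'S1A', 's_startDate': '20200113T050000'}
```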