Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
5099323
activate start time column
Dec 5, 2024
b6e0d7b
Adding boundary type selector to recorded data type
alexbourret Dec 12, 2024
eee84cb
Adding boundary selector to connector
alexbourret Dec 12, 2024
20adf06
v1.2.4
alexbourret Dec 12, 2024
81197c1
Updating changelog
alexbourret Dec 12, 2024
07b7a1f
beta marker
alexbourret Dec 12, 2024
9d59cda
v1.2.4
alexbourret Dec 12, 2024
edd18f9
add Items.Value.Value to fields selector
alexbourret Dec 20, 2024
7cb64f6
beta 2 marker
alexbourret Dec 20, 2024
ca55b61
update changelog
alexbourret Dec 20, 2024
11c8779
fix recursive_get_rows_from_webid
Jan 2, 2025
027203c
Add option to copy input rows into recipe output [sc-227010]
alexbourret Jan 31, 2025
c026bf3
update changelog
alexbourret Jan 31, 2025
601fb6f
beta 2
alexbourret Jan 31, 2025
0d0cae2
Merge branch 'bug/sc-220666-some-value-not-available' into feature/sc…
alexbourret Feb 4, 2025
4b72df5
beta 3
alexbourret Feb 4, 2025
9d05710
Merge pull request #60 from jerometerrier/6-fix_recursive_get_rows_fr…
alexbourret Feb 4, 2025
e44bcba
update changelog
alexbourret Feb 4, 2025
2117443
Merge pull request #54 from jerometerrier/2-activate-use-start-time-c…
alexbourret Feb 4, 2025
d8e072e
beta 4
alexbourret Feb 4, 2025
ae29b86
update changelog
alexbourret Feb 4, 2025
9c482c3
Add interpolated value for EF recipe and dataset
alexbourret Feb 4, 2025
2fa074e
Fix for [sc-229162]
alexbourret Feb 13, 2025
ecb01ed
beta 5
alexbourret Feb 13, 2025
aa04327
Recursive get rows webid [sc-229599]
MayeulRousselet Feb 17, 2025
9aa6f6d
Activate start time col [sc-229601]
MayeulRousselet Feb 17, 2025
74e6328
add interpolate boundary type for record data type
alexbourret Feb 19, 2025
f8d9384
UI update
alexbourret Feb 24, 2025
76a62b9
add warning of possible column overwriting
alexbourret Mar 7, 2025
5c06172
removing the lonely '
alexbourret Mar 7, 2025
31fc7b6
fix boundary type for ef connector (selected ef)
alexbourret Mar 11, 2025
df49ec4
Merge pull request #66 from dataiku/bug/sc-229162-missing-timestamps
alexbourret Mar 11, 2025
0750f9d
Merge pull request #65 from dataiku/feature/sc-227482-interpolated-va…
alexbourret Mar 11, 2025
7c19265
Merge branch 'test/recursive_get_rows_webid' into test/activate-start…
alexbourret Mar 11, 2025
b6fd464
Merge pull request #64 from dataiku/test/activate-start-time-col
alexbourret Mar 11, 2025
7d46303
Merge pull request #63 from dataiku/test/recursive_get_rows_webid
alexbourret Mar 11, 2025
26a213e
Merge pull request #62 from dataiku/feature/sc-227010-option-to-copy-…
alexbourret Mar 11, 2025
9afa9ea
Merge pull request #58 from dataiku/bug/sc-220666-some-value-not-avai…
alexbourret Mar 11, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
# Changelog

## [Version 1.2.4](https://github.com/dataiku/dss-plugin-pi-server/releases/tag/v1.2.4) - Feature release - 2024-12-12

- Add boundary type selector to recorded data type
- Add boundary type selector to attribute search connector

## [Version 1.2.3](https://github.com/dataiku/dss-plugin-pi-server/releases/tag/v1.2.3) - Feature release - 2024-09-26

- Add summaryDuration input (duration of each summary interval)
Expand Down
2 changes: 1 addition & 1 deletion custom-recipes/pi-system-retrieve-list/recipe.json
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@
{
"name": "boundary_type",
"label": "Boundary type",
"visibilityCondition": "['InterpolatedData'].includes(model.data_type)",
"visibilityCondition": "['InterpolatedData','RecordedData'].includes(model.data_type)",
"type": "SELECT",
"selectChoices":[
{"value": "Inside", "label": "Inside"},
Expand Down
3 changes: 3 additions & 0 deletions custom-recipes/pi-system-retrieve-list/recipe.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
end_time_column = config.get("end_time_column")
server_url_column = config.get("server_url_column")
interval, sync_time, boundary_type = get_interpolated_parameters(config)
record_boundary_type = config.get("boundary_type") if data_type == "RecordedData" else None
summary_type, summary_duration = get_summary_parameters(config)

network_timer = PerformanceTimer()
Expand Down Expand Up @@ -102,6 +103,7 @@
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
max_count=max_count,
can_raise=False,
object_id=object_id,
Expand All @@ -117,6 +119,7 @@
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
max_count=max_count,
can_raise=False,
endpoint_type="AF",
Expand Down
2 changes: 1 addition & 1 deletion plugin.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"id": "pi-system",
"version": "1.2.3",
"version": "1.2.4",
"meta": {
"label": "PI System",
"description": "Retrieve data from your OSIsoft PI System servers",
Expand Down
11 changes: 11 additions & 0 deletions python-connectors/pi-system_attribute-search/connector.json
Original file line number Diff line number Diff line change
Expand Up @@ -360,6 +360,17 @@
],
"visibilityCondition": "((model.must_retrieve_metrics) && (model.data_type == 'SummaryData'))"
},
{
"name": "boundary_type",
"label": "Boundary type",
"visibilityCondition": "((model.must_retrieve_metrics) && ['InterpolatedData','RecordedData'].includes(model.data_type))",
"type": "SELECT",
"selectChoices":[
{"value": "Inside", "label": "Inside"},
{"value": "Outside", "label": "Outside"}
],
"defaultValue": "Inside"
},
{
"name": "summary_duration",
"label": "Summary duration",
Expand Down
14 changes: 8 additions & 6 deletions python-connectors/pi-system_attribute-search/connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
from osisoft_plugin_common import (
PISystemConnectorError, RecordsLimit, get_credentials, assert_time_format,
remove_unwanted_columns, format_output, filter_columns_from_schema, is_child_attribute_path,
check_debug_mode, PerformanceTimer, get_max_count, get_summary_parameters, fields_selector
check_debug_mode, PerformanceTimer, get_max_count, get_summary_parameters, fields_selector,
get_interpolated_parameters
)
from osisoft_constants import OSIsoftConstants

Expand Down Expand Up @@ -36,9 +37,7 @@ def __init__(self, config, plugin_config):
self.start_time = self.client.parse_pi_time(self.start_time)
self.end_time = config.get("end_time")
self.end_time = self.client.parse_pi_time(self.end_time)
is_interpolated_data = config.get("data_type", "").endswith("InterpolatedData")
self.interval = config.get("interval") if is_interpolated_data else None
self.sync_time = config.get("sync_time") if is_interpolated_data else None
self.interval, self.sync_time, self.boundary_type = get_interpolated_parameters(config)
self.sync_time = self.client.parse_pi_time(self.sync_time)
assert_time_format(self.start_time, error_source="start time")
assert_time_format(self.end_time, error_source="end time")
Expand All @@ -57,6 +56,8 @@ def __init__(self, config, plugin_config):
self.config = config
self.summary_type, self.summary_duration = get_summary_parameters(config)

self.record_boundary_type = config.get("boundary_type") if self.data_type == "RecordedData" else None

def extract_database_webid(self, database_endpoint):
return database_endpoint.split("/")[-1]

Expand Down Expand Up @@ -114,8 +115,9 @@ def generate_rows(self, dataset_schema=None, dataset_partitioning=None,
selected_fields=fields_selector(self.data_type),
max_count=self.max_count,
summary_type=self.summary_type,
summary_duration=self.summary_duration
# boundary_type=self.boundary_type
summary_duration=self.summary_duration,
boundary_type=self.boundary_type,
record_boundary_type=self.record_boundary_type
):
if limit.is_reached():
return
Expand Down
45 changes: 29 additions & 16 deletions python-lib/osisoft_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def get_auth(self, auth_type, username, password):
return None

def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None,
interval=None, sync_time=None, boundary_type=None, selected_fields=None,
interval=None, sync_time=None, boundary_type=None, record_boundary_type=None, selected_fields=None,
can_raise=True, endpoint_type="event_frames", search_full_hierarchy=None,
max_count=None, summary_type=None, summary_duration=None):
# Split the time range until no more HTTP 400
Expand All @@ -57,7 +57,8 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d
while not done:
logger.info("Attempting download webids from {} to {}".format(start_date, end_date))
rows = self.get_rows_from_webid(webid, data_type, start_date=start_date, end_date=end_date,
interval=interval, sync_time=sync_time, boundary_type=boundary_type, selected_fields=selected_fields,
interval=interval, sync_time=sync_time, boundary_type=boundary_type,
record_boundary_type=record_boundary_type, selected_fields=selected_fields,
can_raise=can_raise, endpoint_type=endpoint_type, search_full_hierarchy=search_full_hierarchy,
max_count=max_count, summary_type=summary_type, summary_duration=summary_duration)
counter = 0
Expand All @@ -79,7 +80,8 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d
start_timestamp, end_timestamp, half_time_iso = self.halve_time_range(start_date, end_date)
first_half_rows = self.recursive_get_rows_from_webid(
webid, data_type, start_date=start_timestamp, end_date=half_time_iso,
interval=interval, sync_time=sync_time, boundary_type=boundary_type, selected_fields=selected_fields,
interval=interval, sync_time=sync_time, boundary_type=boundary_type,
record_boundary_type=record_boundary_type, selected_fields=selected_fields,
can_raise=can_raise, endpoint_type=endpoint_type, search_full_hierarchy=search_full_hierarchy, max_count=max_count,
summary_type=summary_type, summary_duration=summary_duration
)
Expand All @@ -88,7 +90,8 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d
logger.info("Successfully retrieved first half ({} to {})".format(start_timestamp, half_time_iso))
second_half_rows = self.recursive_get_rows_from_webid(
webid, data_type, start_date=half_time_iso, end_date=end_timestamp,
interval=interval, sync_time=sync_time, boundary_type=boundary_type, selected_fields=selected_fields,
interval=interval, sync_time=sync_time, boundary_type=boundary_type,
record_boundary_type=record_boundary_type, selected_fields=selected_fields,
can_raise=can_raise, endpoint_type=endpoint_type, search_full_hierarchy=search_full_hierarchy, max_count=max_count,
summary_type=summary_type, summary_duration=summary_duration
)
Expand All @@ -109,7 +112,7 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d
done = True

def recursive_get_rows_from_item(self, item, data_type, start_date=None, end_date=None,
interval=None, sync_time=None, boundary_type=None,
interval=None, sync_time=None, boundary_type=None, record_boundary_type=None,
can_raise=True, object_id=None, endpoint_type="event_frames", search_full_hierarchy=None,
max_count=None, summary_type=None, summary_duration=None):
# item can be an pi tag, a path to an element or event frame
Expand All @@ -119,7 +122,8 @@ def recursive_get_rows_from_item(self, item, data_type, start_date=None, end_dat
while not done:
logger.info("Attempting download items from {} to {}".format(start_date, end_date))
rows = self.get_rows_from_item(item, data_type, start_date=start_date, end_date=end_date, interval=interval,
sync_time=sync_time, boundary_type=boundary_type, can_raise=True, object_id=object_id,
sync_time=sync_time, boundary_type=boundary_type, record_boundary_type=record_boundary_type,
can_raise=True, object_id=object_id,
search_full_hierarchy=search_full_hierarchy, max_count=max_count,
summary_type=summary_type, summary_duration=summary_duration)
counter = 0
Expand All @@ -141,15 +145,17 @@ def recursive_get_rows_from_item(self, item, data_type, start_date=None, end_dat
start_timestamp, end_timestamp, half_time_iso = self.halve_time_range(start_date, end_date)
first_half_rows = self.recursive_get_rows_from_item(
item, data_type, start_date=start_timestamp, end_date=half_time_iso,
interval=interval, sync_time=sync_time, boundary_type=boundary_type, can_raise=True, object_id=object_id,
interval=interval, sync_time=sync_time, boundary_type=boundary_type,
record_boundary_type=record_boundary_type, can_raise=True, object_id=object_id,
search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration
)
for row in first_half_rows:
yield row
logger.info("Successfully retrieved first half ({} to {})".format(start_timestamp, half_time_iso))
second_half_rows = self.recursive_get_rows_from_item(
item, data_type, start_date=half_time_iso, end_date=end_timestamp,
interval=interval, sync_time=sync_time, boundary_type=boundary_type, can_raise=True, object_id=object_id,
interval=interval, sync_time=sync_time, boundary_type=boundary_type,
record_boundary_type=record_boundary_type, can_raise=True, object_id=object_id,
search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration
)
for row in second_half_rows:
Expand Down Expand Up @@ -219,7 +225,7 @@ def parse_pi_time(self, pi_time, to_epoch=False):
return iso_timestamp

def get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None,
interval=None, sync_time=None, boundary_type=None, selected_fields=None,
interval=None, sync_time=None, boundary_type=None, record_boundary_type=None, selected_fields=None,
can_raise=True, endpoint_type="event_frames", search_full_hierarchy=None,
max_count=None, summary_type=None, summary_duration=None):

Expand All @@ -234,6 +240,7 @@ def get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None,
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
selected_fields=selected_fields,
search_full_hierarchy=search_full_hierarchy,
max_count=max_count,
Expand All @@ -252,7 +259,7 @@ def get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None,
yield item

def get_rows_from_webids(self, input_rows, data_type, start_date=None, end_date=None,
interval=None, sync_time=None, boundary_type=None, selected_fields=None, search_full_hierarchy=None,
interval=None, sync_time=None, boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None,
max_count=None, can_raise=True, endpoint_type="event_frames", batch_size=500, summary_type=None, summary_duration=None):
batch_requests_parameters = []
number_processed_webids = 0
Expand Down Expand Up @@ -318,7 +325,7 @@ def _batch_requests(self, batch_requests_parameters):
yield batch_section.get("Content", {})

def generic_get_kwargs(self, start_date=None, end_date=None, interval=None, sync_time=None,
boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None,
boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None,
summary_type=None, summary_duration=None, can_raise=None):
headers = self.get_requests_headers()
params = self.get_requests_params(
Expand All @@ -327,6 +334,7 @@ def generic_get_kwargs(self, start_date=None, end_date=None, interval=None, sync
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
selected_fields=selected_fields,
search_full_hierarchy=search_full_hierarchy,
max_count=max_count,
Expand All @@ -339,7 +347,7 @@ def generic_get_kwargs(self, start_date=None, end_date=None, interval=None, sync
}

def generic_get(self, url, start_date=None, end_date=None, interval=None, sync_time=None,
boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None,
boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None,
can_raise=None, summary_type=None, summary_duration=None):
headers = self.get_requests_headers()
params = self.get_requests_params(
Expand All @@ -348,6 +356,7 @@ def generic_get(self, url, start_date=None, end_date=None, interval=None, sync_t
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
selected_fields=selected_fields,
search_full_hierarchy=search_full_hierarchy,
max_count=max_count,
Expand All @@ -363,7 +372,7 @@ def generic_get(self, url, start_date=None, end_date=None, interval=None, sync_t
return json_response

def get_rows_from_item(self, item, data_type, start_date=None, end_date=None, interval=None,
sync_time=None, boundary_type=None, can_raise=True, object_id=None,
sync_time=None, boundary_type=None, record_boundary_type=None, can_raise=True, object_id=None,
search_full_hierarchy=None, max_count=None, summary_type=None,
summary_duration=None):
# item can be an pi tag, a path to an element or event frame
Expand All @@ -378,6 +387,7 @@ def get_rows_from_item(self, item, data_type, start_date=None, end_date=None, in
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
max_count=max_count,
search_full_hierarchy=search_full_hierarchy,
can_raise=can_raise,
Expand All @@ -392,7 +402,7 @@ def get_rows_from_item(self, item, data_type, start_date=None, end_date=None, in
yield self.loop_sub_items(item)

def get_link_from_item(self, item, data_type, start_date, end_date, interval=None,
sync_time=None, boundary_type=None, search_full_hierarchy=None,
sync_time=None, boundary_type=None, record_boundary_type=None, search_full_hierarchy=None,
max_count=None, can_raise=True, summary_type=None,
summary_duration=None):
url = self.extract_link_with_key(item, data_type)
Expand All @@ -404,7 +414,8 @@ def get_link_from_item(self, item, data_type, start_date, end_date, interval=Non
headers = self.get_requests_headers()
params = build_requests_params(
start_time=start_date, end_time=end_date, interval=interval,
sync_time=sync_time, sync_time_boundary_type=boundary_type, search_full_hierarchy=search_full_hierarchy,
sync_time=sync_time, sync_time_boundary_type=boundary_type, record_boundary_type=record_boundary_type,
search_full_hierarchy=search_full_hierarchy,
max_count=max_count, summary_type=summary_type, summary_duration=summary_duration
)
json_response = self.get(
Expand Down Expand Up @@ -625,7 +636,7 @@ def get_requests_headers(self):
}

def get_requests_params(self, start_date=None, end_date=None, interval=None, sync_time=None,
boundary_type=None, selected_fields=None, search_full_hierarchy=None,
boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None,
max_count=None, summary_type=None, summary_duration=None):
params = {}
if start_date:
Expand All @@ -638,6 +649,8 @@ def get_requests_params(self, start_date=None, end_date=None, interval=None, syn
params.update({"syncTime": sync_time})
if boundary_type:
params.update({"syncTimeBoundaryType": boundary_type})
if record_boundary_type:
params.update({"boundaryType": record_boundary_type})
if selected_fields:
params.update({"selectedFields": selected_fields})
if search_full_hierarchy:
Expand Down
2 changes: 1 addition & 1 deletion python-lib/osisoft_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -403,7 +403,7 @@ class OSIsoftConstants(object):
"Security": "{base_url}/eventframes/{webid}/security",
"SecurityEntries": "{base_url}/eventframes/{webid}/securityentries"
}
PLUGIN_VERSION = "1.2.3"
PLUGIN_VERSION = "1.2.4-beta.1"
VALUE_COLUMN_SUFFIX = "_val"
WEB_API_PATH = "piwebapi"
WRITE_HEADERS = {'X-Requested-With': 'XmlHttpRequest'}
Loading