diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1a55f0c..4f77b8a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,14 @@
 # Changelog
 
+## [Version 1.2.4](https://github.com/dataiku/dss-plugin-pi-server/releases/tag/v1.2.4) - Feature and bug release - 2025-02-18
+
+- Add boundary type selector to the Recorded data type
+- Add boundary type selector to the attribute search connector
+- *Assets values downloader* Add option to copy each input row into the output dataset
+- Fix issue with the Recorded data type
+- *Assets values downloader* Paginate over the time range when using WebIDs
+- *Assets values downloader* Allow start and end times to be read from columns of the input dataset
+
 ## [Version 1.2.3](https://github.com/dataiku/dss-plugin-pi-server/releases/tag/v1.2.3) - Feature release - 2024-09-26
 
 - Add summaryDuration input (duration of each summary interval)
diff --git a/custom-recipes/pi-system-retrieve-event-frames/recipe.json b/custom-recipes/pi-system-retrieve-event-frames/recipe.json
index e9cb714..5b344d8 100644
--- a/custom-recipes/pi-system-retrieve-event-frames/recipe.json
+++ b/custom-recipes/pi-system-retrieve-event-frames/recipe.json
@@ -143,7 +143,7 @@
         {
            "visibilityCondition": "model.use_start_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
            "name": "start_time_column",
-           "label": "Start times' column",
+           "label": "Start times column",
            "description": "Optional",
            "type": "COLUMN",
            "columnRole": "input_dataset"
@@ -167,7 +167,7 @@
        {
            "visibilityCondition": "model.use_end_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
            "name": "end_time_column",
-           "label": "End times' column",
+           "label": "End times column",
            "description": "Optional",
            "type": "COLUMN",
            "columnRole": "input_dataset"
@@ -180,6 +180,24 @@
            "type": "STRING",
            "defaultValue": ""
        },
+       {
+           "type": "SEPARATOR",
+           "description": "Data to retrieve"
+       },
+       {
+           "name": "data_type",
+           "label": "Data type",
+           "type": "SELECT",
+           "selectChoices":[
+               {"value": "InterpolatedData", "label": "Interpolated"},
+               {"value": "PlotData", "label": "Plot"},
+               {"value": "RecordedData", "label": "Recorded"},
+               {"value": "SummaryData", "label": "Summary"},
+               {"value": "Value", "label": "Value"},
+               {"value": "EndValue", "label": "EndValue"}
+           ],
+           "defaultValue": "SummaryData"
+       },
        {
            "type": "SEPARATOR",
            "description": "Interpolation",
@@ -212,23 +230,6 @@
            ],
            "defaultValue": "Inside"
        },
-       {
-           "type": "SEPARATOR",
-           "description": "Data to retrieve"
-       },
-       {
-           "name": "data_type",
-           "label": "Data type",
-           "type": "SELECT",
-           "selectChoices":[
-               {"value": "PlotData", "label": "Plot"},
-               {"value": "RecordedData", "label": "Recorded"},
-               {"value": "SummaryData", "label": "SummaryData"},
-               {"value": "Value", "label": "Value"},
-               {"value": "EndValue", "label": "EndValue"}
-           ],
-           "defaultValue": "SummaryData"
-       },
        {
            "name": "summary_type",
            "label": "Summary type",
diff --git a/custom-recipes/pi-system-retrieve-list/recipe.json b/custom-recipes/pi-system-retrieve-list/recipe.json
index 78f58a7..b871bf1 100644
--- a/custom-recipes/pi-system-retrieve-list/recipe.json
+++ b/custom-recipes/pi-system-retrieve-list/recipe.json
@@ -112,6 +112,12 @@
            "type": "COLUMN",
            "columnRole": "input_dataset"
        },
+       {
+           "name": "do_duplicate_input_row",
+           "label": "Copy other input columns",
+           "description": "Columns with matching names will be overwritten by PI System data",
+           "type": "BOOLEAN"
+       },
        {
            "type": "SEPARATOR",
            "description": "Time",
@@ -125,16 +131,16 @@
        {
"name": "use_start_time_column", "label": "Use start time value per row", - "description": "Description here", - "visibilityCondition": "false && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))", + "description": "", + "visibilityCondition": "['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type)", "type": "BOOLEAN", "defaultValue": false }, { "visibilityCondition": "model.use_start_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))", "name": "start_time_column", - "label": "Start times' column", - "description": "Optional", + "label": "Start times column", + "description": "Column type must be string", "type": "COLUMN", "columnRole": "input_dataset" }, @@ -148,17 +154,17 @@ }, { "name": "use_end_time_column", - "label": "Use start time value per row", + "label": "Use end time value per row", "description": "", - "visibilityCondition": "false && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))", + "visibilityCondition": "['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type)", "type": "BOOLEAN", "defaultValue": false }, { "visibilityCondition": "model.use_end_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))", "name": "end_time_column", - "label": "End times' column", - "description": "Optional", + "label": "End times column", + "description": "Column type must be string", "type": "COLUMN", "columnRole": "input_dataset" }, @@ -194,10 +200,22 @@ { "name": "boundary_type", "label": "Boundary type", - "visibilityCondition": "['InterpolatedData'].includes(model.data_type)", + "visibilityCondition": "((model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type))", + "type": "SELECT", + "selectChoices":[ + {"value": "Inside", "label": "Inside"}, + {"value": "Outside", "label": "Outside"} + ], + "defaultValue": "Inside" + }, + { + "name": "record_boundary_type", + "label": "Boundary type", + "visibilityCondition": "((model.must_retrieve_metrics) && ['RecordedData'].includes(model.data_type))", "type": "SELECT", "selectChoices":[ {"value": "Inside", "label": "Inside"}, + {"value": "Interpolated", "label": "Interpolated"}, {"value": "Outside", "label": "Outside"} ], "defaultValue": "Inside" diff --git a/custom-recipes/pi-system-retrieve-list/recipe.py b/custom-recipes/pi-system-retrieve-list/recipe.py index b09c071..a4e3e93 100644 --- a/custom-recipes/pi-system-retrieve-list/recipe.py +++ b/custom-recipes/pi-system-retrieve-list/recipe.py @@ -48,7 +48,9 @@ end_time_column = config.get("end_time_column") server_url_column = config.get("server_url_column") interval, sync_time, boundary_type = get_interpolated_parameters(config) +record_boundary_type = config.get("record_boundary_type") if data_type == "RecordedData" else None summary_type, summary_duration = get_summary_parameters(config) +do_duplicate_input_row = config.get("do_duplicate_input_row", False) network_timer = PerformanceTimer() processing_timer = PerformanceTimer() @@ -63,6 +65,9 @@ client = None previous_server_url = "" time_not_parsed = True + +input_columns = list(input_parameters_dataframe.columns) if do_duplicate_input_row else [] + with output_dataset.get_writer() as writer: first_dataframe = True for index, input_parameters_row in input_parameters_dataframe.iterrows(): @@ -70,6 +75,9 @@ start_time = 
input_parameters_row.get(start_time_column, start_time) if use_start_time_column else start_time end_time = input_parameters_row.get(end_time_column, end_time) if use_end_time_column else end_time row_name = input_parameters_row.get("Name") + duplicate_initial_row = {} + for input_column in input_columns: + duplicate_initial_row[input_column] = input_parameters_row.get(input_column) if client is None or previous_server_url != server_url: client = OSIsoftClient( @@ -82,10 +90,8 @@ # make sure all OSIsoft time string format are evaluated at the same time # rather than at every request, at least for start / end times set in the UI time_not_parsed = False - if not use_start_time_column: - start_time = client.parse_pi_time(start_time) - if not use_end_time_column: - end_time = client.parse_pi_time(end_time) + start_time = client.parse_pi_time(start_time) + end_time = client.parse_pi_time(end_time) sync_time = client.parse_pi_time(sync_time) object_id = input_parameters_row.get(path_column) @@ -102,6 +108,7 @@ interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, max_count=max_count, can_raise=False, object_id=object_id, @@ -109,7 +116,7 @@ summary_duration=summary_duration ) else: - rows = client.get_rows_from_webid( + rows = client.recursive_get_rows_from_webid( object_id, data_type, start_date=start_time, @@ -117,6 +124,7 @@ interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, max_count=max_count, can_raise=False, endpoint_type="AF", @@ -134,6 +142,8 @@ results.extend(extention) else: base = get_base_for_data_type(data_type, object_id) + if duplicate_initial_row: + base.update(duplicate_initial_row) base.update(row) extention = client.unnest_row(base) results.extend(extention) diff --git a/plugin.json b/plugin.json index 50903e2..88c4611 100644 --- a/plugin.json +++ b/plugin.json @@ -1,6 +1,6 @@ { "id": "pi-system", - "version": "1.2.3", + "version": "1.2.4", "meta": { "label": "PI System", "description": "Retrieve data from your OSIsoft PI System servers", diff --git a/python-connectors/pi-system_attribute-search/connector.json b/python-connectors/pi-system_attribute-search/connector.json index c8707c4..2702b7e 100644 --- a/python-connectors/pi-system_attribute-search/connector.json +++ b/python-connectors/pi-system_attribute-search/connector.json @@ -360,6 +360,29 @@ ], "visibilityCondition": "((model.must_retrieve_metrics) && (model.data_type == 'SummaryData'))" }, + { + "name": "boundary_type", + "label": "Boundary type", + "visibilityCondition": "((model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type))", + "type": "SELECT", + "selectChoices":[ + {"value": "Inside", "label": "Inside"}, + {"value": "Outside", "label": "Outside"} + ], + "defaultValue": "Inside" + }, + { + "name": "record_boundary_type", + "label": "Boundary type", + "visibilityCondition": "((model.must_retrieve_metrics) && ['RecordedData'].includes(model.data_type))", + "type": "SELECT", + "selectChoices":[ + {"value": "Inside", "label": "Inside"}, + {"value": "Interpolated", "label": "Interpolated"}, + {"value": "Outside", "label": "Outside"} + ], + "defaultValue": "Inside" + }, { "name": "summary_duration", "label": "Summary duration", diff --git a/python-connectors/pi-system_attribute-search/connector.py b/python-connectors/pi-system_attribute-search/connector.py index 7977436..50daa2b 100644 --- a/python-connectors/pi-system_attribute-search/connector.py +++ 
b/python-connectors/pi-system_attribute-search/connector.py @@ -6,7 +6,8 @@ from osisoft_plugin_common import ( PISystemConnectorError, RecordsLimit, get_credentials, assert_time_format, remove_unwanted_columns, format_output, filter_columns_from_schema, is_child_attribute_path, - check_debug_mode, PerformanceTimer, get_max_count, get_summary_parameters, fields_selector + check_debug_mode, PerformanceTimer, get_max_count, get_summary_parameters, fields_selector, + get_interpolated_parameters ) from osisoft_constants import OSIsoftConstants @@ -36,9 +37,7 @@ def __init__(self, config, plugin_config): self.start_time = self.client.parse_pi_time(self.start_time) self.end_time = config.get("end_time") self.end_time = self.client.parse_pi_time(self.end_time) - is_interpolated_data = config.get("data_type", "").endswith("InterpolatedData") - self.interval = config.get("interval") if is_interpolated_data else None - self.sync_time = config.get("sync_time") if is_interpolated_data else None + self.interval, self.sync_time, self.boundary_type = get_interpolated_parameters(config) self.sync_time = self.client.parse_pi_time(self.sync_time) assert_time_format(self.start_time, error_source="start time") assert_time_format(self.end_time, error_source="end time") @@ -57,6 +56,8 @@ def __init__(self, config, plugin_config): self.config = config self.summary_type, self.summary_duration = get_summary_parameters(config) + self.record_boundary_type = config.get("record_boundary_type") if self.data_type == "RecordedData" else None + def extract_database_webid(self, database_endpoint): return database_endpoint.split("/")[-1] @@ -114,8 +115,9 @@ def generate_rows(self, dataset_schema=None, dataset_partitioning=None, selected_fields=fields_selector(self.data_type), max_count=self.max_count, summary_type=self.summary_type, - summary_duration=self.summary_duration - # boundary_type=self.boundary_type + summary_duration=self.summary_duration, + boundary_type=self.boundary_type, + record_boundary_type=self.record_boundary_type ): if limit.is_reached(): return diff --git a/python-connectors/pi-system_event-frames-search/connector.json b/python-connectors/pi-system_event-frames-search/connector.json index 4e5b9f0..0391175 100644 --- a/python-connectors/pi-system_event-frames-search/connector.json +++ b/python-connectors/pi-system_event-frames-search/connector.json @@ -200,6 +200,38 @@ ], "mandatory": true }, + { + "type": "SEPARATOR", + "description": "Interpolation", + "visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)" + }, + { + "name": "interval", + "label": "Interval", + "visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)", + "description": "Optional", + "type": "STRING", + "defaultValue": "" + }, + { + "name": "sync_time", + "label": "Sync time", + "visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)", + "description": "Optional", + "type": "STRING", + "defaultValue": "" + }, + { + "name": "boundary_type", + "label": "Boundary type", + "visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)", + "type": "SELECT", + "selectChoices":[ + {"value": "Inside", "label": "Inside"}, + {"value": "Outside", "label": "Outside"} + ], + "defaultValue": "Inside" + }, { "name": "summary_type", "label": "Summary type", diff --git a/python-connectors/pi-system_event-frames-search/connector.py 
b/python-connectors/pi-system_event-frames-search/connector.py index 8c1b4d4..1775ca9 100644 --- a/python-connectors/pi-system_event-frames-search/connector.py +++ b/python-connectors/pi-system_event-frames-search/connector.py @@ -7,7 +7,7 @@ from osisoft_plugin_common import ( PISystemConnectorError, RecordsLimit, get_credentials, build_requests_params, assert_time_format, get_advanced_parameters, check_debug_mode, - PerformanceTimer, get_max_count, get_summary_parameters + PerformanceTimer, get_max_count, get_summary_parameters, get_interpolated_parameters ) @@ -47,6 +47,7 @@ def __init__(self, config, plugin_config): self.end_time = self.client.parse_pi_time(self.end_time) if self.end_time: config["end_time"] = self.end_time + self.interval, self.sync_time, self.boundary_type = get_interpolated_parameters(config) self.search_mode = config.get("search_mode", None) self.output_type = config.get("output_type") assert_time_format(self.start_time, error_source="start time") @@ -79,7 +80,8 @@ def generate_rows(self, dataset_schema=None, dataset_partitioning=None, if self.object_id: for event_frame in self.client.get_rows_from_urls( self.object_id, self.data_type, start_date=self.start_time, - end_date=self.end_time, max_count=self.max_count): + end_date=self.end_time, interval=self.interval, sync_time=self.sync_time, + boundary_type=self.boundary_type, max_count=self.max_count): self.yields_timer.start() yield event_frame self.yields_timer.stop() @@ -115,6 +117,9 @@ def generate_rows(self, dataset_schema=None, dataset_partitioning=None, batch_size=self.batch_size, summary_type=self.summary_type, summary_duration=self.summary_duration, + boundary_type=self.boundary_type, + interval=self.interval, + sync_time=self.sync_time, max_count=self.max_count ) for batch_row in batch_rows: @@ -145,6 +150,7 @@ def generate_rows(self, dataset_schema=None, dataset_partitioning=None, event_frame_id = event_frame.get("WebId") event_frame_metrics = self.client.get_rows_from_webid( event_frame_id, self.data_type, summary_type=self.summary_type, summary_duration=self.summary_duration, + interval=self.interval, sync_time=self.sync_time, boundary_type=self.boundary_type, search_full_hierarchy=self.search_full_hierarchy, max_count=self.max_count, can_raise=False ) diff --git a/python-lib/osisoft_client.py b/python-lib/osisoft_client.py index 6d66ad0..385b8fe 100644 --- a/python-lib/osisoft_client.py +++ b/python-lib/osisoft_client.py @@ -48,7 +48,7 @@ def get_auth(self, auth_type, username, password): return None def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None, - interval=None, sync_time=None, boundary_type=None, selected_fields=None, + interval=None, sync_time=None, boundary_type=None, record_boundary_type=None, selected_fields=None, can_raise=True, endpoint_type="event_frames", search_full_hierarchy=None, max_count=None, summary_type=None, summary_duration=None): # Split the time range until no more HTTP 400 @@ -57,7 +57,8 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d while not done: logger.info("Attempting download webids from {} to {}".format(start_date, end_date)) rows = self.get_rows_from_webid(webid, data_type, start_date=start_date, end_date=end_date, - interval=interval, sync_time=sync_time, boundary_type=boundary_type, selected_fields=selected_fields, + interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, selected_fields=selected_fields, can_raise=can_raise, 
endpoint_type=endpoint_type, search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration) counter = 0 @@ -79,7 +80,8 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d start_timestamp, end_timestamp, half_time_iso = self.halve_time_range(start_date, end_date) first_half_rows = self.recursive_get_rows_from_webid( webid, data_type, start_date=start_timestamp, end_date=half_time_iso, - interval=interval, sync_time=sync_time, boundary_type=boundary_type, selected_fields=selected_fields, + interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, selected_fields=selected_fields, can_raise=can_raise, endpoint_type=endpoint_type, search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration ) @@ -88,7 +90,8 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d logger.info("Successfully retrieved first half ({} to {})".format(start_timestamp, half_time_iso)) second_half_rows = self.recursive_get_rows_from_webid( webid, data_type, start_date=half_time_iso, end_date=end_timestamp, - interval=interval, sync_time=sync_time, boundary_type=boundary_type, selected_fields=selected_fields, + interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, selected_fields=selected_fields, can_raise=can_raise, endpoint_type=endpoint_type, search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration ) @@ -109,7 +112,7 @@ def recursive_get_rows_from_webid(self, webid, data_type, start_date=None, end_d done = True def recursive_get_rows_from_item(self, item, data_type, start_date=None, end_date=None, - interval=None, sync_time=None, boundary_type=None, + interval=None, sync_time=None, boundary_type=None, record_boundary_type=None, can_raise=True, object_id=None, endpoint_type="event_frames", search_full_hierarchy=None, max_count=None, summary_type=None, summary_duration=None): # item can be an pi tag, a path to an element or event frame @@ -119,7 +122,8 @@ def recursive_get_rows_from_item(self, item, data_type, start_date=None, end_dat while not done: logger.info("Attempting download items from {} to {}".format(start_date, end_date)) rows = self.get_rows_from_item(item, data_type, start_date=start_date, end_date=end_date, interval=interval, - sync_time=sync_time, boundary_type=boundary_type, can_raise=True, object_id=object_id, + sync_time=sync_time, boundary_type=boundary_type, record_boundary_type=record_boundary_type, + can_raise=True, object_id=object_id, search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration) counter = 0 @@ -141,7 +145,8 @@ def recursive_get_rows_from_item(self, item, data_type, start_date=None, end_dat start_timestamp, end_timestamp, half_time_iso = self.halve_time_range(start_date, end_date) first_half_rows = self.recursive_get_rows_from_item( item, data_type, start_date=start_timestamp, end_date=half_time_iso, - interval=interval, sync_time=sync_time, boundary_type=boundary_type, can_raise=True, object_id=object_id, + interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, can_raise=True, object_id=object_id, search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, 
summary_duration=summary_duration ) for row in first_half_rows: @@ -149,7 +154,8 @@ def recursive_get_rows_from_item(self, item, data_type, start_date=None, end_dat logger.info("Successfully retrieved first half ({} to {})".format(start_timestamp, half_time_iso)) second_half_rows = self.recursive_get_rows_from_item( item, data_type, start_date=half_time_iso, end_date=end_timestamp, - interval=interval, sync_time=sync_time, boundary_type=boundary_type, can_raise=True, object_id=object_id, + interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, can_raise=True, object_id=object_id, search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration ) for row in second_half_rows: @@ -219,7 +225,7 @@ def parse_pi_time(self, pi_time, to_epoch=False): return iso_timestamp def get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None, - interval=None, sync_time=None, boundary_type=None, selected_fields=None, + interval=None, sync_time=None, boundary_type=None, record_boundary_type=None, selected_fields=None, can_raise=True, endpoint_type="event_frames", search_full_hierarchy=None, max_count=None, summary_type=None, summary_duration=None): @@ -234,6 +240,7 @@ def get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None, interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, selected_fields=selected_fields, search_full_hierarchy=search_full_hierarchy, max_count=max_count, @@ -252,7 +259,7 @@ def get_rows_from_webid(self, webid, data_type, start_date=None, end_date=None, yield item def get_rows_from_webids(self, input_rows, data_type, start_date=None, end_date=None, - interval=None, sync_time=None, boundary_type=None, selected_fields=None, search_full_hierarchy=None, + interval=None, sync_time=None, boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None, can_raise=True, endpoint_type="event_frames", batch_size=500, summary_type=None, summary_duration=None): batch_requests_parameters = [] number_processed_webids = 0 @@ -318,7 +325,7 @@ def _batch_requests(self, batch_requests_parameters): yield batch_section.get("Content", {}) def generic_get_kwargs(self, start_date=None, end_date=None, interval=None, sync_time=None, - boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None, + boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None, summary_type=None, summary_duration=None, can_raise=None): headers = self.get_requests_headers() params = self.get_requests_params( @@ -327,6 +334,7 @@ def generic_get_kwargs(self, start_date=None, end_date=None, interval=None, sync interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, selected_fields=selected_fields, search_full_hierarchy=search_full_hierarchy, max_count=max_count, @@ -339,7 +347,7 @@ def generic_get_kwargs(self, start_date=None, end_date=None, interval=None, sync } def generic_get(self, url, start_date=None, end_date=None, interval=None, sync_time=None, - boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None, + boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None, max_count=None, can_raise=None, summary_type=None, summary_duration=None): headers = self.get_requests_headers() 
params = self.get_requests_params( @@ -348,6 +356,7 @@ def generic_get(self, url, start_date=None, end_date=None, interval=None, sync_t interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, selected_fields=selected_fields, search_full_hierarchy=search_full_hierarchy, max_count=max_count, @@ -363,7 +372,7 @@ def generic_get(self, url, start_date=None, end_date=None, interval=None, sync_t return json_response def get_rows_from_item(self, item, data_type, start_date=None, end_date=None, interval=None, - sync_time=None, boundary_type=None, can_raise=True, object_id=None, + sync_time=None, boundary_type=None, record_boundary_type=None, can_raise=True, object_id=None, search_full_hierarchy=None, max_count=None, summary_type=None, summary_duration=None): # item can be an pi tag, a path to an element or event frame @@ -378,6 +387,7 @@ def get_rows_from_item(self, item, data_type, start_date=None, end_date=None, in interval=interval, sync_time=sync_time, boundary_type=boundary_type, + record_boundary_type=record_boundary_type, max_count=max_count, search_full_hierarchy=search_full_hierarchy, can_raise=can_raise, @@ -392,7 +402,7 @@ def get_rows_from_item(self, item, data_type, start_date=None, end_date=None, in yield self.loop_sub_items(item) def get_link_from_item(self, item, data_type, start_date, end_date, interval=None, - sync_time=None, boundary_type=None, search_full_hierarchy=None, + sync_time=None, boundary_type=None, record_boundary_type=None, search_full_hierarchy=None, max_count=None, can_raise=True, summary_type=None, summary_duration=None): url = self.extract_link_with_key(item, data_type) @@ -404,7 +414,8 @@ def get_link_from_item(self, item, data_type, start_date, end_date, interval=Non headers = self.get_requests_headers() params = build_requests_params( start_time=start_date, end_time=end_date, interval=interval, - sync_time=sync_time, sync_time_boundary_type=boundary_type, search_full_hierarchy=search_full_hierarchy, + sync_time=sync_time, sync_time_boundary_type=boundary_type, record_boundary_type=record_boundary_type, + search_full_hierarchy=search_full_hierarchy, max_count=max_count, summary_type=summary_type, summary_duration=summary_duration ) json_response = self.get( @@ -415,13 +426,13 @@ def get_link_from_item(self, item, data_type, start_date, end_date, interval=Non ) return json_response - def get_rows_from_url(self, url=None, start_date=None, end_date=None, interval=None, sync_time=None, max_count=None): + def get_rows_from_url(self, url=None, start_date=None, end_date=None, interval=None, sync_time=None, boundary_type=None, max_count=None): pagination = OffsetPagination() has_more = True while has_more: json_response, has_more = pagination.get_offset_paginated( self.get_link_from_url, - url, start_date=start_date, end_date=end_date, interval=interval, sync_time=sync_time, max_count=max_count + url, start_date=start_date, end_date=end_date, interval=interval, sync_time=sync_time, boundary_type=boundary_type, max_count=max_count ) items = json_response.get(OSIsoftConstants.API_ITEM_KEY, [json_response]) for item in items: @@ -432,15 +443,18 @@ def get_rows_from_url(self, url=None, start_date=None, end_date=None, interval=N else: yield item - def get_rows_from_urls(self, links=None, data_type=None, start_date=None, end_date=None, interval=None, sync_time=None, max_count=None): + def get_rows_from_urls(self, links=None, data_type=None, start_date=None, end_date=None, interval=None, sync_time=None, 
        links = links or []
        for link in links:
            url = link
-            rows = self.get_rows_from_url(url, start_date=start_date, end_date=end_date, interval=interval, sync_time=sync_time, max_count=max_count)
+            rows = self.get_rows_from_url(
+                url, start_date=start_date, end_date=end_date, interval=interval,
+                sync_time=sync_time, boundary_type=boundary_type, max_count=max_count
+            )
            for row in rows:
                yield row
 
-    def get_link_from_url(self, url, start_date=None, end_date=None, interval=None, sync_time=None, start_index=None, max_count=None):
+    def get_link_from_url(self, url, start_date=None, end_date=None, interval=None, sync_time=None, start_index=None, boundary_type=None, max_count=None):
        if not url:
            url = self.endpoint.get_base_url()
        headers = self.get_requests_headers()
@@ -450,6 +464,7 @@
            interval=interval,
            sync_time=sync_time,
            start_index=start_index,
+           sync_time_boundary_type=boundary_type,
            max_count=max_count
        )
        json_response = self.get(
@@ -625,7 +640,7 @@ def get_requests_headers(self):
        }
 
    def get_requests_params(self, start_date=None, end_date=None, interval=None, sync_time=None,
-                            boundary_type=None, selected_fields=None, search_full_hierarchy=None,
+                            boundary_type=None, record_boundary_type=None, selected_fields=None, search_full_hierarchy=None,
                             max_count=None, summary_type=None, summary_duration=None):
        params = {}
        if start_date:
@@ -638,6 +653,8 @@ def get_requests_params(self, start_date=None, end_date=None, interval=None, syn
            params.update({"syncTime": sync_time})
        if boundary_type:
            params.update({"syncTimeBoundaryType": boundary_type})
+       if record_boundary_type:
+           params.update({"boundaryType": record_boundary_type})
        if selected_fields:
            params.update({"selectedFields": selected_fields})
        if search_full_hierarchy:
diff --git a/python-lib/osisoft_constants.py b/python-lib/osisoft_constants.py
index 3266ce4..4957a01 100644
--- a/python-lib/osisoft_constants.py
+++ b/python-lib/osisoft_constants.py
@@ -403,7 +403,7 @@ class OSIsoftConstants(object):
        "Security": "{base_url}/eventframes/{webid}/security",
        "SecurityEntries": "{base_url}/eventframes/{webid}/securityentries"
    }
-    PLUGIN_VERSION = "1.2.3"
+    PLUGIN_VERSION = "1.2.4"
    VALUE_COLUMN_SUFFIX = "_val"
    WEB_API_PATH = "piwebapi"
    WRITE_HEADERS = {'X-Requested-With': 'XmlHttpRequest'}
diff --git a/python-lib/osisoft_plugin_common.py b/python-lib/osisoft_plugin_common.py
index aae0ac1..1089b34 100644
--- a/python-lib/osisoft_plugin_common.py
+++ b/python-lib/osisoft_plugin_common.py
@@ -133,6 +133,7 @@ def build_requests_params(**kwargs):
        "interval": "interval",
        "sync_time": "syncTime",
        "sync_time_boundary_type": "syncTimeBoundaryType",
+       "record_boundary_type": "boundaryType",
        "name_filter": "nameFilter",
        "category_name": "categoryName",
        "template_name": "templateName",
@@ -263,7 +264,7 @@ def format_output(input_row, reference_row=None, is_enumeration_value=False):
    type_column = None
    if "Value" in output_row and isinstance(output_row.get("Value"), dict):
        type_column = output_row.get("Type")
-        output_row = output_row.get("Value")
+        output_row.update(output_row.get("Value"))
        output_row.pop("Good", None)
        output_row.pop("Questionable", None)
        output_row.pop("Substituted", None)
@@ -448,7 +449,7 @@ def fields_selector(data_type):
    if data_type in ["Value", "EndValue"]:
        return "Links%3BTimestamp%3BValue%3BType%3BUnitsAbbreviation"
    else:
-        return "Links%3BItems.Timestamp%3BItems.Value%3BItems.Type"
+        return "Links%3BItems.Timestamp%3BItems.Value%3BItems.Type%3BItems.Value.Value"
 
 
def get_next_page_url(json):
@@ -482,7 +483,7 @@ class PerformanceTimer():
    - adds up all start / stop intervals
    - count the number of intervals
    - compute the average event time
-    - provides a lists of the NUMBER_OF_SLOWEST_EVENTS_KEPT longest events by event id, for instance url
+    - provides a list of the NUMBER_OF_SLOWEST_EVENTS_KEPT longest events by event id, for instance url
    """
    NUMBER_OF_SLOWEST_EVENTS_KEPT = 5