Merged
38 commits
5099323
activate start time column
Dec 5, 2024
b6e0d7b
Adding boundary type selector to recorded data type
alexbourret Dec 12, 2024
eee84cb
Adding boundary selector to connector
alexbourret Dec 12, 2024
20adf06
v1.2.4
alexbourret Dec 12, 2024
81197c1
Updating changelog
alexbourret Dec 12, 2024
07b7a1f
beta marker
alexbourret Dec 12, 2024
9d59cda
v1.2.4
alexbourret Dec 12, 2024
edd18f9
add Items.Value.Value to fields selector
alexbourret Dec 20, 2024
7cb64f6
beta 2 marker
alexbourret Dec 20, 2024
ca55b61
update changelog
alexbourret Dec 20, 2024
11c8779
fix recursive_get_rows_from_webid
Jan 2, 2025
027203c
Add option to copy input rows into recipe output [sc-227010]
alexbourret Jan 31, 2025
c026bf3
update changelog
alexbourret Jan 31, 2025
601fb6f
beta 2
alexbourret Jan 31, 2025
0d0cae2
Merge branch 'bug/sc-220666-some-value-not-available' into feature/sc…
alexbourret Feb 4, 2025
4b72df5
beta 3
alexbourret Feb 4, 2025
9d05710
Merge pull request #60 from jerometerrier/6-fix_recursive_get_rows_fr…
alexbourret Feb 4, 2025
e44bcba
update changelog
alexbourret Feb 4, 2025
2117443
Merge pull request #54 from jerometerrier/2-activate-use-start-time-c…
alexbourret Feb 4, 2025
d8e072e
beta 4
alexbourret Feb 4, 2025
ae29b86
update changelog
alexbourret Feb 4, 2025
9c482c3
Add interpolated value for EF recipe and dataset
alexbourret Feb 4, 2025
2fa074e
Fix for [sc-229162]
alexbourret Feb 13, 2025
ecb01ed
beta 5
alexbourret Feb 13, 2025
aa04327
Recursive get rows webid [sc-229599]
MayeulRousselet Feb 17, 2025
9aa6f6d
Activate start time col [sc-229601]
MayeulRousselet Feb 17, 2025
74e6328
add interpolate boundary type for record data type
alexbourret Feb 19, 2025
f8d9384
UI update
alexbourret Feb 24, 2025
76a62b9
add warning of possible column overwritting
alexbourret Mar 7, 2025
5c06172
removing the lonely '
alexbourret Mar 7, 2025
31fc7b6
fix boundary type for ef connector (selected ef)
alexbourret Mar 11, 2025
df49ec4
Merge pull request #66 from dataiku/bug/sc-229162-missing-timestamps
alexbourret Mar 11, 2025
0750f9d
Merge pull request #65 from dataiku/feature/sc-227482-interpolated-va…
alexbourret Mar 11, 2025
7c19265
Merge branch 'test/recursive_get_rows_webid' into test/activate-start…
alexbourret Mar 11, 2025
b6fd464
Merge pull request #64 from dataiku/test/activate-start-time-col
alexbourret Mar 11, 2025
7d46303
Merge pull request #63 from dataiku/test/recursive_get_rows_webid
alexbourret Mar 11, 2025
26a213e
Merge pull request #62 from dataiku/feature/sc-227010-option-to-copy-…
alexbourret Mar 11, 2025
9afa9ea
Merge pull request #58 from dataiku/bug/sc-220666-some-value-not-avai…
alexbourret Mar 11, 2025
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,14 @@
# Changelog

## [Version 1.2.4](https://github.com/dataiku/dss-plugin-pi-server/releases/tag/v1.2.4) - Feature and bug release - 2025-02-18

- Add boundary type selector to recorded data type
- Add boundary type selector to attribute search connector
- *Assets values downloader* Add option to copy each input row into output dataset
- Fix issue with recorded data type
- *Assets values downloader* Time range pagination when using webids
- *Assets values downloader* Time can be selected from a column of the input dataset

## [Version 1.2.3](https://github.com/dataiku/dss-plugin-pi-server/releases/tag/v1.2.3) - Feature release - 2024-09-26

- Add summaryDuration input (duration of each summary interval)
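The two *Assets values downloader* entries above (time-range pagination over webids, per-row start/end times) are the changes that most affect request flow. As a rough, hypothetical sketch only — `fetch_page`, the page size and the timestamp handling below are illustrative assumptions, not the plugin's `recursive_get_rows_from_webid` implementation — paginating a long time range could look like this:

```python
from datetime import datetime, timedelta

PAGE_SIZE = 1000  # illustrative page size; the real cap is server/recipe configured

def paginate_recorded(fetch_page, start, end):
    """Yield rows between two timezone-aware UTC datetimes, re-issuing the
    request from just after the last returned timestamp whenever a page
    comes back full (i.e. the requested range was truncated)."""
    cursor = start
    while cursor < end:
        rows = fetch_page(start_time=cursor.isoformat(),
                          end_time=end.isoformat(),
                          max_count=PAGE_SIZE)
        if not rows:
            return
        yield from rows
        if len(rows) < PAGE_SIZE:
            return  # everything up to `end` has been returned
        # resume just after the last timestamp received
        last = datetime.fromisoformat(rows[-1]["Timestamp"].replace("Z", "+00:00"))
        cursor = last + timedelta(microseconds=1)
```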
39 changes: 20 additions & 19 deletions custom-recipes/pi-system-retrieve-event-frames/recipe.json
@@ -143,7 +143,7 @@
{
"visibilityCondition": "model.use_start_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
"name": "start_time_column",
"label": "Start times' column",
"label": "Start times column",
"description": "Optional",
"type": "COLUMN",
"columnRole": "input_dataset"
@@ -167,7 +167,7 @@
{
"visibilityCondition": "model.use_end_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
"name": "end_time_column",
"label": "End times' column",
"label": "End times column",
"description": "Optional",
"type": "COLUMN",
"columnRole": "input_dataset"
@@ -180,6 +180,24 @@
"type": "STRING",
"defaultValue": ""
},
{
"type": "SEPARATOR",
"description": "Data to retrieve"
},
{
"name": "data_type",
"label": "Data type",
"type": "SELECT",
"selectChoices":[
{"value": "InterpolatedData", "label": "Interpolated"},
{"value": "PlotData", "label": "Plot"},
{"value": "RecordedData", "label": "Recorded"},
{"value": "SummaryData", "label": "SummaryData"},
{"value": "Value", "label": "Value"},
{"value": "EndValue", "label": "EndValue"}
],
"defaultValue": "SummaryData"
},
{
"type": "SEPARATOR",
"description": "Interpolation",
@@ -212,23 +230,6 @@
],
"defaultValue": "Inside"
},
{
"type": "SEPARATOR",
"description": "Data to retrieve"
},
{
"name": "data_type",
"label": "Data type",
"type": "SELECT",
"selectChoices":[
{"value": "PlotData", "label": "Plot"},
{"value": "RecordedData", "label": "Recorded"},
{"value": "SummaryData", "label": "SummaryData"},
{"value": "Value", "label": "Value"},
{"value": "EndValue", "label": "EndValue"}
],
"defaultValue": "SummaryData"
},
{
"name": "summary_type",
"label": "Summary type",
36 changes: 27 additions & 9 deletions custom-recipes/pi-system-retrieve-list/recipe.json
@@ -112,6 +112,12 @@
"type": "COLUMN",
"columnRole": "input_dataset"
},
{
"name": "do_duplicate_input_row",
"label": "Copy other input columns",
"description": "(those with matching names will be overwritten by PI system data)",
"type": "BOOLEAN"
},
{
"type": "SEPARATOR",
"description": "Time",
@@ -125,16 +131,16 @@
{
"name": "use_start_time_column",
"label": "Use start time value per row",
"description": "Description here",
"visibilityCondition": "false && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
"description": "",
"visibilityCondition": "['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type)",
"type": "BOOLEAN",
"defaultValue": false
},
{
"visibilityCondition": "model.use_start_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
"name": "start_time_column",
"label": "Start times' column",
"description": "Optional",
"label": "Start times column",
"description": "Column type must be string",
"type": "COLUMN",
"columnRole": "input_dataset"
},
@@ -148,17 +154,17 @@
},
{
"name": "use_end_time_column",
"label": "Use start time value per row",
"label": "Use end time value per row",
"description": "",
"visibilityCondition": "false && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
"visibilityCondition": "['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type)",
"type": "BOOLEAN",
"defaultValue": false
},
{
"visibilityCondition": "model.use_end_time_column==true && (['InterpolatedData', 'RecordedData', 'PlotData', 'EventFrames'].includes(model.data_type))",
"name": "end_time_column",
"label": "End times' column",
"description": "Optional",
"label": "End times column",
"description": "Column type must be string",
"type": "COLUMN",
"columnRole": "input_dataset"
},
@@ -194,10 +200,22 @@
{
"name": "boundary_type",
"label": "Boundary type",
"visibilityCondition": "['InterpolatedData'].includes(model.data_type)",
"visibilityCondition": "((model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type))",
"type": "SELECT",
"selectChoices":[
{"value": "Inside", "label": "Inside"},
{"value": "Outside", "label": "Outside"}
],
"defaultValue": "Inside"
},
{
"name": "record_boundary_type",
"label": "Boundary type",
"visibilityCondition": "((model.must_retrieve_metrics) && ['RecordedData'].includes(model.data_type))",
"type": "SELECT",
"selectChoices":[
{"value": "Inside", "label": "Inside"},
{"value": "Interpolated", "label": "Interpolated"},
{"value": "Outside", "label": "Outside"}
],
"defaultValue": "Inside"
20 changes: 15 additions & 5 deletions custom-recipes/pi-system-retrieve-list/recipe.py
@@ -48,7 +48,9 @@
end_time_column = config.get("end_time_column")
server_url_column = config.get("server_url_column")
interval, sync_time, boundary_type = get_interpolated_parameters(config)
record_boundary_type = config.get("record_boundary_type") if data_type == "RecordedData" else None
summary_type, summary_duration = get_summary_parameters(config)
do_duplicate_input_row = config.get("do_duplicate_input_row", False)

network_timer = PerformanceTimer()
processing_timer = PerformanceTimer()
@@ -63,13 +65,19 @@
client = None
previous_server_url = ""
time_not_parsed = True

input_columns = list(input_parameters_dataframe.columns) if do_duplicate_input_row else []

with output_dataset.get_writer() as writer:
first_dataframe = True
for index, input_parameters_row in input_parameters_dataframe.iterrows():
server_url = input_parameters_row.get(server_url_column, server_url) if use_server_url_column else server_url
start_time = input_parameters_row.get(start_time_column, start_time) if use_start_time_column else start_time
end_time = input_parameters_row.get(end_time_column, end_time) if use_end_time_column else end_time
row_name = input_parameters_row.get("Name")
duplicate_initial_row = {}
for input_column in input_columns:
duplicate_initial_row[input_column] = input_parameters_row.get(input_column)

if client is None or previous_server_url != server_url:
client = OSIsoftClient(
@@ -82,10 +90,8 @@
# make sure all OSIsoft time string format are evaluated at the same time
# rather than at every request, at least for start / end times set in the UI
time_not_parsed = False
if not use_start_time_column:
start_time = client.parse_pi_time(start_time)
if not use_end_time_column:
end_time = client.parse_pi_time(end_time)
start_time = client.parse_pi_time(start_time)
end_time = client.parse_pi_time(end_time)
sync_time = client.parse_pi_time(sync_time)

object_id = input_parameters_row.get(path_column)
@@ -102,21 +108,23 @@
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
max_count=max_count,
can_raise=False,
object_id=object_id,
summary_type=summary_type,
summary_duration=summary_duration
)
else:
rows = client.get_rows_from_webid(
rows = client.recursive_get_rows_from_webid(
object_id,
data_type,
start_date=start_time,
end_date=end_time,
interval=interval,
sync_time=sync_time,
boundary_type=boundary_type,
record_boundary_type=record_boundary_type,
max_count=max_count,
can_raise=False,
endpoint_type="AF",
@@ -134,6 +142,8 @@
results.extend(extention)
else:
base = get_base_for_data_type(data_type, object_id)
if duplicate_initial_row:
base.update(duplicate_initial_row)
base.update(row)
extention = client.unnest_row(base)
results.extend(extention)
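The `base.update(duplicate_initial_row)` followed by `base.update(row)` order above is what backs the new "columns with matching names will be overwritten by PI system data" warning in the recipe form. A standalone illustration of that merge order (the dictionary contents are invented; in the recipe, `base` actually starts from `get_base_for_data_type(...)` rather than an empty dict):

```python
# Input columns are copied first, then PI data is applied on top, so any
# matching column name ends up with the PI System value.
duplicate_initial_row = {"Name": "Pump-01", "Value": "from input", "Site": "A"}
pi_row = {"Value": 42.7, "Timestamp": "2025-02-18T10:00:00Z"}

base = {}
base.update(duplicate_initial_row)  # keep the user's extra columns
base.update(pi_row)                 # PI data wins on name collisions

assert base == {
    "Name": "Pump-01",
    "Site": "A",
    "Value": 42.7,                          # input "Value" was overwritten
    "Timestamp": "2025-02-18T10:00:00Z",
}
```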
2 changes: 1 addition & 1 deletion plugin.json
@@ -1,6 +1,6 @@
{
"id": "pi-system",
"version": "1.2.3",
"version": "1.2.4",
"meta": {
"label": "PI System",
"description": "Retrieve data from your OSIsoft PI System servers",
23 changes: 23 additions & 0 deletions python-connectors/pi-system_attribute-search/connector.json
@@ -360,6 +360,29 @@
],
"visibilityCondition": "((model.must_retrieve_metrics) && (model.data_type == 'SummaryData'))"
},
{
"name": "boundary_type",
"label": "Boundary type",
"visibilityCondition": "((model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type))",
"type": "SELECT",
"selectChoices":[
{"value": "Inside", "label": "Inside"},
{"value": "Outside", "label": "Outside"}
],
"defaultValue": "Inside"
},
{
"name": "record_boundary_type",
"label": "Boundary type",
"visibilityCondition": "((model.must_retrieve_metrics) && ['RecordedData'].includes(model.data_type))",
"type": "SELECT",
"selectChoices":[
{"value": "Inside", "label": "Inside"},
{"value": "Interpolated", "label": "Interpolated"},
{"value": "Outside", "label": "Outside"}
],
"defaultValue": "Inside"
},
{
"name": "summary_duration",
"label": "Summary duration",
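The two selectors above mirror the boundary options of the public PI Web API: recorded streams accept Inside, Outside or Interpolated via `boundaryType`, while interpolated streams accept Inside or Outside via `syncTimeBoundaryType`. The plugin's request code is not part of this diff, so the sketch below is only an assumption about how these settings typically map onto the stream endpoints (URLs and parameter names come from the public API; everything else is invented):

```python
import requests

def get_recorded(base_url, web_id, start, end, boundary_type="Inside", auth=None):
    # boundaryType: Inside, Outside or Interpolated (recorded data)
    response = requests.get(
        f"{base_url}/streams/{web_id}/recorded",
        params={"startTime": start, "endTime": end, "boundaryType": boundary_type},
        auth=auth,
    )
    response.raise_for_status()
    return response.json()

def get_interpolated(base_url, web_id, start, end, interval=None,
                     sync_time=None, boundary_type="Inside", auth=None):
    # syncTimeBoundaryType: Inside or Outside (interpolated data)
    params = {"startTime": start, "endTime": end,
              "syncTimeBoundaryType": boundary_type}
    if interval:
        params["interval"] = interval
    if sync_time:
        params["syncTime"] = sync_time
    response = requests.get(f"{base_url}/streams/{web_id}/interpolated",
                            params=params, auth=auth)
    response.raise_for_status()
    return response.json()
```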
14 changes: 8 additions & 6 deletions python-connectors/pi-system_attribute-search/connector.py
@@ -6,7 +6,8 @@
from osisoft_plugin_common import (
PISystemConnectorError, RecordsLimit, get_credentials, assert_time_format,
remove_unwanted_columns, format_output, filter_columns_from_schema, is_child_attribute_path,
check_debug_mode, PerformanceTimer, get_max_count, get_summary_parameters, fields_selector
check_debug_mode, PerformanceTimer, get_max_count, get_summary_parameters, fields_selector,
get_interpolated_parameters
)
from osisoft_constants import OSIsoftConstants

@@ -36,9 +37,7 @@ def __init__(self, config, plugin_config):
self.start_time = self.client.parse_pi_time(self.start_time)
self.end_time = config.get("end_time")
self.end_time = self.client.parse_pi_time(self.end_time)
is_interpolated_data = config.get("data_type", "").endswith("InterpolatedData")
self.interval = config.get("interval") if is_interpolated_data else None
self.sync_time = config.get("sync_time") if is_interpolated_data else None
self.interval, self.sync_time, self.boundary_type = get_interpolated_parameters(config)
self.sync_time = self.client.parse_pi_time(self.sync_time)
assert_time_format(self.start_time, error_source="start time")
assert_time_format(self.end_time, error_source="end time")
@@ -57,6 +56,8 @@ def __init__(self, config, plugin_config):
self.config = config
self.summary_type, self.summary_duration = get_summary_parameters(config)

self.record_boundary_type = config.get("record_boundary_type") if self.data_type == "RecordedData" else None

def extract_database_webid(self, database_endpoint):
return database_endpoint.split("/")[-1]

Expand Down Expand Up @@ -114,8 +115,9 @@ def generate_rows(self, dataset_schema=None, dataset_partitioning=None,
selected_fields=fields_selector(self.data_type),
max_count=self.max_count,
summary_type=self.summary_type,
summary_duration=self.summary_duration
# boundary_type=self.boundary_type
summary_duration=self.summary_duration,
boundary_type=self.boundary_type,
record_boundary_type=self.record_boundary_type
):
if limit.is_reached():
return
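The connector now delegates to the shared `get_interpolated_parameters(config)` helper instead of reading `interval` and `sync_time` inline. Judging only from the call site above, it returns `(interval, sync_time, boundary_type)`; the shape below is an assumption, not the code that actually lives in `osisoft_plugin_common`:

```python
def get_interpolated_parameters(config):
    # Assumed behaviour: only return interpolation settings when the selected
    # data type is interpolated, mirroring the old inline logic it replaces.
    data_type = config.get("data_type", "")
    if not data_type.endswith("InterpolatedData"):
        return None, None, None
    interval = config.get("interval") or None
    sync_time = config.get("sync_time") or None
    boundary_type = config.get("boundary_type", "Inside")
    return interval, sync_time, boundary_type
```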
32 changes: 32 additions & 0 deletions python-connectors/pi-system_event-frames-search/connector.json
@@ -200,6 +200,38 @@
],
"mandatory": true
},
{
"type": "SEPARATOR",
"description": "Interpolation",
"visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)"
},
{
"name": "interval",
"label": "Interval",
"visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)",
"description": "Optional",
"type": "STRING",
"defaultValue": ""
},
{
"name": "sync_time",
"label": "Sync time",
"visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)",
"description": "Optional",
"type": "STRING",
"defaultValue": ""
},
{
"name": "boundary_type",
"label": "Boundary type",
"visibilityCondition": "(model.must_retrieve_metrics) && ['InterpolatedData'].includes(model.data_type)",
"type": "SELECT",
"selectChoices":[
{"value": "Inside", "label": "Inside"},
{"value": "Outside", "label": "Outside"}
],
"defaultValue": "Inside"
},
{
"name": "summary_type",
"label": "Summary type",