2 changes: 1 addition & 1 deletion custom-recipes/pi-system-retrieve-list/recipe.json
@@ -105,7 +105,7 @@
"name": "use_batch_mode",
"label": "Use batch mode",
"type": "BOOLEAN",
"description": "",
"description": "Use to quickly retrieve small samples from multiple paths. ⚠️Not for large time ranges",
"visibilityCondition": "model.show_advanced_parameters==true",
"defaultValue": false
},
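Reviewer note: this flag is only visible once advanced parameters are shown. As a sketch of how a recipe would typically read it, assuming Dataiku's standard get_recipe_config() helper (the actual wiring inside pi-system-retrieve-list may differ):

# Sketch only, not part of the diff: reading the new flag from the recipe config.
from dataiku.customrecipe import get_recipe_config

config = get_recipe_config()
use_batch_mode = config.get("use_batch_mode", False)
if use_batch_mode:
    # Batch mode: quick retrieval of small samples from multiple paths;
    # per the new description, avoid large time ranges on this code path.
    pass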
22 changes: 22 additions & 0 deletions parameter-sets/basic-auth/parameter-set.json
@@ -50,6 +50,28 @@
"description": "(optional)",
"defaultValue": ""
},
{
"name": "max_request_size",
"label": "Maximum request size",
"type": "INT",
"description": "",
"defaultValue": 1000
},
{
"name": "estimated_density",
"label": "Estimated point density",
"type": "DOUBLE",
"description": "points/hour",
"defaultValue": 2
},
{
"name": "maximum_points_returned",
"label": "Maximum points return",
"type": "INT",
"description": "Target optimum number of points returned by batch. Calculated based on point density.",
"defaultValue": 1000000,
"minI": 1
},
{
"name": "osisoft_basic",
"type": "CREDENTIAL_REQUEST",
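Reviewer note: the two new numeric parameters together bound how much time one batch may cover. A quick check with the defaults above (plain Python, variable names mirror the parameter names):

# Not part of the diff: how the defaults interact.
maximum_points_returned = 1_000_000  # default above
estimated_density = 2.0              # points/hour, default above

# Maximum time span one batch may cover before hitting the point cap:
max_hours_per_batch = maximum_points_returned / estimated_density
print(max_hours_per_batch)  # 500000.0 hours, matching the "500k hours max" comment below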
37 changes: 37 additions & 0 deletions python-lib/osisoft_plugin_common.py
@@ -56,6 +56,21 @@ def get_credentials(config, can_raise=True):
return auth_type, username, password, server_url, is_ssl_check_disabled, error_message


def get_batch_parameters(config):
    credentials = config.get("credentials", {})
    # Fallbacks mirror the defaults declared in parameter-sets/basic-auth/parameter-set.json
    max_request_size = credentials.get("max_request_size", 1000)
    estimated_density = credentials.get("estimated_density", 2)
    maximum_points_returned = credentials.get("maximum_points_returned", 1000000)
    return max_request_size, estimated_density, maximum_points_returned


def compute_time_spent(start, end, interval):
    # Timestamps arrive as ISO-8601 strings, e.g.
    # 2023-06-30T13:05:10.8692786Z -> 2024-06-30T13:05:10.9640942Z
    # Returns the spanned time in seconds; interval is currently unused.
    start = iso_to_epoch(start)
    end = iso_to_epoch(end)
    return end - start


def get_advanced_parameters(config):
show_advanced_parameters = config.get('show_advanced_parameters', False)
batch_size = 500
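Reviewer note: a sanity check of compute_time_spent with the timestamps from its comment, assuming iso_to_epoch returns epoch seconds as a float (as its name suggests):

# Hypothetical check, not part of the diff. 2023-06-30 -> 2024-06-30 spans
# 366 days (2024 is a leap year), i.e. 366 * 86400 = 31,622,400 seconds.
elapsed = compute_time_spent(
    "2023-06-30T13:05:10.8692786Z",
    "2024-06-30T13:05:10.9640942Z",
    None,  # interval is ignored
)
assert abs(elapsed - 31_622_400) < 1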
@@ -600,3 +615,25 @@ def get_worst_performers(self):
for slowest_event, slowest_time in zip(self.slowest_events, self.slowest_times):
worst_performers.append("{}: {}s".format(slowest_event, slowest_time))
return worst_performers


class BatchTimeCounter(object):
    def __init__(self, max_time_to_retrieve_per_batch):
        # max_time_to_retrieve_per_batch is given in hours; store it in seconds.
        # A negative value disables the limit.
        self.max_time_to_retrieve_per_batch = max_time_to_retrieve_per_batch * 60 * 60
        self.total_batch_time = 0
        # At the default density of 2 points/hour per line and a cap of
        # 1,000,000 points returned, one batch can cover at most 500k hours.

    def is_batch_full(self):
        if self.max_time_to_retrieve_per_batch < 0:
            return False
        if self.total_batch_time > self.max_time_to_retrieve_per_batch:
            logger.warning("Batch contains {}s of requested time, flushing now".format(self.total_batch_time))
            self.total_batch_time = 0
            return True
        logger.info("Batch below time threshold")
        return False

    def add(self, start_time, end_time, interval):
        self.total_batch_time += compute_time_spent(start_time, end_time, interval)
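
Reviewer note: a minimal usage sketch of the new counter, under the assumption that the caller flushes its accumulated requests whenever is_batch_full() returns True; the loop, requested_ranges, and flush_batch are hypothetical stand-ins for the recipe's actual driver code:

# Hypothetical driver loop, not part of the diff: accumulate per-path time
# ranges and flush whenever the counter reports the batch is full.
counter = BatchTimeCounter(max_time_to_retrieve_per_batch=500000)  # hours
batch = []
for path, start_time, end_time in requested_ranges:  # hypothetical iterable
    batch.append((path, start_time, end_time))
    counter.add(start_time, end_time, None)
    if counter.is_batch_full():
        flush_batch(batch)  # hypothetical: issue the accumulated requests
        batch = []
if batch:
    flush_batch(batch)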