Skip to content

Commit

Permalink
Bug fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
Big-Tree committed Feb 15, 2024
1 parent 9e91f63 commit a42c172
Show file tree
Hide file tree
Showing 4 changed files with 36 additions and 21 deletions.
49 changes: 32 additions & 17 deletions custom_components/optispark/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,14 +92,15 @@ def __init__(
"""Sample API Client."""
self._session = session

def extra_to_datetime(self, extra):
    """Convert unix timestamps in *extra* to ``datetime`` objects, in place.

    Args:
        extra: dict returned by the lambda; when it contains both
            'oldest_dates' and 'newest_dates' sub-dicts, every non-None
            value in those sub-dicts is assumed to be a unix timestamp
            (seconds) and is replaced with a ``datetime``.

    Returns:
        The same ``extra`` dict (mutated in place) for call-chaining.

    NOTE(review): ``datetime.fromtimestamp`` produces naive local-time
    datetimes — confirm local time (not UTC) is intended here.
    """
    # Only touch payloads that carry both date tables; anything else is
    # passed through untouched.
    if 'oldest_dates' in extra and 'newest_dates' in extra:
        for key in ('oldest_dates', 'newest_dates'):
            for column in extra[key]:
                # None means "no data for this column" — leave it as-is.
                if extra[key][column] is not None:
                    extra[key][column] = datetime.fromtimestamp(extra[key][column])
    return extra
#def extra_to_datetime(self, extra):
#    """Convert unix_time to datetime object."""
# if 'oldest_dates' in extra and 'newest_dates' in extra:
# for key in ['oldest_dates', 'newest_dates']:
# for column in extra[key]:
# if extra[key][column] is not None:
# breakpoint()
# extra[key][column] = datetime.fromtimestamp(extra[key][column])
# return extra

async def upload_history(self, dynamo_data):
"""Upload historical data to dynamoDB without calculating heat pump profile."""
Expand All @@ -111,7 +112,7 @@ async def upload_history(self, dynamo_data):
url=lambda_url,
data=payload,
)
extra = self.extra_to_datetime(extra)
#extra = self.extra_to_datetime(extra)
return extra['oldest_dates'], extra['newest_dates']

async def get_data_dates(self, dynamo_data: dict):
Expand All @@ -127,7 +128,7 @@ async def get_data_dates(self, dynamo_data: dict):
url=lambda_url,
data=payload,
)
extra = self.extra_to_datetime(extra)
#extra = self.extra_to_datetime(extra)
return extra['oldest_dates'], extra['newest_dates']

async def async_get_profile(self, lambda_args: dict):
Expand All @@ -152,6 +153,23 @@ async def async_get_profile(self, lambda_args: dict):
results['projected_percent_savings'] = results['base_cost']/results['optimised_cost']*100 - 100
return results

def json_serialisable(self, data):
    """Pack *data* into a JSON-safe base64 string.

    The object is pickled, gzip-compressed, then base64-encoded so the
    result is plain ASCII text that can travel inside a JSON body.
    """
    raw = pickle.dumps(data)
    packed = gzip.compress(raw)
    # Log both sizes so compression effectiveness shows up in debug output.
    LOGGER.debug(f'len(uncompressed_data): {len(raw)}')
    LOGGER.debug(f'len(compressed_data): {len(packed)}')
    return base64.b64encode(packed).decode('utf-8')

def json_deserialise(self, payload):
    """Unpack a payload produced by ``json_serialisable``.

    Expects ``payload['serialised_payload']`` to hold a base64 string of
    gzip-compressed pickle bytes; returns the original Python object.

    NOTE(review): ``pickle.loads`` on data received over the network is
    unsafe if the remote endpoint can be impersonated or compromised —
    confirm the lambda is a trusted producer.
    """
    encoded = payload['serialised_payload']
    compressed = base64.b64decode(encoded)
    return pickle.loads(gzip.decompress(compressed))

async def _api_wrapper(
self,
method: str,
Expand All @@ -162,17 +180,13 @@ async def _api_wrapper(
try:
if 'dynamo_data' in data:
data['dynamo_data'] = floats_to_decimal(data['dynamo_data'])
uncompressed_data = pickle.dumps(data)
compressed_data = gzip.compress(uncompressed_data)
LOGGER.debug(f'len(uncompressed_data): {len(uncompressed_data)}')
LOGGER.debug(f'len(compressed_data): {len(compressed_data)}')
base64_string = base64.b64encode(compressed_data).decode('utf-8')
data = self.json_serialisable(data)

async with async_timeout.timeout(40):
response = await self._session.request(
method=method,
url=url,
json=base64_string,
json=data,
)
if response.status in (401, 403):
raise OptisparkApiClientAuthenticationError(
Expand All @@ -184,7 +198,8 @@ async def _api_wrapper(
raise OptisparkApiClientCommunicationError(
'502 Bad Gateway - check payload')
response.raise_for_status()
return await response.json()
payload = await response.json()
return self.json_deserialise(payload)

except asyncio.TimeoutError as exception:
LOGGER.error(traceback.format_exc())
Expand Down
4 changes: 2 additions & 2 deletions custom_components/optispark/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

NAME = "Optispark"
DOMAIN = "optispark"
VERSION = "0.1.8"
VERSION = "0.1.9"
ATTRIBUTION = "Data provided by http://jsonplaceholder.typicode.com/"

LAMBDA_TEMP = 'temps'
Expand All @@ -24,7 +24,7 @@
LAMBDA_INITIAL_INTERNAL_TEMP = 'initial_internal_temp'

HISTORY_DAYS = 28 # the number of days initially required by our algorithm
DYNAMO_HISTORY_DAYS = 365*5
DYNAMO_HISTORY_DAYS = 365*2
MAX_UPLOAD_HISTORY_READINGS = 5000
DATABASE_COLUMN_SENSOR_HEAT_PUMP_POWER = 'heat_pump_power'
DATABASE_COLUMN_SENSOR_EXTERNAL_TEMPERATURE = 'external_temperature'
Expand Down
2 changes: 1 addition & 1 deletion custom_components/optispark/coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,7 +351,7 @@ def get_missing_new_histories_states(self, history_states, column):
return history_states[idx_bound+1:]

async def upload_new_history(self):
"""Upload section of new history states that are older than anything in dynamo.
"""Upload section of new history states that are newer than anything in dynamo.
self.dynamo_dates is updated so that if this function is called again a new section will be
uploaded.
Expand Down
2 changes: 1 addition & 1 deletion custom_components/optispark/manifest.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,5 @@
"iot_class": "cloud_polling",
"issue_tracker": "https://github.com/Big-Tree/HomeAssistant-OptiSpark/issues",
"requirements": ["geopy==2.4.1"],
"version": "0.1.8"
"version": "0.1.9"
}

0 comments on commit a42c172

Please sign in to comment.