diff --git a/custom_components/optispark/api.py b/custom_components/optispark/api.py
index 8852e1b..2c72b86 100644
--- a/custom_components/optispark/api.py
+++ b/custom_components/optispark/api.py
@@ -92,14 +92,15 @@ def __init__(
         """Sample API Client."""
         self._session = session
 
-    def extra_to_datetime(self, extra):
-        """Corvert unix_time to datetime object."""
-        if 'oldest_dates' in extra and 'newest_dates' in extra:
-            for key in ['oldest_dates', 'newest_dates']:
-                for column in extra[key]:
-                    if extra[key][column] is not None:
-                        extra[key][column] = datetime.fromtimestamp(extra[key][column])
-        return extra
+    #def extra_to_datetime(self, extra):
+    #    """Convert unix_time to datetime object."""
+    #    if 'oldest_dates' in extra and 'newest_dates' in extra:
+    #        for key in ['oldest_dates', 'newest_dates']:
+    #            for column in extra[key]:
+    #                if extra[key][column] is not None:
+    #                    breakpoint()
+    #                    extra[key][column] = datetime.fromtimestamp(extra[key][column])
+    #    return extra
 
     async def upload_history(self, dynamo_data):
         """Upload historical data to dynamoDB without calculating heat pump profile."""
@@ -111,7 +112,7 @@ async def upload_history(self, dynamo_data):
             url=lambda_url,
             data=payload,
         )
-        extra = self.extra_to_datetime(extra)
+        #extra = self.extra_to_datetime(extra)
         return extra['oldest_dates'], extra['newest_dates']
 
     async def get_data_dates(self, dynamo_data: dict):
@@ -127,7 +128,7 @@ async def get_data_dates(self, dynamo_data: dict):
             url=lambda_url,
             data=payload,
         )
-        extra = self.extra_to_datetime(extra)
+        #extra = self.extra_to_datetime(extra)
         return extra['oldest_dates'], extra['newest_dates']
 
     async def async_get_profile(self, lambda_args: dict):
@@ -152,6 +153,23 @@ async def async_get_profile(self, lambda_args: dict):
         results['projected_percent_savings'] = results['base_cost']/results['optimised_cost']*100 - 100
         return results
 
+    def json_serialisable(self, data):
+        """Convert to compressed bytes so that data can be converted to json."""
+        uncompressed_data = pickle.dumps(data)
+        compressed_data = gzip.compress(uncompressed_data)
+        LOGGER.debug(f'len(uncompressed_data): {len(uncompressed_data)}')
+        LOGGER.debug(f'len(compressed_data): {len(compressed_data)}')
+        base64_string = base64.b64encode(compressed_data).decode('utf-8')
+        return base64_string
+
+    def json_deserialise(self, payload):
+        """Convert from the compressed bytes to original objects."""
+        payload = payload['serialised_payload']
+        payload = base64.b64decode(payload)
+        payload = gzip.decompress(payload)
+        payload = pickle.loads(payload)
+        return payload
+
     async def _api_wrapper(
         self,
         method: str,
@@ -162,17 +180,13 @@
         try:
             if 'dynamo_data' in data:
                 data['dynamo_data'] = floats_to_decimal(data['dynamo_data'])
-            uncompressed_data = pickle.dumps(data)
-            compressed_data = gzip.compress(uncompressed_data)
-            LOGGER.debug(f'len(uncompressed_data): {len(uncompressed_data)}')
-            LOGGER.debug(f'len(compressed_data): {len(compressed_data)}')
-            base64_string = base64.b64encode(compressed_data).decode('utf-8')
+            data = self.json_serialisable(data)
 
             async with async_timeout.timeout(40):
                 response = await self._session.request(
                     method=method,
                     url=url,
-                    json=base64_string,
+                    json=data,
                 )
                 if response.status in (401, 403):
                     raise OptisparkApiClientAuthenticationError(
@@ -184,7 +198,8 @@
                     raise OptisparkApiClientCommunicationError(
                         '502 Bad Gateway - check payload')
                 response.raise_for_status()
-                return await response.json()
+                payload = await response.json()
+                return self.json_deserialise(payload)
 
         except asyncio.TimeoutError as exception:
             LOGGER.error(traceback.format_exc())
diff --git a/custom_components/optispark/const.py b/custom_components/optispark/const.py
index 2fe440f..a000448 100644
--- a/custom_components/optispark/const.py
+++ b/custom_components/optispark/const.py
@@ -5,7 +5,7 @@
 
 NAME = "Optispark"
 DOMAIN = "optispark"
-VERSION = "0.1.8"
+VERSION = "0.1.9"
 ATTRIBUTION = "Data provided by http://jsonplaceholder.typicode.com/"
 
 LAMBDA_TEMP = 'temps'
@@ -24,7 +24,7 @@
 LAMBDA_INITIAL_INTERNAL_TEMP = 'initial_internal_temp'
 
 HISTORY_DAYS = 28  # the number of days initially required by our algorithm
-DYNAMO_HISTORY_DAYS = 365*5
+DYNAMO_HISTORY_DAYS = 365*2
 MAX_UPLOAD_HISTORY_READINGS = 5000
 DATABASE_COLUMN_SENSOR_HEAT_PUMP_POWER = 'heat_pump_power'
 DATABASE_COLUMN_SENSOR_EXTERNAL_TEMPERATURE = 'external_temperature'
diff --git a/custom_components/optispark/coordinator.py b/custom_components/optispark/coordinator.py
index 5b3ca46..6512cd3 100644
--- a/custom_components/optispark/coordinator.py
+++ b/custom_components/optispark/coordinator.py
@@ -351,7 +351,7 @@ def get_missing_new_histories_states(self, history_states, column):
         return history_states[idx_bound+1:]
 
     async def upload_new_history(self):
-        """Upload section of new history states that are older than anything in dynamo.
+        """Upload section of new history states that are newer than anything in dynamo.
 
         self.dynamo_dates is updated so that if this function is called again a new section
         will be uploaded.
diff --git a/custom_components/optispark/manifest.json b/custom_components/optispark/manifest.json
index 7c81e27..b864ab1 100644
--- a/custom_components/optispark/manifest.json
+++ b/custom_components/optispark/manifest.json
@@ -11,5 +11,5 @@
   "iot_class": "cloud_polling",
   "issue_tracker": "https://github.com/Big-Tree/HomeAssistant-OptiSpark/issues",
   "requirements": ["geopy==2.4.1"],
-  "version": "0.1.8"
+  "version": "0.1.9"
 }
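
Note on the new payload encoding in api.py: json_serialisable() and json_deserialise() factor the pickle -> gzip -> base64 pipeline out of _api_wrapper() and apply it symmetrically to the response, which _api_wrapper() now expects to arrive as JSON with the encoded string under the 'serialised_payload' key. Below is a minimal standalone round-trip sketch of that scheme; the encode/decode names and the sample dict are illustrative only, not part of the diff.

    import base64
    import gzip
    import pickle

    def encode(data):
        # pickle -> gzip -> base64 text, mirroring json_serialisable()
        return base64.b64encode(gzip.compress(pickle.dumps(data))).decode('utf-8')

    def decode(payload):
        # reverse the pipeline, mirroring json_deserialise()
        raw = base64.b64decode(payload['serialised_payload'])
        return pickle.loads(gzip.decompress(raw))

    sample = {'temps': [19.5, 20.0, 21.25]}
    assert decode({'serialised_payload': encode(sample)}) == sample

Since pickle.loads() executes whatever the payload describes, this encoding is only safe while both ends of the Lambda call are controlled by OptiSpark; an untrusted response could run arbitrary code.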