diff --git a/.github/workflows/build_push_docker.yml b/.github/workflows/build_push_docker.yml
index 520ab3b3..518be88b 100755
--- a/.github/workflows/build_push_docker.yml
+++ b/.github/workflows/build_push_docker.yml
@@ -69,7 +69,7 @@ jobs:
           m4dm4rtig4n/enedisgateway2mqtt:${{ steps.vars.outputs.version }}
       - name: Discord notification
-        if: steps.check-tag.outputs.dev == 'false'
+#        if: steps.check-tag.outputs.dev == 'false'
         env:
           DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
#          DISCORD_EMBEDS: {color: 3447003, author: { name: client.user.username, icon_url: client.user.displayAvatarURL() }, title: "EnedisGateway2MQTT new version available => ${{ steps.vars.outputs.version }}", url: "https://hub.docker.com/r/m4dm4rtig4n/enedisgateway2mqtt", fields: [{ name: "Github", value: "https://github.com/m4dm4rtig4n/enedisgateway2mqtt"}, {name: "Docker.io", value: "https://hub.docker.com/r/m4dm4rtig4n/enedisgateway2mqtt"}], timestamp: new Date(), footer: {icon_url: client.user.displayAvatarURL(), text: "© m4dm4rtig4n"}}
diff --git a/README.md b/README.md
index 98bba0d8..a44975d2 100755
--- a/README.md
+++ b/README.md
@@ -39,6 +39,12 @@
 and curl test command.
 The easiest way is to use Firefox in the consent process**
 
+## EnedisGateway2MQTT limit
+
+In order to avoid saturating the Enedis Gateway service, the number of API calls is limited to 15 per day.
+Most of the information will be collected during the first launch.
+After that, it only takes a few days (about 1 week) to fetch all "detailed" consumption over the last 2 years.
+
 ## Enedis Gateway limit
 
 Enedis Gateway limit to 50 call per day / per pdl.
@@ -50,13 +56,15 @@
 If you reach this limit, you will be banned for 24 hours!
 
 | Parameters | Call number |
 |:---------------|:---------------:|
 | GET_CONSUMPTION | 3 |
+| GET_CONSUMPTION_DETAIL | 105 |
 | GET_PRODUCTION | 3 |
+| GET_PRODUCTION_DETAIL | 105 |
 | ADDRESSES | 1 |
+| CONTRACT | 1 |
 
 See chapter [persistance](#persistance), to reduce API call number.
 
-
 ## Environment variable
 
 | Variable | Information | Mandatory/Default |
@@ -72,23 +80,47 @@ See chapter [persistance](#persistance), to reduce API call number.
 | RETAIN | Retain data in MQTT | False |
 | QOS | Quality Of Service MQTT | 0 |
 | GET_CONSUMPTION | Enable API call to get your consumption | True |
+| GET_CONSUMPTION_DETAIL | Enable API call to get your consumption in detail mode | True |
 | GET_PRODUCTION | Enable API call to get your production | False |
+| GET_PRODUCTION_DETAIL | Enable API call to get your production in detail mode | False |
 | HA_AUTODISCOVERY | Enable auto-discovery | False |
 | HA_AUTODISCOVERY_PREFIX | Home Assistant auto discovery prefix | homeassistant |
-| BASE_PRICE | Price of kWh in base plan | 0 |
-| CYCLE | Data refresh cycle (3600s minimum) | 3600 |
+| OFFPEAK_HOURS | Force HP/HC format: "HHhMM-HHhMM;HHhMM-HHhMM;..." | "" |
+| CONSUMPTION_PRICE_BASE | Price of kWh in base plan | 0 |
+| CONSUMPTION_PRICE_HC | Price of HC kWh | 0 |
+| CONSUMPTION_PRICE_HP | Price of HP kWh | 0 |
+| CYCLE | Data refresh cycle (1h minimum) | 3600 |
 | ADDRESSES | Get all addresses information | False |
+| REFRESH_CONTRACT | Refresh contract data | False |
+| REFRESH_ADDRESSES | Refresh addresses data | False |
+| WIPE_CACHE | Force refresh all data (wipe all cached data) | False |
+| DEBUG | Display debug information | False |
 
-*Why is there no calculation for the HC / HP ?*
+## Cache
 
-The HC / HP calculations require a lot of API calls and the limit will be reached very quickly
+Since v0.3, EnedisGateway2MQTT uses an SQLite database to store all data and reduce the number of API calls.
+> **Don't forget to mount /data to keep the database persistent!**
 
-> Need database => Roadmap
+If you change your contract or plan, set "**REFRESH_CONTRACT**" to "**True**" to refresh the cached contract.
 
-## Persistance
+If you move, set "**REFRESH_ADDRESSES**" to "**True**" to refresh the cached addresses.
 
-Since v0.3, Enedis Gateway use SQLite database to store all data and reduce API call number.
-Don't forget to mount /data to keep database persistance !!
+If you want to force a refresh of all data, set the environment variable "**WIPE_CACHE**" to "**True**".
+
+**WARNING: this parameter wipes all cached data (addresses, contracts, consumption, production) and generates a lot of API calls (don't forget the [Enedis Gateway limit](#enedis-gateway-limit))**
+
+> Don't forget that it takes several days to recover consumption/production in detail mode.
+
+## Consumption BASE vs HP/HC
+
+Even if you are on the BASE plan (and not HP/HC), it is worth entering the prices of each plan.
+The tool will do the calculations for you and tell you which plan is the most advantageous based on your consumption.
+
+### Blacklist
+
+Sometimes there are holes in the Enedis consumption records, so I set up a blacklist system for certain dates.
+
+If a date still returns no data after 7 tries (7 x CYCLE), it is blacklisted and no longer generates an API call.
 
 ## Usage :
 
@@ -107,8 +139,15 @@
 GET_CONSUMPTION="True"
 GET_PRODUCTION="False"
 HA_AUTODISCOVERY="False"
 HA_AUTODISCOVERY_PREFIX='homeassistant'
-CYCLE=86400
-BASE_PRICE=0
+CYCLE=3600
+OFFPEAK_HOURS=""
+CONSUMPTION_PRICE_BASE=0
+CONSUMPTION_PRICE_HC=0
+CONSUMPTION_PRICE_HP=0
+REFRESH_CONTRACT="False"
+REFRESH_ADDRESSES="False"
+WIPE_CACHE="False"
+DEBUG="False"
 
 docker run -it --restart=unless-stopped \
     -e ACCESS_TOKEN="$ACCESS_TOKEN" \
@@ -125,8 +164,15 @@ docker run -it --restart=unless-stopped \
     -e GET_PRODUCTION="$GET_PRODUCTION" \
     -e HA_AUTODISCOVERY="$HA_AUTODISCOVERY" \
     -e HA_AUTODISCOVERY_PREFIX="$HA_AUTODISCOVERY_PREFIX" \
-    -e CYCLE="$CYCLE" \
-    -e BASE_PRICE="$BASE_PRICE" \
+    -e CYCLE="$CYCLE" \
+    -e OFFPEAK_HOURS="$OFFPEAK_HOURS" \
+    -e CONSUMPTION_PRICE_BASE="$CONSUMPTION_PRICE_BASE" \
+    -e CONSUMPTION_PRICE_HC="$CONSUMPTION_PRICE_HC" \
+    -e CONSUMPTION_PRICE_HP="$CONSUMPTION_PRICE_HP" \
+    -e REFRESH_CONTRACT="$REFRESH_CONTRACT" \
+    -e REFRESH_ADDRESSES="$REFRESH_ADDRESSES" \
+    -e WIPE_CACHE="$WIPE_CACHE" \
+    -e DEBUG="$DEBUG" \
     -v $(pwd):/data m4dm4rtig4n/enedisgateway2mqtt:latest
 ```

@@ -156,7 +202,14 @@ services:
       HA_AUTODISCOVERY: "False"
       HA_AUTODISCOVERY_PREFIX: 'homeassistant'
       CYCLE: 86400
-      BASE_PRICE: 0.1445
+      OFFPEAK_HOURS: ""
+      CONSUMPTION_PRICE_BASE: 0
+      CONSUMPTION_PRICE_HC: 0
+      CONSUMPTION_PRICE_HP: 0
+      REFRESH_CONTRACT: "False"
+      REFRESH_ADDRESSES: "False"
+      WIPE_CACHE: "False"
+      DEBUG: "False"
 volumes:
   mydata:
 ```

@@ -164,12 +217,21 @@ volumes:
 ## Roadmap
 
 - Add **DJU18**
-- Add HC/HP
 - Create Home Assistant OS Addons
-- Add Postgres/MariaDB connector*
+- Add Postgres/MariaDB connector
 
 ## Change log:
 
+### [0.5.0] - 2021-10-13
+
+- Add HC/HP
+- Rework database structure (all cached data are reset)
+- Add new parameters to reset the cache.
+
+### [0.4.1] - 2021-10-06
+
+- Cache addresses & contracts data.
+ ### [0.4.0] - 2021-10-05 - Switch locale to fr_FR.UTF8 (french date format) diff --git a/VERSION b/VERSION index 60a2d3e9..79a2734b 100755 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.4.0 \ No newline at end of file +0.5.0 \ No newline at end of file diff --git a/app/addresses.py b/app/addresses.py index 8f6e6e9c..c3f4d5a6 100755 --- a/app/addresses.py +++ b/app/addresses.py @@ -8,7 +8,16 @@ main = import_module("main") f = import_module("function") -def getAddresses(client, cur): +def getAddresses(client, con, cur): + + def queryApi(url, headers, data, count=0): + addresses = f.apiRequest(cur, con, type="POST", url=f"{url}", headers=headers, data=json.dumps(data)) + if not "error_code" in addresses: + query = f"INSERT OR REPLACE INTO addresses VALUES (?,?,?)" + cur.execute(query, [pdl, json.dumps(addresses), count]) + con.commit() + return addresses + pdl = main.pdl url = main.url headers = main.headers @@ -18,33 +27,57 @@ def getAddresses(client, cur): "usage_point_id": str(pdl), } + ha_discovery = { + pdl: {} + } + query = f"SELECT * FROM addresses WHERE pdl = '{pdl}'" cur.execute(query) query_result = cur.fetchone() if query_result is None: - addresses = requests.request("POST", url=f"{url}", headers=headers, data=json.dumps(data)).json() - addresses_b64 = str(addresses) - addresses_b64 = addresses_b64.encode('ascii') - addresses_b64 = base64.b64encode(addresses_b64) - cur.execute(f"INSERT OR REPLACE INTO addresses VALUES ('{pdl}','{addresses_b64}')") + f.log(" => Query API") + addresses = queryApi(url, headers, data) else: - addresses = json.loads(query_result[1]) + if main.refresh_addresses == True: + f.log(" => Query API (Refresh Cache)") + addresses = queryApi(url, headers, data, 0) + else: + f.log(f" => Query Cache") + addresses = json.loads(query_result[1]) + query = f"INSERT OR REPLACE INTO addresses VALUES (?,?,?)" + cur.execute(query, [pdl, json.dumps(addresses), 0]) + con.commit() - pprint(addresses) - quit() - if not "customer" in addresses: - f.publish(client, f"{pdl}/consumption/current_year/error", str(1)) + if 'error_code' in addresses: + f.log(addresses['description']) + ha_discovery = { + "error_code": True, + "detail": { + "message": addresses['description'] + } + } + f.publish(client, f"{pdl}/addresses/error", str(1)) for key, value in addresses.items(): - f.publish(client, f"{pdl}/consumption/current_year/errorMsg/{key}", str(value)) + f.publish(client, f"{pdl}/addresses/errorMsg/{key}", str(value)) else: - customer = addresses["customer"] - f.publish(client, f"{pdl}/customer_id", str(customer["customer_id"])) - for usage_points in customer['usage_points']: - for usage_point_key, usage_point_data in usage_points['usage_point'].items(): - if isinstance(usage_point_data, dict): - for usage_point_data_key, usage_point_data_data in usage_point_data.items(): - f.publish(client, f"{pdl}/addresses/{usage_point_key}/{usage_point_data_key}", - str(usage_point_data_data)) - else: - f.publish(client, f"{pdl}/addresses/{usage_point_key}", str(usage_point_data)) + if "customer" in addresses: + customer = addresses["customer"] + f.publish(client, f"{pdl}/customer_id", str(customer["customer_id"])) + for usage_points in customer['usage_points']: + for usage_point_key, usage_point_data in usage_points['usage_point'].items(): + if isinstance(usage_point_data, dict): + for usage_point_data_key, usage_point_data_data in usage_point_data.items(): + f.publish(client, f"{pdl}/addresses/{usage_point_key}/{usage_point_data_key}", + str(usage_point_data_data)) + else: + f.publish(client, 
f"{pdl}/addresses/{usage_point_key}", str(usage_point_data)) + else: + ha_discovery = { + "error_code": True, + "detail": { + "message": addresses + } + } + + return ha_discovery \ No newline at end of file diff --git a/app/contract.py b/app/contract.py index e62773bb..9b2213a9 100755 --- a/app/contract.py +++ b/app/contract.py @@ -1,12 +1,23 @@ import requests import json from dateutil.relativedelta import * +from pprint import pprint +from datetime import datetime from importlib import import_module main = import_module("main") f = import_module("function") -def getContract(client): +def getContract(client, con, cur): + + def queryApi(url, headers, data, count=0): + contract = f.apiRequest(cur, con, type="POST", url=f"{url}", headers=headers, data=json.dumps(data)) + if not "error_code" in contract: + query = f"INSERT OR REPLACE INTO contracts VALUES (?,?,?)" + cur.execute(query, [pdl, json.dumps(contract), count]) + con.commit() + return contract + pdl = main.pdl headers = main.headers url = main.url @@ -19,36 +30,79 @@ def getContract(client): "type": "contracts", "usage_point_id": str(pdl), } - contract = requests.request("POST", url=f"{url}", headers=headers, data=json.dumps(data)).json() - if "customer" in contract: - customer = contract["customer"] - f.publish(client, f"{pdl}/customer_id", str(customer["customer_id"])) - for usage_points in customer['usage_points']: - for usage_point_key, usage_point_data in usage_points['usage_point'].items(): - f.publish(client, f"{pdl}/contract/{usage_point_key}", str(usage_point_data)) - for contracts_key, contracts_data in usage_points['contracts'].items(): - f.publish(client, f"{pdl}/contract/{contracts_key}", str(contracts_data)) - if contracts_key == "last_distribution_tariff_change_date": - f.publish(client, f"{pdl}/last_distribution_tariff_change_date", str(contracts_data)) - ha_discovery[pdl]["last_distribution_tariff_change_date"] = str(contracts_data) - if contracts_key == "last_activation_date": - f.publish(client, f"{pdl}/last_activation_date", str(contracts_data)) - ha_discovery[pdl]["last_activation_date"] = str(contracts_data) - if contracts_key == "subscribed_power": - f.publish(client, f"{pdl}/subscribed_power", str(contracts_data.split()[0])) - ha_discovery[pdl]["subscribed_power"] = str(contracts_data.split()[0]) - if contracts_key == "offpeak_hours": - offpeak_hours = contracts_data[contracts_data.find("(") + 1:contracts_data.find(")")].split(';') - ha_discovery[pdl]["offpeak_hours"] = str(contracts_data) - index = 0 - for oh in offpeak_hours: - f.publish(client, f"{pdl}/offpeak_hours/{index}/start", str(oh.split('-')[0])) - f.publish(client, f"{pdl}/offpeak_hours/{index}/stop", str(oh.split('-')[1])) - index = index + 1 - f.publish(client, f"{pdl}/offpeak_hours", str(contracts_data)) + + query = f"SELECT * FROM contracts WHERE pdl = '{pdl}'" + cur.execute(query) + query_result = cur.fetchone() + if query_result is None: + f.log(" => Query API") + contract = queryApi(url, headers, data) else: + if main.refresh_contract == True: + f.log(" => Query API (Refresh Cache)") + contract = queryApi(url, headers, data, 0) + else: + f.log(f" => Query Cache") + contract = json.loads(query_result[1]) + query = f"INSERT OR REPLACE INTO contracts VALUES (?,?,?)" + cur.execute(query, [pdl, json.dumps(contract), 0]) + con.commit() + + if 'error_code' in contract: + f.log(contract['description']) ha_discovery = { - "error": True, - "errorMsg": contract + "error_code": True, + "detail": { + "message": contract['description'] + } } + 
f.publish(client, f"{pdl}/contract/error", str(1)) + for key, value in contract.items(): + f.publish(client, f"{pdl}/contract/errorMsg/{key}", str(value)) + else: + f.publish(client, f"{pdl}/contract/error", str(0)) + if "customer" in contract: + customer = contract["customer"] + f.publish(client, f"{pdl}/customer_id", str(customer["customer_id"])) + for usage_points in customer['usage_points']: + for usage_point_key, usage_point_data in usage_points['usage_point'].items(): + f.publish(client, f"{pdl}/contract/{usage_point_key}", str(usage_point_data)) + + for contracts_key, contracts_data in usage_points['contracts'].items(): + f.publish(client, f"{pdl}/contract/{contracts_key}", str(contracts_data)) + + if contracts_key == "last_distribution_tariff_change_date": + f.publish(client, f"{pdl}/last_distribution_tariff_change_date", str(contracts_data)) + ha_discovery[pdl]["last_distribution_tariff_change_date"] = str(contracts_data) + + if contracts_key == "last_activation_date": + f.publish(client, f"{pdl}/last_activation_date", str(contracts_data)) + ha_discovery[pdl]["last_activation_date"] = str(contracts_data) + + if contracts_key == "subscribed_power": + f.publish(client, f"{pdl}/subscribed_power", str(contracts_data.split()[0])) + ha_discovery[pdl]["subscribed_power"] = str(contracts_data.split()[0]) + + offpeak_hours = [] + if main.offpeak_hours != None: + offpeak_hours = main.offpeak_hours.split(';') + else: + if contracts_key == "offpeak_hours": + offpeak_hours = contracts_data[contracts_data.find("(") + 1:contracts_data.find(")")].split(';') + + if offpeak_hours != []: + ha_discovery[pdl]["offpeak_hours"] = offpeak_hours + index = 0 + for oh in offpeak_hours: + f.publish(client, f"{pdl}/offpeak_hours/{index}/start", str(oh.split('-')[0])) + f.publish(client, f"{pdl}/offpeak_hours/{index}/stop", str(oh.split('-')[1])) + index += 1 + f.publish(client, f"{pdl}/offpeak_hours", str(offpeak_hours)) + else: + ha_discovery = { + "error_code": True, + "detail": { + "message": contract + } + } return ha_discovery diff --git a/app/daily.py b/app/daily.py index 4db93c1b..8df7f73c 100755 --- a/app/daily.py +++ b/app/daily.py @@ -10,11 +10,11 @@ f = import_module("function") -def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now()): +def getDaily(cur, con, client, mode="consumption", last_activation_date=datetime.now()): max_days = 1095 max_days_date = datetime.now() + relativedelta(days=-max_days) pdl = main.pdl - base_price = main.consumption_base_price + base_price = main.consumption_price_base ha_discovery = { pdl: {} @@ -30,7 +30,7 @@ def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now( dateEnded = datetime.now() dateEnded = dateEnded.strftime('%Y-%m-%d') - data = dailyBeetwen(cur, pdl, mode, dateBegin, dateEnded, last_activation_date) + data = dailyBeetwen(cur, con, pdl, mode, dateBegin, dateEnded, last_activation_date) if "error_code" in data: f.publish(client, f"{pdl}/{mode}/current_year/error", str(1)) for key, value in data.items(): @@ -38,7 +38,6 @@ def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now( else: f.publish(client, f"{pdl}/{mode}/current_year/error", str(0)) for key, value in data.items(): - if key != "dateBegin" and key != "dateEnded": current_value = int(value["value"]) current_date = value["date"] @@ -48,7 +47,7 @@ def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now( ha_discovery[pdl].update({ f"{mode}_{key.replace('-', '_')}": { "value": round(int(current_value) 
/ 1000, 2), - "unit_of_meas": "kW", + "unit_of_meas": "kWh", "device_class": "energy", "state_class": "total_increasing", "attributes": {} @@ -71,14 +70,15 @@ def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now( else: f.publish(client, f"{pdl}/{mode}/current_year/{key}", str(value)) - if base_price != 0: - if isinstance(current_value, int): - roundValue = round(int(current_value) / 1000 * base_price, 2) - f.publish(client, f"{pdl}/{mode}_price/current_year/{key}", roundValue) - if key != "dateBegin" and key != "dateEnded": - if not f"price" in ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'].keys(): - ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'][f"price"] = str( - roundValue) + if "current_value" in locals(): + if base_price != 0 and 'current_value' in locals(): + if isinstance(current_value, int): + roundValue = round(int(current_value) / 1000 * base_price, 2) + f.publish(client, f"{pdl}/{mode}_price/current_year/{key}", roundValue) + if key != "dateBegin" and key != "dateEnded": + if not f"price" in ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'].keys(): + ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'][f"price"] = str( + roundValue) lastData = data current_year = 1 @@ -91,7 +91,7 @@ def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now( if last_activation_date > datetime.strptime(dateEnded, '%Y-%m-%d'): f.log(" - Skip (activation date > dateEnded)") else: - data = dailyBeetwen(cur, pdl, mode, dateBegin, dateEnded, last_activation_date) + data = dailyBeetwen(cur, con, pdl, mode, dateBegin, dateEnded, last_activation_date) if "error_code" in data: f.publish(client, f"{pdl}/{mode}/year-{current_year}/error", str(1)) for key, value in data.items(): @@ -140,16 +140,22 @@ def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now( ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'].keys(): ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'][ f"history_year_{current_year}_date"] = value["date"] + + if "current_value" in locals(): + if base_price != 0: + if isinstance(current_value, int): + roundValue = round(int(current_value) / 1000 * base_price, 2) + f.publish(client, f"{pdl}/{mode}_price/year-{current_year}/{key}", roundValue) + if not f"{mode}_{key.replace('-', '_')}" in ha_discovery[pdl].keys(): + ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"] = { + "attributes": {} + } + if not f"price_year_{current_year}" in ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'].keys(): + ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'][ + f"price_year_{current_year}"] = str(roundValue) else: f.publish(client, f"{pdl}/{mode}/year-{current_year}/{key}", str(value)) - if base_price != 0: - if isinstance(current_value, int): - roundValue = round(int(current_value) / 1000 * base_price, 2) - f.publish(client, f"{pdl}/{mode}_price/year-{current_year}/{key}", roundValue) - if not f"price_year_{current_year}" in ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"][ - 'attributes'].keys(): - ha_discovery[pdl][f"{mode}_{key.replace('-', '_')}"]['attributes'][ - f"price_year_{current_year}"] = str(roundValue) + dateEnded = dateBegin dateEndedDelta = datetime.strptime(dateEnded, '%Y-%m-%d') dateBegin = dateEndedDelta + relativedelta(years=-1) @@ -160,54 +166,7 @@ def getDaily(cur, client, mode="consumption", last_activation_date=datetime.now( return ha_discovery -def checkHistoryDaily(cur, mode, 
dateBegin, dateEnded): - pdl = main.pdl - dateBegin = datetime.strptime(dateBegin, '%Y-%m-%d') - dateEnded = datetime.strptime(dateEnded, '%Y-%m-%d') - delta = dateEnded - dateBegin - result = { - "missing_data": False, - "date": {}, - "count": 0 - } - for i in range(delta.days + 1): - checkDate = dateBegin + timedelta(days=i) - checkDate = checkDate.strftime('%Y-%m-%d') - query = f"SELECT * FROM {mode}_daily WHERE pdl = '{pdl}' AND date = '{checkDate}'" - cur.execute(query) - query_result = cur.fetchone() - if query_result is None: - result["date"][checkDate] = { - "status": False, - "fail": 0, - "value": 0 - } - result["missing_data"] = True - result["count"] = result["count"] + 1 - elif query_result[3] >= main.fail_count: - result["date"][checkDate] = { - "status": True, - "fail": query_result[3], - "value": query_result[2] - } - elif query_result[2] == 0: - result["date"][checkDate] = { - "status": False, - "fail": query_result[3], - "value": query_result[2] - } - result["missing_data"] = True - result["count"] = result["count"] + 1 - else: - result["date"][checkDate] = { - "status": True, - "fail": 0, - "value": query_result[2] - } - return result - - -def dailyBeetwen(cur, pdl, mode, dateBegin, dateEnded, last_activation_date): +def dailyBeetwen(cur, con, pdl, mode, dateBegin, dateEnded, last_activation_date): response = {} lastYears = datetime.strptime(dateEnded, '%Y-%m-%d') @@ -238,32 +197,39 @@ def dailyBeetwen(cur, pdl, mode, dateBegin, dateEnded, last_activation_date): else: f.log(f"Data is missing between {dateBegin} / {dateEnded}") f.log(f" => Load data from API") - daily = requests.request("POST", url=f"{main.url}", headers=main.headers, data=json.dumps(data)).json() - meter_reading = daily['meter_reading'] - f.log("Import data :") - for interval_reading in meter_reading["interval_reading"]: - date = interval_reading['date'] - value = interval_reading['value'] - cur.execute( - f"INSERT OR REPLACE INTO {mode}_daily VALUES ('{pdl}','{interval_reading['date']}','{interval_reading['value']}','0')") - new_date.append(interval_reading['date']) - mesures[date] = value + daily = f.apiRequest(cur, con, type="POST", url=f"{main.url}", headers=main.headers, data=json.dumps(data)) + if not "error_code" in daily: + meter_reading = daily['meter_reading'] + f.log("Import data :") + for interval_reading in meter_reading["interval_reading"]: + date = interval_reading['date'] + value = interval_reading['value'] + cur.execute( + f"INSERT OR REPLACE INTO {mode}_daily VALUES ('{pdl}','{interval_reading['date']}','{interval_reading['value']}','0')") + new_date.append(interval_reading['date']) + mesures[date] = value - f.splitLog(new_date) + f.splitLog(new_date) - not_found_data = [] - for date, date_data in current_data['date'].items(): - if not date in new_date: - not_found_data.append(date) - if date_data['fail'] == 0 and date_data['value'] == 0: - cur.execute(f"INSERT OR REPLACE INTO {mode}_daily VALUES ('{pdl}','{date}','0','1')") - else: - cur.execute( - f"UPDATE {mode}_daily SET fail = {date_data['fail'] + 1} WHERE pdl = '{pdl}' and date = '{date}'") + not_found_data = [] + for date, date_data in current_data['date'].items(): + if not date in new_date: + not_found_data.append(date) + if date_data['fail'] == 0 and date_data['value'] == 0: + cur.execute(f"INSERT OR REPLACE INTO {mode}_daily VALUES ('{pdl}','{date}','0','1')") + else: + cur.execute( + f"UPDATE {mode}_daily SET fail = {date_data['fail'] + 1} WHERE pdl = '{pdl}' and date = '{date}'") - if not_found_data != []: - f.log("Data not 
found :") - f.splitLog(not_found_data) + if not_found_data != []: + f.log("Data not found :") + f.splitLog(not_found_data) + + elif daily['error_code'] == 2: + f.log(f"Fetch data error detected beetween {dateBegin} / {dateEnded}") + f.log(f" => Load data from cache") + for date, data in current_data['date'].items(): + mesures[date] = data['value'] list_date = list(reversed(sorted(mesures.keys()))) @@ -341,15 +307,15 @@ def dailyBeetwen(cur, pdl, mode, dateBegin, dateEnded, last_activation_date): if current_date >= dateYears: energyYears = int(energyYears) + int(value) - response['thisWeek'] = { + response['this_week'] = { "value": energyWeek, "date": date } - response['thisMonth'] = { + response['this_month'] = { "value": energyMonths, "date": date } - response['thisYear'] = { + response['this_year'] = { "value": energyYears, "date": date } @@ -361,3 +327,50 @@ def dailyBeetwen(cur, pdl, mode, dateBegin, dateEnded, last_activation_date): f.log(f"==> {error_key} => {error_msg}") return response + + +def checkHistoryDaily(cur, mode, dateBegin, dateEnded): + pdl = main.pdl + dateBegin = datetime.strptime(dateBegin, '%Y-%m-%d') + dateEnded = datetime.strptime(dateEnded, '%Y-%m-%d') + delta = dateEnded - dateBegin + result = { + "missing_data": False, + "date": {}, + "count": 0 + } + for i in range(delta.days + 1): + checkDate = dateBegin + timedelta(days=i) + checkDate = checkDate.strftime('%Y-%m-%d') + query = f"SELECT * FROM {mode}_daily WHERE pdl = '{pdl}' AND date = '{checkDate}'" + cur.execute(query) + query_result = cur.fetchone() + if query_result is None: + result["date"][checkDate] = { + "status": False, + "fail": 0, + "value": 0 + } + result["missing_data"] = True + result["count"] = result["count"] + 1 + elif query_result[3] >= main.fail_count: + result["date"][checkDate] = { + "status": True, + "fail": query_result[3], + "value": query_result[2] + } + elif query_result[2] == 0: + result["date"][checkDate] = { + "status": False, + "fail": query_result[3], + "value": query_result[2] + } + result["missing_data"] = True + result["count"] = result["count"] + 1 + else: + result["date"][checkDate] = { + "status": True, + "fail": 0, + "value": query_result[2] + } + return result diff --git a/app/detail.py b/app/detail.py new file mode 100755 index 00000000..f4d13fbc --- /dev/null +++ b/app/detail.py @@ -0,0 +1,316 @@ +import requests +import json +from datetime import datetime, timedelta +from dateutil.relativedelta import * +from pprint import pprint +import re + +from importlib import import_module + +main = import_module("main") +f = import_module("function") + +date_format = "%Y-%m-%d %H:%M:%S" + + +def getDetail(cur, con, client, mode="consumption", last_activation_date=datetime.now(), offpeak_hours=None, + measure_total=None): + + max_days = 730 + max_days_per_demand = 7 + max_days_date = datetime.now() + relativedelta(days=-max_days) + pdl = main.pdl + price_base = main.consumption_price_base + price_hc = main.consumption_price_hc + price_hp = main.consumption_price_hp + + ha_discovery = { + pdl: {} + } + + # Check activation data + last_activation_date = last_activation_date.split("+")[0] + last_activation_date = datetime.strptime(last_activation_date, '%Y-%m-%d') + + lastYears = datetime.now() + relativedelta(days=-max_days_per_demand) + dateBegin = lastYears.strftime('%Y-%m-%d') + dateEnded = datetime.now() + dateEnded = dateEnded.strftime('%Y-%m-%d') + + data = detailBeetwen(cur, con, pdl, mode, dateBegin, dateEnded, last_activation_date, max_days_per_demand, + offpeak_hours) + 
if "error_code" in data: + f.publish(client, f"{pdl}/{mode}/detail/error", str(1)) + for key, value in data.items(): + f.publish(client, f"{pdl}/{mode}/detail/errorMsg/{key}", str(value)) + else: + dateEnded = dateBegin + dateEndedDelta = datetime.strptime(dateEnded, '%Y-%m-%d') + dateBegin = dateEndedDelta + relativedelta(weeks=-1) + dateBegin = dateBegin.strftime('%Y-%m-%d') + current_week = 1 + finish = False + while max_days_date <= datetime.strptime(dateEnded, '%Y-%m-%d') and not "error_code" in data and finish == False: + f.log(f"Load {dateBegin} => {dateEnded}") + if last_activation_date > datetime.strptime(dateEnded, '%Y-%m-%d'): + f.log(" - Skip (activation date > dateEnded)") + finish = True + else: + data = detailBeetwen(cur, con, pdl, mode, dateBegin, dateEnded, last_activation_date, + max_days_per_demand, offpeak_hours) + if "error_code" in data: + f.publish(client, f"{pdl}/{mode}/detail/error", str(1)) + for key, value in data.items(): + f.publish(client, f"{pdl}/{mode}/detail/errorMsg/{key}", str(value)) + else: + dateEnded = dateBegin + dateEndedDelta = datetime.strptime(dateEnded, '%Y-%m-%d') + dateBegin = dateEndedDelta + relativedelta(weeks=-1) + if dateBegin < max_days_date: + dateBegin = max_days_date + dateBegin = dateBegin.strftime('%Y-%m-%d') + current_week = current_week + 1 + + query = f"SELECT * FROM consumption_detail WHERE pdl = '{pdl}' ORDER BY date;" + cur.execute(query) + query_result = cur.fetchall() + + result = {} + base_vs_offpeak = 0 + + for data in query_result: + date = data[1] + value = data[2] + interval = data[3] + measure_type = data[4] + + dateObject = datetime.strptime(date, date_format) + year = dateObject.strftime('%Y') + month = dateObject.strftime('%m') + + if not year in result: + result[year] = {} + if not month in result[year]: + result[year].update({ + month: { + "measure_hp": 0, + "measure_hp_wh": 0, + "measure_hc": 0, + "measure_hc_wh": 0, + "measure_total": 0, + "measure_total_wh": 0 + } + }) + + value_wh = value * (interval / 60) + result[year][month]["measure_total"] += int(value) + result[year][month]["measure_total_wh"] += int(value_wh) + + if measure_type == "HP": + result[year][month]["measure_hp"] += int(value) + result[year][month]["measure_hp_wh"] += int(value_wh) + result[year][month]["measure_ration_hp"] = round( + 100 * result[year][month]["measure_hp"] / result[year][month]["measure_total"], 2) + if measure_type == "HC": + result[year][month]["measure_hc"] += int(value) + result[year][month]["measure_hc_wh"] += int(value_wh) + result[year][month]["measure_ration_hc"] = round( + 100 * result[year][month]["measure_hc"] / result[year][month]["measure_total"], 2) + + + if price_base != 0: + result[year][month]["measure_base_euro"] = result[year][month]["measure_total_wh"] / 1000 * price_base + + if offpeak_hours != None: + if price_hc != 0 and price_hp != 0: + result[year][month]["measure_hp_euro"] = result[year][month]["measure_hp_wh"] / 1000 * price_hp + result[year][month]["measure_hc_euro"] = result[year][month]["measure_hc_wh"] / 1000 * price_hc + result[year][month]["measure_hphc_euro"] = result[year][month]["measure_hp_euro"] + result[year][month]["measure_hc_euro"] + + if price_base != 0 and price_hc != 0 and price_hp != 0 and measure_type != "BASE": + result[year][month]["base_vs_offpeak"] = 100 - ( + 100 * result[year][month]["measure_base_euro"] / (result[year][month]["measure_hphc_euro"])) + + base_vs_offpeak += result[year][month]["base_vs_offpeak"] + + if result[year][month]["base_vs_offpeak"] > 0: + 
result[year][month]["best_plan"] = f"BASE" + result[year][month]["best_plan_percent"] = f"{abs(round(result[year][month]['base_vs_offpeak'], 2))}" + else: + result[year][month]["best_plan"] = f"HC/HP" + result[year][month]["best_plan_percent"] = f"{abs(round(result[year][month]['base_vs_offpeak'], 2))}" + + if offpeak_hours != None and price_base != 0 and price_hc != 0 and price_hp != 0: + if base_vs_offpeak > 0: + best_plan = f"BASE" + best_plan_percent = f"{abs(round(result[year][month]['base_vs_offpeak'], 2))}" + else: + best_plan = f"HC/HP" + best_plan_percent = f"{abs(round(result[year][month]['base_vs_offpeak'], 2))}" + + year = dateObject.strftime('%Y') + month = dateObject.strftime('%m') + if offpeak_hours != None: + for plan in ["hc", "hp"]: + ha_discovery[pdl].update({ + f"{mode}_detail_this_month_{plan}": { + "value": result[year][month][f"measure_{plan}_wh"], + "unit_of_meas": "kW", + "device_class": "energy", + "state_class": "total_increasing", + "attributes": {} + } + }) + ha_discovery[pdl][f"{mode}_detail_this_month_{plan}"]["attributes"]["ratio"] = result[year][month][f"measure_ration_{plan}"] + ha_discovery[pdl][f"{mode}_detail_this_month_{plan}"]["attributes"]["W"] = result[year][month][f"measure_{plan}"] + + if price_hc != 0 and price_hp != 0: + ha_discovery[pdl][f"{mode}_detail_this_month_{plan}"]["attributes"][f"measure_{plan}_euro"] = result[year][month][f"measure_{plan}_euro"] + + ha_discovery[pdl].update({ + f"{mode}_detail_this_month_base": { + "value": result[year][month]["measure_total_wh"], + "unit_of_meas": "kW", + "device_class": "energy", + "state_class": "total_increasing", + "attributes": {} + } + }) + ha_discovery[pdl][f"{mode}_detail_this_month_base"]["attributes"]["W"] = result[year][month][f"measure_total"] + if price_base != 0: + ha_discovery[pdl][f"{mode}_detail_this_month_base"]["attributes"][f"measure_base_euro"] = result[year][month][f"measure_base_euro"] + + if offpeak_hours != None: + if price_base != 0 and price_hc != 0 and price_hp != 0: + ha_discovery[pdl].update({ + f"{mode}_detail_this_month_compare": { + "value": result[year][month][f"best_plan"], + "attributes": {} + } + }) + ha_discovery[pdl][f"{mode}_detail_this_month_compare"]["attributes"]["best_plan_percent"] = result[year][month][f"best_plan_percent"] + ha_discovery[pdl].update({ + f"{mode}_detail_this_year_compare": { + "value": best_plan, + "attributes": {} + } + }) + ha_discovery[pdl][f"{mode}_detail_this_year_compare"]["attributes"]["best_plan_percent"] = best_plan_percent + + for year, value in result.items(): + for month, subvalue in value.items(): + for key, subsubvalue in subvalue.items(): + f.publish(client, f"{pdl}/{mode}/detail/{year}/{month}/{key}", str(subsubvalue)) + + return ha_discovery + + +def detailBeetwen(cur, con, pdl, mode, dateBegin, dateEnded, last_activation_date, max_days_per_demand, offpeak_hours): + + response = {} + + def is_between(time, time_range): + if time_range[1] < time_range[0]: + return time >= time_range[0] or time <= time_range[1] + return time_range[0] <= time <= time_range[1] + + lastYears = datetime.strptime(dateEnded, '%Y-%m-%d') + lastYears = lastYears + relativedelta(days=-max_days_per_demand) + if lastYears < last_activation_date: + dateBegin = last_activation_date + dateBegin = dateBegin.strftime('%Y-%m-%d') + + response['dateBegin'] = dateBegin + response['dateEnded'] = dateEnded + + data = { + "type": f"{mode}_load_curve", + "usage_point_id": str(pdl), + "start": str(dateBegin), + "end": str(dateEnded), + } + + try: + new_date = [] + 
dateBeginLong = datetime.strptime(dateBegin, '%Y-%m-%d') + dateEndedLong = datetime.strptime(dateEnded, '%Y-%m-%d') + current_data = checkHistoryDetail(cur, con, mode, dateBeginLong, dateEndedLong) + if current_data['missing_data'] == False: + f.log(f"Week allready in cache {dateBegin} / {dateEnded}") + f.log(f" => Load data from cache") + else: + f.log(f"Data is missing between {dateBegin} / {dateEnded}") + f.log(f" => Load data from API") + + detail = f.apiRequest(cur, con, type="POST", url=f"{main.url}", headers=main.headers, data=json.dumps(data)) + if not "error_code" in detail: + meter_reading = detail['meter_reading'] + f.log("Import data :") + new_date = [] + for interval_reading in meter_reading["interval_reading"]: + date = interval_reading['date'] + interval_length = re.findall(r'\d+', interval_reading['interval_length'])[0] + value = int(interval_reading['value']) + dateObject = datetime.strptime(date, '%Y-%m-%d %H:%M:%S') + dateHourMinute = dateObject.strftime('%H:%M') + if offpeak_hours != None: + measure_type = "HP" + for offpeak_hour in offpeak_hours: + offpeak_begin = offpeak_hour.split("-")[0].replace('h', ':').replace('H', ':') + # FORMAT HOUR WITH 2 DIGIT + offpeak_begin = datetime.strptime(offpeak_begin, '%H:%M') + offpeak_begin = datetime.strftime(offpeak_begin, '%H:%M') + offpeak_stop = offpeak_hour.split("-")[1].replace('h', ':').replace('H', ':') + # FORMAT HOUR WITH 2 DIGIT + offpeak_stop = datetime.strptime(offpeak_stop, '%H:%M') + offpeak_stop = datetime.strftime(offpeak_stop, '%H:%M') + result = is_between(dateHourMinute, (offpeak_begin, offpeak_stop)) + if result == True: + measure_type = "HC" + new_date.append(date) + else: + measure_type = "BASE" + query = f"INSERT OR REPLACE INTO {mode}_detail VALUES ('{pdl}','{date}',{value},{interval_length},'{measure_type}', 0)" + cur.execute(query) + con.commit() + f.log(f" => Import {len(new_date)} entry") + + elif detail['error_code'] == 2: + f.log(f"Fetch data error detected beetween {dateBegin} / {dateEnded}") + f.log(f" => {detail['description']}") + else: + f.log(f"API return error beetween {dateBegin} / {dateEnded}") + f.log(f" => {detail['description']}") + + con.commit() + except Exception as e: + f.log(f"=====> ERROR : Exception - detailBeetwen <======") + f.log(e) + for error_key, error_msg in detail.items(): + response[error_key] = error_msg + f.log(f"==> {error_key} => {error_msg}") + return response + + +def checkHistoryDetail(cur, con, mode, dateBegin, dateEnded): + pdl = main.pdl + + # FORCE THIS WEEK + if datetime.now().strftime('%Y-%m-%d') == dateEnded.strftime('%Y-%m-%d'): + result = { + "missing_data": True + } + else: + # CHECK CURRENT DATA + query = f"SELECT * FROM {mode}_detail WHERE pdl = '{pdl}' AND date BETWEEN '{dateBegin}' AND '{dateEnded}' ORDER BY date" + cur.execute(query) + query_result = cur.fetchall() + if len(query_result) < 160: + result = { + "missing_data": True + } + else: + result = { + "missing_data": False + } + return result diff --git a/app/function.py b/app/function.py index 41706dea..72b1276e 100755 --- a/app/function.py +++ b/app/function.py @@ -1,5 +1,9 @@ +import requests from paho.mqtt import client as mqtt_client from datetime import datetime +import json +from pprint import pprint +import main from importlib import import_module @@ -28,7 +32,7 @@ def publish(client, topic, msg, prefix=main.prefix): result = client.publish(f'{prefix}/{topic}', str(msg), qos=main.qos, retain=main.retain) status = result[0] if status == 0: - log(f" MQTT Send : {prefix}/{topic} => {msg}") 
+ log(f" MQTT Send : {prefix}/{topic} => {msg}","debug") else: log(f" - Failed to send message to topic {prefix}/{topic}") msg_count += 1 @@ -42,10 +46,20 @@ def on_message(client, userdata, msg): client.subscribe(client, sub_topic) client.on_message = on_message -def log(msg): - now = datetime.now() - print(f"{now} : {msg}") +def logLine(): + log("####################################################################################") +def log(msg, level="INFO "): + global debug + now = datetime.now() + level = level.upper() + display = False + if main.debug == True and level == "DEBUG": + display = True + if level == "INFO ": + display = True + if display == True: + print(f"{now} - {level} : {msg}") def splitLog(msg): format_log = "" @@ -66,3 +80,27 @@ def splitLog(msg): else: i = i + 1 cur_length = cur_length + 1 + +def apiRequest(cur, con, type="POST", url=None, headers=None, data=None): + config_query = f"SELECT * FROM config WHERE key = 'config'" + cur.execute(config_query) + query_result = cur.fetchall() + query_result = json.loads(query_result[0][1]) + log(f"call_number : {query_result['call_number']}", "debug") + if query_result["day"] == datetime.now().strftime('%Y-%m-%d'): + if query_result["call_number"] > query_result["max_call"]: + return { + "error_code": 2, + "description": f"API Call number per day is reached ({query_result['max_call']}), please wait until tomorrow to load the rest of data" + } + else: + query_result["call_number"] = int(query_result["call_number"]) + 1 + query_result["day"] = datetime.now().strftime('%Y-%m-%d') + query = f"UPDATE config SET key = 'config', value = '{json.dumps(query_result)}' WHERE key = 'config'" + cur.execute(query) + con.commit() + + else: + query_result["call_number"] = 0 + retour = requests.request(type, url=f"{url}", headers=headers, data=data).json() + return retour diff --git a/app/main.py b/app/main.py index bdf297fc..54205848 100755 --- a/app/main.py +++ b/app/main.py @@ -1,16 +1,19 @@ import os import time from dateutil.relativedelta import * +from datetime import datetime from distutils.util import strtobool import sqlite3 import locale from pprint import pprint +import json from importlib import import_module f = import_module("function") addr = import_module("addresses") cont = import_module("contract") day = import_module("daily") +detail = import_module("detail") ha = import_module("home_assistant") locale.setlocale(locale.LC_ALL, 'fr_FR.UTF-8') @@ -93,18 +96,22 @@ get_consumption = bool(strtobool(os.environ['GET_CONSUMPTION'])) else: get_consumption = True +if "GET_CONSUMPTION_DETAIL" in os.environ: + get_consumption_detail = bool(strtobool(os.environ['GET_CONSUMPTION_DETAIL'])) +else: + get_consumption_detail = True if "CONSUMPTION_PRICE_BASE" in os.environ: - consumption_base_price = float(os.environ['CONSUMPTION_PRICE_BASE']) + consumption_price_base = float(os.environ['CONSUMPTION_PRICE_BASE']) else: - consumption_base_price = 0 + consumption_price_base = 0 if "CONSUMPTION_PRICE_HC" in os.environ: - consumption_base_price = float(os.environ['CONSUMPTION_PRICE_HC']) + consumption_price_hc = float(os.environ['CONSUMPTION_PRICE_HC']) else: - consumption_base_price = 0 + consumption_price_hc = 0 if "CONSUMPTION_PRICE_HP" in os.environ: - consumption_base_price = float(os.environ['CONSUMPTION_PRICE_HP']) + consumption_price_hp = float(os.environ['CONSUMPTION_PRICE_HP']) else: - consumption_base_price = 0 + consumption_price_hp = 0 
######################################################################################################################## # PRODUCTION @@ -112,14 +119,24 @@ get_production = bool(strtobool(os.environ['GET_PRODUCTION'])) else: get_production = False -if "PRODUCTION_PRICE" in os.environ: - production_base = float(os.environ['PRODUCTION_PRICE']) +if "GET_PRODUCTION_DETAIL" in os.environ: + get_production_detail = bool(strtobool(os.environ['GET_PRODUCTION_DETAIL'])) else: - production_base = 0 + get_production_detail = False +# if "PRODUCTION_PRICE" in os.environ: +# production_price = float(os.environ['PRODUCTION_PRICE']) +# else: +# production_price = 0 + +######################################################################################################################## +# HC/HP +if "OFFPEAK_HOURS" in os.environ: + offpeak_hours = str(os.environ['OFFPEAK_HOURS']) +else: + offpeak_hours = None ######################################################################################################################## # ADDRESSES -# ! GENERATE 1 API CALL ! if "ADDRESSES" in os.environ: addresses = bool(strtobool(os.environ['ADDRESSES'])) else: @@ -132,34 +149,106 @@ if cycle < 3600: cycle = 3600 else: - cycle = 86400 + cycle = 3600 + +######################################################################################################################## +# REFRESH_CONTRACT +if "REFRESH_CONTRACT" in os.environ: + refresh_contract = bool(strtobool(os.environ['REFRESH_CONTRACT'])) +else: + refresh_contract = False + +######################################################################################################################## +# REFRESH_ADDRESSES +if "REFRESH_ADDRESSES" in os.environ: + refresh_addresses = bool(strtobool(os.environ['REFRESH_ADDRESSES'])) +else: + refresh_addresses = False + +######################################################################################################################## +# WIPE_CACHE +if "WIPE_CACHE" in os.environ: + wipe_cache = bool(strtobool(os.environ['WIPE_CACHE'])) +else: + wipe_cache = False + +######################################################################################################################## +# DEBUG +if "DEBUG" in os.environ: + debug = bool(strtobool(os.environ['DEBUG'])) +else: + debug = False api_no_result = [] + def init_database(cur): f.log("Initialise database") - # ADDRESSES - cur.execute('''CREATE TABLE addresses - (pdl TEXT, json TEXT)''') + + ## CONFIG + cur.execute('''CREATE TABLE config ( + key TEXT PRIMARY KEY, + value json NOT NULL)''') + cur.execute('''CREATE UNIQUE INDEX idx_config_key + ON config (key)''') + + ## ADDRESSES + cur.execute('''CREATE TABLE addresses ( + pdl TEXT PRIMARY KEY, + json json NOT NULL, + count INTEGER)''') cur.execute('''CREATE UNIQUE INDEX idx_pdl_addresses ON addresses (pdl)''') - # CONTRACT - cur.execute('''CREATE TABLE contracts - (pdl TEXT, json TEXT)''') + + ## CONTRACT + cur.execute('''CREATE TABLE contracts ( + pdl TEXT PRIMARY KEY, + json json NOT NULL, + count INTEGER)''') cur.execute('''CREATE UNIQUE INDEX idx_pdl_contracts ON contracts (pdl)''') - # CONSUMPTION - cur.execute('''CREATE TABLE consumption_daily - (pdl TEXT, date TEXT, value REAL, fail INTEGER)''') + ## CONSUMPTION + # DAILY + cur.execute('''CREATE TABLE consumption_daily ( + pdl TEXT NOT NULL, + date TEXT NOT NULL, + value INTEGER NOT NULL, + fail INTEGER)''') cur.execute('''CREATE UNIQUE INDEX idx_date_consumption ON consumption_daily (date)''') - # PRODUCTION - cur.execute('''CREATE TABLE 
production_daily - (pdl TEXT, date TEXT, value REAL, fail INTEGER)''') + + # DETAIL + cur.execute('''CREATE TABLE consumption_detail ( + pdl TEXT NOT NULL, + date TEXT NOT NULL, + value INTEGER NOT NULL, + interval INTEGER NOT NULL, + measure_type TEXT NOT NULL, + fail INTEGER)''') + cur.execute('''CREATE UNIQUE INDEX idx_date_consumption_detail + ON consumption_detail (date)''') + ## PRODUCTION + # DAILY + cur.execute('''CREATE TABLE production_daily ( + pdl TEXT NOT NULL, + date TEXT NOT NULL, + value INTEGER NOT NULL, + fail INTEGER)''') cur.execute('''CREATE UNIQUE INDEX idx_date_production ON production_daily (date)''') + # DETAIL + cur.execute('''CREATE TABLE production_detail ( + pdl TEXT NOT NULL, + date TEXT NOT NULL, + value INTEGER NOT NULL, + interval INTEGER NOT NULL, + fail INTEGER)''') + cur.execute('''CREATE UNIQUE INDEX idx_date_production_detail + ON production_detail (date)''') def run(): + + global offpeak_hours try: client = f.connect_mqtt() client.loop_start() @@ -168,12 +257,17 @@ def run(): while True: - f.log("####################################################################################") - f.log("Check database") # SQLlite if not os.path.exists('/data'): os.mkdir('/data') + if wipe_cache == True: + if os.path.exists('/data/enedisgateway.db'): + f.logLine() + f.log("Reset Cache") + os.remove("/data/enedisgateway.db") + + f.log("Check database") if not os.path.exists('/data/enedisgateway.db'): f.log(" => Init SQLite Database") con = sqlite3.connect('/data/enedisgateway.db', timeout=10) @@ -185,17 +279,49 @@ def run(): con = sqlite3.connect('/data/enedisgateway.db', timeout=10) cur = con.cursor() + # Check database structure try: - cur.execute("INSERT OR REPLACE INTO addresses VALUES ('0','0')") - cur.execute("INSERT OR REPLACE INTO contracts VALUES ('0','0')") - cur.execute("INSERT OR REPLACE INTO consumption_daily VALUES ('0','1970-01-01','0','0')") - cur.execute("INSERT OR REPLACE INTO production_daily VALUES ('0','1970-01-01','0','0')") + + ## Default Config + config_query = f"INSERT OR REPLACE INTO config VALUES (?, ?)" + config = { + "day": datetime.now().strftime('%Y-%m-%d'), + "call_number": 0, + "max_call": 15 + } + cur.execute(config_query, ["config", json.dumps(config)]) + con.commit() + + list_tables = ["config", "addresses", "contracts", "consumption_daily", "consumption_detail", "production_daily", "production_detail"] + + query = f"SELECT name FROM sqlite_master WHERE type='table';" + cur.execute(query) + query_result = cur.fetchall() + tables = [] + for table in query_result: + tables.append(str(table).replace("('", "").replace("',)", '')) + for tab in list_tables: + if not tab in tables: + f.log(f"Table {tab} is missing") + raise + cur.execute("INSERT OR REPLACE INTO config VALUES (?,?)", [0, 0]) + cur.execute("INSERT OR REPLACE INTO addresses VALUES (?,?,?)", [0, 0, 0]) + cur.execute("INSERT OR REPLACE INTO contracts VALUES (?,?,?)", [0, 0, 0]) + cur.execute("INSERT OR REPLACE INTO consumption_daily VALUES (?,?,?,?)", [0, '1970-01-01', 0, 0]) + cur.execute("INSERT OR REPLACE INTO consumption_detail VALUES (?,?,?,?,?,?)", [0, '1970-01-01', 0, 0, "", 0]) + cur.execute("INSERT OR REPLACE INTO production_daily VALUES (?,?,?,?)", [0, '1970-01-01', 0, 0]) + cur.execute("INSERT OR REPLACE INTO production_detail VALUES (?,?,?,?,?)", [0, '1970-01-01', 0, 0, 0]) + cur.execute("DELETE FROM config WHERE key = 0") cur.execute("DELETE FROM addresses WHERE pdl = 0") cur.execute("DELETE FROM contracts WHERE pdl = 0") cur.execute("DELETE FROM 
consumption_daily WHERE pdl = 0") + cur.execute("DELETE FROM consumption_detail WHERE pdl = 0") cur.execute("DELETE FROM production_daily WHERE pdl = 0") - except: + cur.execute("DELETE FROM production_detail WHERE pdl = 0") + except Exception as e: + f.log("=====> ERROR : Exception <======") + f.log(e) f.log(' Database structure is invalid ') f.log("Reset database") con.close() @@ -205,13 +331,15 @@ def run(): cur = con.cursor() init_database(cur) - f.log("####################################################################################") + f.logLine() f.log("Get contract :") - contract = cont.getContract(client) - if "error" in contract: + contract = cont.getContract(client, con, cur) + f.log(contract,"debug") + if "error_code" in contract: f.publish(client, f"error", str(1)) - for key, data in contract["errorMsg"].items(): + for key, data in contract["detail"].items(): f.publish(client, f"errorMsg/{key}", str(data)) + f.log("-- Stop import --") else: f.publish(client, f"error", str(0)) @@ -219,21 +347,29 @@ def run(): for key, data in contract_data.items(): if key == "last_activation_date": last_activation_date = data + if key == "offpeak_hours": + offpeak_hours = data if ha_autodiscovery == True: ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=key, value=data) if addresses == True: - f.log("####################################################################################") + f.logLine() f.log("Get Addresses :") - addr.getAddresses(client, cur) + addresse = addr.getAddresses(client, con, cur) + if "error_code" in addresse: + f.publish(client, f"addresses/error", str(1)) + for key, data in addresse["detail"].items(): + f.publish(client, f"addresses/errorMsg/{key}", str(data)) + else: + f.publish(client, f"addresses/error", str(0)) if get_consumption == True: - f.log("####################################################################################") + f.logLine() f.log("Get Consumption :") - # ha_discovery_consumption = c.dailyConsumption(cur, client, last_activation_date) - ha_discovery_consumption = day.getDaily(cur, client, "consumption", last_activation_date) + ha_discovery_consumption = day.getDaily(cur, con, client, "consumption", last_activation_date) + # pprint(ha_discovery_consumption) if ha_autodiscovery == True: - f.log("####################################################################################") + f.logLine() f.log("Home Assistant auto-discovery (Consumption) :") for pdl, data in ha_discovery_consumption.items(): for name, sensor_data in data.items(): @@ -253,17 +389,49 @@ def run(): state_class = sensor_data['state_class'] else: state_class = None - ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'], - attributes=attributes, unit_of_meas=unit_of_meas, - device_class=device_class, state_class=state_class) + if "value" in sensor_data: + ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'], + attributes=attributes, unit_of_meas=unit_of_meas, + device_class=device_class, state_class=state_class) + # f.logLine() + + if get_consumption_detail == True: + f.log("Get Consumption Detail:") + ha_discovery_consumption = detail.getDetail(cur, con, client, "consumption", last_activation_date, offpeak_hours) + if ha_autodiscovery == True: + f.logLine() + f.log("Home Assistant auto-discovery (Consumption Detail) :") + for pdl, data in ha_discovery_consumption.items(): + for name, sensor_data in data.items(): + if "attributes" in sensor_data: + attributes = 
sensor_data['attributes'] + else: + attributes = None + if "unit_of_meas" in sensor_data: + unit_of_meas = sensor_data['unit_of_meas'] + else: + unit_of_meas = None + if "device_class" in sensor_data: + device_class = sensor_data['device_class'] + else: + device_class = None + if "state_class" in sensor_data: + state_class = sensor_data['state_class'] + else: + state_class = None + if "value" in sensor_data: + ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'], + attributes=attributes, unit_of_meas=unit_of_meas, + device_class=device_class, state_class=state_class) + + # f.logLine() if get_production == True: - f.log("####################################################################################") + f.logLine() f.log("Get production :") - # ha_discovery_production = p.dailyProduction(cur, client, last_activation_date) - ha_discovery_production = day.getDaily(cur, client, "production", last_activation_date) + ha_discovery_production = day.getDaily(cur, con, client, "production", last_activation_date) if ha_autodiscovery == True: - f.log("####################################################################################") + f.logLine() f.log("Home Assistant auto-discovery (Production) :") for pdl, data in ha_discovery_production.items(): for name, sensor_data in data.items(): @@ -286,11 +454,43 @@ def run(): ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'], attributes=attributes, unit_of_meas=unit_of_meas, device_class=device_class, state_class=state_class) + # f.logLine() + + if get_production_detail == True: + f.logLine() + f.log("Get production Detail:") + ha_discovery_consumption = detail.getDetail(cur, con, client, "production", last_activation_date, offpeak_hours) + if ha_autodiscovery == True: + f.logLine() + f.log("Home Assistant auto-discovery (Production Detail) :") + for pdl, data in ha_discovery_consumption.items(): + for name, sensor_data in data.items(): + if "attributes" in sensor_data: + attributes = sensor_data['attributes'] + else: + attributes = None + if "unit_of_meas" in sensor_data: + unit_of_meas = sensor_data['unit_of_meas'] + else: + unit_of_meas = None + if "device_class" in sensor_data: + device_class = sensor_data['device_class'] + else: + device_class = None + if "state_class" in sensor_data: + state_class = sensor_data['state_class'] + else: + state_class = None + if "value" in sensor_data: + ha.haAutodiscovery(client=client, type="sensor", pdl=pdl, name=name, value=sensor_data['value'], + attributes=attributes, unit_of_meas=unit_of_meas, + device_class=device_class, state_class=state_class) + query = f"SELECT * FROM consumption_daily WHERE pdl == '{pdl}' AND fail > {fail_count} ORDER BY date" rows = con.execute(query) if rows.fetchone() is not None: - f.log("####################################################################################") + f.logLine() f.log(f"Consumption data not found on enedis (after {fail_count} retry) :") # pprint(rows.fetchall()) for row in rows: @@ -299,7 +499,7 @@ def run(): query = f"SELECT * FROM production_daily WHERE pdl == '{pdl}' AND fail > {fail_count} ORDER BY date" rows = con.execute(query) if rows.fetchone() is not None: - f.log("####################################################################################") + f.logLine() f.log(f"Production data not found on enedis (after {fail_count} retry) :") # pprint(rows.fetchall()) for row in rows:
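
A note on the HP/HC split that the new `app/detail.py` introduces: each load-curve point is tagged `HC` when its time of day falls inside one of the configured off-peak ranges, and the range check has to wrap around midnight because French off-peak windows usually do. Below is a minimal standalone sketch of that classification, using the `OFFPEAK_HOURS` format from the README (`"HHhMM-HHhMM;..."`); the helper names are illustrative, not the module's exact API.

```python
from datetime import datetime

def is_between(time, time_range):
    # Handle ranges that wrap past midnight, e.g. ("22:30", "06:30").
    if time_range[1] < time_range[0]:
        return time >= time_range[0] or time <= time_range[1]
    return time_range[0] <= time <= time_range[1]

def classify(reading_date, offpeak_hours):
    """Return 'HC' if the reading falls in an off-peak range, else 'HP'.

    offpeak_hours uses the OFFPEAK_HOURS format: "22h30-06h30;12h00-14h00".
    """
    hour_minute = datetime.strptime(reading_date, "%Y-%m-%d %H:%M:%S").strftime("%H:%M")
    for offpeak_hour in offpeak_hours.split(";"):
        begin, stop = (part.replace("h", ":").replace("H", ":")
                       for part in offpeak_hour.split("-"))
        # Normalize to zero-padded HH:MM so string comparison works.
        begin = datetime.strptime(begin, "%H:%M").strftime("%H:%M")
        stop = datetime.strptime(stop, "%H:%M").strftime("%H:%M")
        if is_between(hour_minute, (begin, stop)):
            return "HC"
    return "HP"

print(classify("2021-10-13 02:30:00", "22h30-06h30"))  # HC
print(classify("2021-10-13 12:00:00", "22h30-06h30"))  # HP
```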
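
The 15-calls-per-day ceiling described in the README is enforced by `apiRequest` in `app/function.py`, which keeps a small JSON state (`day`, `call_number`, `max_call`) in the single-row `config` table. A rough sketch of that bookkeeping under the same schema; note that, unlike the diff (which resets `call_number` on a new day without writing it back), this sketch persists the rollover:

```python
import json
import sqlite3
from datetime import datetime

def check_api_budget(con):
    """Increment today's call counter; return False when the budget is spent."""
    cur = con.cursor()
    cur.execute("SELECT value FROM config WHERE key = 'config'")
    state = json.loads(cur.fetchone()[0])
    today = datetime.now().strftime("%Y-%m-%d")
    if state["day"] != today:
        # New day: reset the counter before counting this call.
        state.update({"day": today, "call_number": 0})
    if state["call_number"] >= state["max_call"]:
        return False  # caller should fall back to cached data
    state["call_number"] += 1
    cur.execute("UPDATE config SET value = ? WHERE key = 'config'", (json.dumps(state),))
    con.commit()
    return True

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE config (key TEXT PRIMARY KEY, value json NOT NULL)")
con.execute("INSERT INTO config VALUES ('config', ?)",
            (json.dumps({"day": "1970-01-01", "call_number": 0, "max_call": 15}),))
for _ in range(17):
    print(check_api_budget(con))  # True x15, then False
```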
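
On the BASE vs HP/HC comparison: `detail.py` prices the same energy twice and reports `base_vs_offpeak = 100 - 100 * measure_base_euro / measure_hphc_euro`, positive when BASE wins. Each load-curve `value` is a mean power in W over `interval` minutes, so energy accumulates as `value * interval / 60` Wh. A worked example with made-up tariffs (the `price_*` figures below are illustrative only):

```python
# Monthly energy split derived from the load curve, in Wh
# (each point contributes value * interval / 60 Wh).
measure_hp_wh = 180_000  # peak-hours energy
measure_hc_wh = 120_000  # off-peak energy
measure_total_wh = measure_hp_wh + measure_hc_wh

# Hypothetical prices per kWh (CONSUMPTION_PRICE_BASE/HP/HC).
price_base, price_hp, price_hc = 0.1740, 0.1841, 0.1470

measure_base_euro = measure_total_wh / 1000 * price_base    # 52.20
measure_hphc_euro = (measure_hp_wh / 1000 * price_hp
                     + measure_hc_wh / 1000 * price_hc)     # 50.78

# Positive => BASE is cheaper; negative => HC/HP is cheaper.
base_vs_offpeak = 100 - 100 * measure_base_euro / measure_hphc_euro
best_plan = "BASE" if base_vs_offpeak > 0 else "HC/HP"
print(f"BASE: {measure_base_euro:.2f} €, HC/HP: {measure_hphc_euro:.2f} €")
print(f"best plan: {best_plan} (gap {abs(base_vs_offpeak):.2f} %)")
```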
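
Finally, the blacklist: the `fail` column in the `*_daily` tables counts how many cycles a date has come back empty, and `checkHistoryDaily` treats a row with `fail >= fail_count` as settled, so it stops being requested. A condensed sketch of that per-date decision (`needs_fetch` is a hypothetical helper; rows follow the `(pdl, date, value, fail)` layout of `consumption_daily`):

```python
FAIL_COUNT = 7  # matches "7 tries (7 x CYCLE)" in the README

def needs_fetch(row, fail_count=FAIL_COUNT):
    """row is (pdl, date, value, fail) from {mode}_daily, or None if absent."""
    if row is None:
        return True    # never seen: fetch it
    _, _, value, fail = row
    if fail >= fail_count:
        return False   # blacklisted: stop asking Enedis
    if value == 0:
        return True    # hole in the data: retry, fail counter bumps later
    return False       # real measurement already cached

print(needs_fetch(None))                              # True
print(needs_fetch(("pdl", "2021-10-01", 0, 3)))       # True  (retry)
print(needs_fetch(("pdl", "2021-10-01", 0, 7)))       # False (blacklisted)
print(needs_fetch(("pdl", "2021-10-01", 12345, 0)))   # False (cached)
```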