From bf4b753c405d5968ad182dd83cd7d259da535e1d Mon Sep 17 00:00:00 2001
From: valsr
Date: Fri, 21 Jul 2023 18:20:33 -0400
Subject: [PATCH 1/3] Check _attributes for None before accessing

---
 homeassistant2influxdb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/homeassistant2influxdb.py b/homeassistant2influxdb.py
index b790e8f..4363f95 100755
--- a/homeassistant2influxdb.py
+++ b/homeassistant2influxdb.py
@@ -221,7 +221,7 @@ def main():
             continue
 
         # collect statistics (remove this code block to speed up processing slightly)
-        if "friendly_name" in _attributes:
+        if _attributes is not None and "friendly_name" in _attributes:
             friendly_name = _attributes["friendly_name"]
 
             if _entity_id not in statistics:

From 7f5619a2cfb81c34440b1a8f218dba425d3afcfd Mon Sep 17 00:00:00 2001
From: valsr
Date: Fri, 21 Jul 2023 18:23:15 -0400
Subject: [PATCH 2/3] Updated query for recent schema change (2023.6.3+)

---
 homeassistant2influxdb.py | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/homeassistant2influxdb.py b/homeassistant2influxdb.py
index 4363f95..f66166b 100755
--- a/homeassistant2influxdb.py
+++ b/homeassistant2influxdb.py
@@ -289,23 +289,26 @@ if table == "states":
         # Using two different SQL queries in a Union to support data made with older HA db schema:
         # https://github.com/home-assistant/core/pull/71165
-        sql_query = """select SQL_NO_CACHE states.entity_id,
+        sql_query = f"""select SQL_NO_CACHE states_meta.entity_id,
                               states.state,
                               states.attributes,
                               events.event_type as event_type,
-                              events.time_fired as time_fired
+                              FROM_UNIXTIME(states.last_updated_ts) as time_fired
                        from states,
-                            events
-                       where events.event_id = states.event_id
+                            events,
+                            states_meta
+                       where events.event_id = states.event_id and states_meta.metadata_id = states.metadata_id
                          and states.attributes is not null
                        UNION
-                       select states.entity_id,
+                       select states_meta.entity_id,
                               states.state,
                               state_attributes.shared_attrs as attributes,
                               'state_changed',
-                              states.last_updated as time_fired
-                       from states, state_attributes
+                              FROM_UNIXTIME(states.last_updated_ts) as time_fired
+                       from states, state_attributes, states_meta
                        where event_id is null
-                         and states.attributes_id = state_attributes.attributes_id;"""
+                         and states.attributes_id = state_attributes.attributes_id
+                         and states_meta.metadata_id = states.metadata_id
+                         and state_attributes.shared_attrs is not null"""
     elif table == "statistics":
         if arg_tables == 'both':
             # If we're adding both, we should not add statistics for the same time period we're adding events
@@ -320,7 +323,7 @@
                        statistics.max,
                        state_attributes.shared_attrs,
                        'state_changed',
-                       statistics.start
+                       {"FROM_UNIXTIME(statistics.start_ts)" if is_mysql else "datetime(statistics.start_ts, 'unixepoch', 'localtime')"} as time_fired
                 FROM statistics_meta,
                      statistics,
                      state_attributes

From e6187f1ba601f19aee88ec3e42a142c35254e854 Mon Sep 17 00:00:00 2001
From: valsr
Date: Sat, 22 Jul 2023 12:20:15 -0400
Subject: [PATCH 3/3] Updated datetime fix to work with either SQLite or MySQL

---
 homeassistant2influxdb.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/homeassistant2influxdb.py b/homeassistant2influxdb.py
index f66166b..2df0358 100755
--- a/homeassistant2influxdb.py
+++ b/homeassistant2influxdb.py
@@ -124,7 +124,8 @@ def main():
     influx = get_influx_connection(influx_config, test_write=True, test_read=True)
     converter = _generate_event_to_json(influx_config)
 
-    if args.type == "MySQL" or args.type == "MariaDB":
+    is_mysql = args.type == "MySQL" or args.type == "MariaDB"
+    if is_mysql:
         # connect to MySQL/MariaDB database
         connection = mysql_connect(host=args.host,
                                    user=args.user,
@@ -168,7 +169,7 @@
         # map to count names and number of measurements for each entity
         statistics = {}
         # Execute correct query for table
-        sql_query = formulate_sql_query(table, args.table)
+        sql_query = formulate_sql_query(table, args.table, is_mysql)
         cursor = connection.cursor()
         cursor.execute(sql_query)
 
@@ -281,7 +282,7 @@ def get_tables(table_key: str) -> list:
         print("ERROR: argument --table should be \"states\" or \"statistics\"")
 
 
-def formulate_sql_query(table: str, arg_tables: str):
+def formulate_sql_query(table: str, arg_tables: str, is_mysql: bool):
     """
     Retrieves data from the HA databse
     """
@@ -293,7 +294,7 @@
                               states.state,
                               states.attributes,
                               events.event_type as event_type,
-                              FROM_UNIXTIME(states.last_updated_ts) as time_fired
+                              {"FROM_UNIXTIME(states.last_updated_ts)" if is_mysql else "datetime(states.last_updated_ts, 'unixepoch', 'localtime')"} as time_fired
                        from states,
                             events,
                             states_meta
@@ -303,7 +304,7 @@
                               states.state,
                               state_attributes.shared_attrs as attributes,
                               'state_changed',
-                              FROM_UNIXTIME(states.last_updated_ts) as time_fired
+                              {"FROM_UNIXTIME(states.last_updated_ts)" if is_mysql else "datetime(states.last_updated_ts, 'unixepoch', 'localtime')"} as time_fired
                        from states, state_attributes, states_meta
                        where event_id is null
                          and states.attributes_id = state_attributes.attributes_id
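
Note on the dialect switch in patches 2 and 3: the newer Home Assistant schema stores timestamps as epoch seconds in the *_ts columns, so the query has to convert them back to a datetime with FROM_UNIXTIME() on MySQL/MariaDB and datetime(..., 'unixepoch', 'localtime') on SQLite. Below is a minimal standalone sketch of that selection; the helper name epoch_to_datetime_sql and the demo block are illustrative only and are not part of homeassistant2influxdb.py.

    def epoch_to_datetime_sql(column: str, is_mysql: bool) -> str:
        """Return the SQL expression that renders an epoch-seconds column as a datetime."""
        if is_mysql:
            # MySQL/MariaDB dialect
            return f"FROM_UNIXTIME({column})"
        # SQLite dialect
        return f"datetime({column}, 'unixepoch', 'localtime')"

    if __name__ == "__main__":
        # Example: the expression used for states.last_updated_ts in the migrated query
        print(epoch_to_datetime_sql("states.last_updated_ts", is_mysql=True))
        # -> FROM_UNIXTIME(states.last_updated_ts)
        print(epoch_to_datetime_sql("states.last_updated_ts", is_mysql=False))
        # -> datetime(states.last_updated_ts, 'unixepoch', 'localtime')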