Here is the essence of the Python code I used to import the values. I store my data in MariaDB, but a similar procedure should be possible with other database types.
Note that I have not yet managed to also calculate the cost and compensation values. I am not sure yet what the simplest way to obtain these would be.
My data from Fronius is organized as shown below: it contains the amount of energy produced or consumed within 5-minute time intervals.
# Advice: stop the Home Assistant core before you finally change the SQL data.
# Otherwise you may get some strange entries from current sensor values.
# Furthermore, I set my purge time to 1, so that the short_term table holds a minimal number of entries.
# Connect to MariaDB Platform - target database (verify that addon port 3306 is enabled in the Home Assistant config)
# Module Imports
import mariadb
import sys
from datetime import datetime, timedelta
import pandas as pd
# Open the connection to the Home Assistant MariaDB database.
# (As pasted, the try/except bodies had lost their indentation, which is a
# SyntaxError in Python — restored here.)
try:
    conn2 = mariadb.connect(
        user="<DBuser>",
        password="<DBpassword>",
        host="<serverip-address>",
        port=3306,
        database="homeassistant_db",
    )
except mariadb.Error as e:
    # Abort the whole import if we cannot reach the database.
    print(f"Error connecting to MariaDB Platform: {e}")
    sys.exit(1)
# Cursor used for all following SQL statements.
cur_t = conn2.cursor()

# Park the sensor's existing rows under a temporary metadata_id so no current
# entries are lost; they can be restored later if needed.
# Don't forget to do this for every sensor/metadata_id you want to manipulate.
metadata_id_curr = 107    # metadata_id currently used by the sensor
metadata_id_new = 88107   # temporary "parking" metadata_id
# A table name cannot be bound as a query parameter, so iterate over a fixed
# whitelist of table names instead of duplicating the statement.
for table in ("statistics", "statistics_short_term"):
    query = f"Update {table} SET metadata_id = %s WHERE metadata_id = %s"
    cur_t.execute(query, (metadata_id_new, metadata_id_curr))
    conn2.commit()
# Load the 5-minute Fronius export: column 0 holds the timestamps (used as
# index); row 1 (presumably a units row — confirm against your export) is skipped.
df_set = pd.read_excel(
    r"<pathToData>\2020_2022_PV_Daten.xlsx",
    parse_dates=[0],
    index_col=0,
    skiprows=[1],
)


def _hourly_sum(column):
    """Resample the 5-minute measurements of *column* to hourly sums."""
    return df_set.resample('1H')[column].sum()


# Meter sensors (consumed/produced energy) are turned into a cumulative sum,
# to simulate the readings of a total_increasing meter.
energy_real_consumed_fronius_meter = pd.DataFrame()
energy_real_consumed_fronius_meter['states'] = _hourly_sum('Energie vom Netz bezogen').cumsum()

energy_real_produced_fronius_meter = pd.DataFrame()
energy_real_produced_fronius_meter['states'] = _hourly_sum('Energie ins Netz eingespeist').cumsum()

# Inverter data is transformed into a cumulative sum that resets every day.
energy_day_fronius_inverter = pd.DataFrame()
df = _hourly_sum('Energie | Symo 6.0-3-M (1)')
energy_day_fronius_inverter['states'] = df.groupby(df.index.date).cumsum()
# Simulate a meter reading that started at a given value some time in the
# past — e.g. mine was at 1940.26141 a few years ago.
offset = 1940.26141
prevState = offset  # meter state of the previous hour
prevSum = 0         # running value of the total_increasing "sum" column
count = 0           # number of rows inserted
# metadata_id of the corresponding sensor in the statistics_meta table
q_metadata_id = 105
query = """INSERT INTO statistics (created,start,state,sum,metadata_id) VALUES (%s,%s, %s, %s,%s)"""
for i in energy_real_produced_fronius_meter.index:
    # 'created' is set to start + 1h (+1s); 'start' is shifted by -2h because
    # Home Assistant stores statistics in UTC internally and my local time was
    # UTC+2 — adjust the shift for your own timezone.
    ts = pd.to_datetime(i)
    q_created = (ts + timedelta(hours=-1, seconds=+1)).strftime('%Y-%m-%d %H:%M:%S')
    q_start = (ts + timedelta(hours=-2)).strftime('%Y-%m-%d %H:%M:%S')
    # q_state is the total_increasing meter reading; /1000 presumably converts
    # Wh to kWh — confirm against your export's units.
    q_state = offset + energy_real_produced_fronius_meter['states'].at[i] / 1000
    # Only a non-negative delta increases the sum; a decreasing state is
    # treated like a meter reset and contributes nothing.
    if q_state - prevState >= 0:
        q_sum = prevSum + q_state - prevState
        prevState = q_state
        prevSum = q_sum
    else:
        q_sum = prevSum
        prevState = q_state
    cur_t.execute(query, (q_created, q_start, q_state, q_sum, q_metadata_id))
    count = count + 1
# Commit once after the loop instead of once per row: identical result on
# success, much faster for thousands of inserts.
conn2.commit()
print(str(count) + " datasets inserted")
# Offset for the second meter reader (consumed energy).
offset = 3818.66521
prevState = offset
prevSum = 0
# ...similar code for the second total-increasing meter...

# Setup for the daily-reset inverter values: no offset, counters start at zero.
# (The original also re-imported datetime/timedelta here and initialized an
# unused deltaState — both removed; the import at the top of the file suffices.)
prevState = 0
prevSum = 0
count = 0
# metadata_id of the inverter sensor in the statistics_meta table
q_metadata_id = 96
query = """INSERT INTO statistics (created,start,state,sum,metadata_id) VALUES (%s,%s, %s, %s,%s)"""
for i in energy_day_fronius_inverter.index:
    # Same timestamp handling as for the meter sensors: created = start + 1h
    # (+1s), start shifted -2h for the UTC offset — adjust for your timezone.
    ts = pd.to_datetime(i)
    q_created = (ts + timedelta(hours=-1, seconds=+1)).strftime('%Y-%m-%d %H:%M:%S')
    q_start = (ts + timedelta(hours=-2)).strftime('%Y-%m-%d %H:%M:%S')
    # Notice that the state has no offset here; the first entry starts at zero.
    q_state = round(energy_day_fronius_inverter['states'].at[i] / 1000, 3)
    # The sum only grows by non-negative deltas; the nightly reset (state
    # drops back towards 0) leaves the sum unchanged.
    if q_state - prevState >= 0:
        q_sum = prevSum + q_state - prevState
        prevState = q_state
        prevSum = q_sum
    else:
        q_sum = prevSum
        prevState = q_state
    cur_t.execute(query, (q_created, q_start, q_state, q_sum, q_metadata_id))
    count = count + 1
# Single commit after the loop: same outcome on success, much faster.
conn2.commit()
print(str(count) + " datasets inserted")
# Finally, move the parked short-term statistics back to the real sensor id.
# You can tweak their sum values with phpMyAdmin before switching them back
# (no more than 12 entries per sensor if the purge time is set to 1) — or
# simply leave them parked forever and accept missing one day of data.
restore_query = """Update statistics_short_term SET metadata_id = %s WHERE metadata_id = %s"""
cur_t.execute(restore_query, (107, 88107))
conn2.commit()