Compare commits
4 Commits
standard_r
...
forecaster
| Author | SHA1 | Date |
|---|---|---|
| (unknown) | afbcd81310 | (unknown) |
| (unknown) | fd87257f37 | (unknown) |
| (unknown) | 876115cf6e | (unknown) |
| (unknown) | f0e7c1338b | (unknown) |
213
data_base_operations/transform_old_db_to_new.py
Normal file
213
data_base_operations/transform_old_db_to_new.py
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
import os, re, math, time
|
||||||
|
from datetime import datetime, timezone, timedelta
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
from influxdb_client import InfluxDBClient, Point, WritePrecision
|
||||||
|
from influxdb_client.client.write_api import SYNCHRONOUS
|
||||||
|
from influxdb_client.rest import ApiException
|
||||||
|
|
||||||
|
|
||||||
|
# -----------------------
|
||||||
|
# CONFIG
|
||||||
|
# -----------------------
|
||||||
|
INFLUX_URL = "http://192.168.1.146:8086"
|
||||||
|
INFLUX_ORG = "allmende"
|
||||||
|
INFLUX_TOKEN = os.environ.get("INFLUX_TOKEN", "Cw_naEZyvJ3isiAh1P4Eq3TsjcHmzzDFS7SlbKDsS6ZWL04fMEYixWqtNxGThDdG27S9aW5g7FP9eiq5z1rsGA==")
|
||||||
|
|
||||||
|
SOURCE_BUCKET = "allmende_db"
|
||||||
|
TARGET_BUCKET = "allmende_db_v2"
|
||||||
|
|
||||||
|
MEASUREMENTS = [
|
||||||
|
"hp_master", "hp_slave", "pv_forecast", "sg_ready",
|
||||||
|
"solaredge_master", "solaredge_meter", "solaredge_slave", "wohnung_2_6"
|
||||||
|
]
|
||||||
|
|
||||||
|
START_DT = datetime(2025, 6, 1, tzinfo=timezone.utc)
|
||||||
|
STOP_DT = datetime.now(timezone.utc)
|
||||||
|
WINDOW = timedelta(days=1)
|
||||||
|
|
||||||
|
EXCEL_PATH = "../modbus_registers/heat_pump_registers.xlsx"
|
||||||
|
EXCEL_SHEET = "Register_Map"
|
||||||
|
|
||||||
|
BATCH_SIZE = 1000
|
||||||
|
MAX_RETRIES = 8
|
||||||
|
|
||||||
|
|
||||||
|
# -----------------------
|
||||||
|
# Helpers
|
||||||
|
# -----------------------
|
||||||
|
def normalize(s) -> str:
|
||||||
|
s = "" if s is None else str(s).strip()
|
||||||
|
return re.sub(r"\s+", " ", s)
|
||||||
|
|
||||||
|
def is_invalid_sentinel(v: float) -> bool:
|
||||||
|
return v in (-999.9, -999.0, 30000.0, 32767.0, 65535.0)
|
||||||
|
|
||||||
|
def ensure_bucket(client: InfluxDBClient, name: str):
|
||||||
|
bapi = client.buckets_api()
|
||||||
|
if bapi.find_bucket_by_name(name):
|
||||||
|
return
|
||||||
|
bapi.create_bucket(bucket_name=name, org=INFLUX_ORG, retention_rules=None)
|
||||||
|
|
||||||
|
def build_field_type_map_from_excel(path: str) -> dict[str, str]:
|
||||||
|
df = pd.read_excel(path, sheet_name=EXCEL_SHEET)
|
||||||
|
df = df[df["Register_Type"].astype(str).str.upper() == "IR"].copy()
|
||||||
|
df["Address"] = df["Address"].astype(int)
|
||||||
|
df["Description"] = df["Description"].fillna("").astype(str)
|
||||||
|
df["Tag_Name"] = df["Tag_Name"].fillna("").astype(str)
|
||||||
|
df["Data_Type"] = df["Data_Type"].fillna("").astype(str)
|
||||||
|
|
||||||
|
m: dict[str, str] = {}
|
||||||
|
for _, r in df.iterrows():
|
||||||
|
addr = int(r["Address"])
|
||||||
|
desc = normalize(r["Description"])
|
||||||
|
tag = normalize(r["Tag_Name"])
|
||||||
|
dtp = normalize(r["Data_Type"]).upper()
|
||||||
|
if tag:
|
||||||
|
m[tag] = dtp
|
||||||
|
old_key = normalize(f"{addr} - {desc}".strip(" -"))
|
||||||
|
if old_key:
|
||||||
|
m[old_key] = dtp
|
||||||
|
return m
|
||||||
|
|
||||||
|
def coerce_value_to_dtype(v, dtype: str):
|
||||||
|
if v is None:
|
||||||
|
return None
|
||||||
|
dtp = (dtype or "").upper()
|
||||||
|
|
||||||
|
if isinstance(v, (int, float)):
|
||||||
|
fv = float(v)
|
||||||
|
if math.isnan(fv) or math.isinf(fv):
|
||||||
|
return None
|
||||||
|
|
||||||
|
if dtp in ("BOOL", "BOOLEAN"):
|
||||||
|
if isinstance(v, bool): return v
|
||||||
|
if isinstance(v, (int, float)): return bool(int(v))
|
||||||
|
return None
|
||||||
|
|
||||||
|
if dtp.startswith("INT") or dtp.startswith("UINT"):
|
||||||
|
if isinstance(v, bool): return int(v)
|
||||||
|
if isinstance(v, (int, float)): return int(float(v))
|
||||||
|
return None
|
||||||
|
|
||||||
|
if dtp.startswith("FLOAT") or dtp in ("DOUBLE",):
|
||||||
|
if isinstance(v, bool): return float(int(v))
|
||||||
|
if isinstance(v, (int, float)): return float(v)
|
||||||
|
return None
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def write_with_retry(write_api, batch):
|
||||||
|
delay = 1.0
|
||||||
|
last_msg = ""
|
||||||
|
for _ in range(MAX_RETRIES):
|
||||||
|
try:
|
||||||
|
write_api.write(bucket=TARGET_BUCKET, org=INFLUX_ORG, record=batch)
|
||||||
|
return
|
||||||
|
except ApiException as e:
|
||||||
|
last_msg = getattr(e, "body", "") or str(e)
|
||||||
|
status = getattr(e, "status", None)
|
||||||
|
if "timeout" in last_msg.lower() or status in (429, 500, 502, 503, 504):
|
||||||
|
time.sleep(delay)
|
||||||
|
delay = min(delay * 2, 30)
|
||||||
|
continue
|
||||||
|
raise
|
||||||
|
raise RuntimeError(f"Write failed after {MAX_RETRIES} retries: {last_msg}")
|
||||||
|
|
||||||
|
def window_already_migrated(query_api, measurement: str, start: datetime, stop: datetime) -> bool:
|
||||||
|
# Prüft: gibt es im Zielbucket im Fenster mindestens 1 Punkt?
|
||||||
|
flux = f'''
|
||||||
|
from(bucket: "{TARGET_BUCKET}")
|
||||||
|
|> range(start: time(v: "{start.isoformat()}"), stop: time(v: "{stop.isoformat()}"))
|
||||||
|
|> filter(fn: (r) => r._measurement == "{measurement}")
|
||||||
|
|> limit(n: 1)
|
||||||
|
'''
|
||||||
|
tables = query_api.query(flux, org=INFLUX_ORG)
|
||||||
|
for t in tables:
|
||||||
|
if t.records:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def migrate_window(query_api, write_api, measurement: str,
|
||||||
|
start: datetime, stop: datetime,
|
||||||
|
type_map: dict[str, str],
|
||||||
|
do_type_cast: bool) -> int:
|
||||||
|
flux = f'''
|
||||||
|
from(bucket: "{SOURCE_BUCKET}")
|
||||||
|
|> range(start: time(v: "{start.isoformat()}"), stop: time(v: "{stop.isoformat()}"))
|
||||||
|
|> filter(fn: (r) => r._measurement == "{measurement}")
|
||||||
|
|> keep(columns: ["_time","_measurement","_field","_value"])
|
||||||
|
'''
|
||||||
|
tables = query_api.query(flux, org=INFLUX_ORG)
|
||||||
|
|
||||||
|
batch, written = [], 0
|
||||||
|
for table in tables:
|
||||||
|
for rec in table.records:
|
||||||
|
t = rec.get_time()
|
||||||
|
field = normalize(rec.get_field())
|
||||||
|
value = rec.get_value()
|
||||||
|
if value is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if do_type_cast:
|
||||||
|
dtp = type_map.get(field)
|
||||||
|
if dtp:
|
||||||
|
cv = coerce_value_to_dtype(value, dtp)
|
||||||
|
if cv is None:
|
||||||
|
continue
|
||||||
|
if isinstance(cv, (int, float)) and is_invalid_sentinel(float(cv)):
|
||||||
|
continue
|
||||||
|
value = cv
|
||||||
|
# kein Mapping -> unverändert schreiben
|
||||||
|
|
||||||
|
batch.append(Point(measurement).field(field, value).time(t, WritePrecision.NS))
|
||||||
|
|
||||||
|
if len(batch) >= BATCH_SIZE:
|
||||||
|
write_with_retry(write_api, batch)
|
||||||
|
written += len(batch)
|
||||||
|
batch = []
|
||||||
|
|
||||||
|
if batch:
|
||||||
|
write_with_retry(write_api, batch)
|
||||||
|
written += len(batch)
|
||||||
|
|
||||||
|
return written
|
||||||
|
|
||||||
|
|
||||||
|
# -----------------------
|
||||||
|
# Main
|
||||||
|
# -----------------------
|
||||||
|
def main():
|
||||||
|
if not INFLUX_TOKEN:
|
||||||
|
raise RuntimeError("INFLUX_TOKEN fehlt (Env-Var INFLUX_TOKEN setzen).")
|
||||||
|
|
||||||
|
with InfluxDBClient(url=INFLUX_URL, token=INFLUX_TOKEN, org=INFLUX_ORG, timeout=900_000) as client:
|
||||||
|
ensure_bucket(client, TARGET_BUCKET)
|
||||||
|
|
||||||
|
type_map = build_field_type_map_from_excel(EXCEL_PATH)
|
||||||
|
query_api = client.query_api()
|
||||||
|
write_api = client.write_api(write_options=SYNCHRONOUS)
|
||||||
|
|
||||||
|
for meas in MEASUREMENTS:
|
||||||
|
do_cast = meas in ("hp_master", "hp_slave")
|
||||||
|
cur, total = START_DT, 0
|
||||||
|
print(f"\n== {meas} (cast={'ON' if do_cast else 'OFF'}) ==")
|
||||||
|
|
||||||
|
while cur < STOP_DT:
|
||||||
|
nxt = min(cur + WINDOW, STOP_DT)
|
||||||
|
|
||||||
|
if window_already_migrated(query_api, meas, cur, nxt):
|
||||||
|
print(f"{cur.isoformat()} -> {nxt.isoformat()} : SKIP (existiert schon)")
|
||||||
|
cur = nxt
|
||||||
|
continue
|
||||||
|
|
||||||
|
n = migrate_window(query_api, write_api, meas, cur, nxt, type_map, do_cast)
|
||||||
|
total += n
|
||||||
|
print(f"{cur.isoformat()} -> {nxt.isoformat()} : {n} (gesamt {total})")
|
||||||
|
cur = nxt
|
||||||
|
|
||||||
|
print(f"== Fertig {meas}: {total} Punkte ==")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
12
heat_pump.py
12
heat_pump.py
@@ -7,7 +7,7 @@ import math
|
|||||||
|
|
||||||
class HeatPump:
|
class HeatPump:
|
||||||
def __init__(self, device_name: str, ip_address: str, port: int = 502,
|
def __init__(self, device_name: str, ip_address: str, port: int = 502,
|
||||||
excel_path: str = "modbus_registers/heat_pump_registers_modbus.xlsx",
|
excel_path: str = "modbus_registers/heat_pump_registers.xlsx",
|
||||||
sheet_name: str = "Register_Map"):
|
sheet_name: str = "Register_Map"):
|
||||||
self.device_name = device_name
|
self.device_name = device_name
|
||||||
self.ip = ip_address
|
self.ip = ip_address
|
||||||
@@ -160,14 +160,10 @@ class HeatPump:
|
|||||||
# Optional filter
|
# Optional filter
|
||||||
# if self._is_invalid_sentinel(value):
|
# if self._is_invalid_sentinel(value):
|
||||||
# continue
|
# continue
|
||||||
|
value = float(value)
|
||||||
desc = meta.get("desc") or ""
|
desc = meta.get("desc") or ""
|
||||||
label = f"{address} - {desc}".strip(" -")
|
field_name = f"{address} - {desc}".strip(" -")
|
||||||
|
data[field_name] = float(value)
|
||||||
data[label] = value
|
|
||||||
tag = meta.get("tag")
|
|
||||||
if tag:
|
|
||||||
data[tag] = value
|
|
||||||
|
|
||||||
print(f"Adresse {address} - {desc}: {value}")
|
print(f"Adresse {address} - {desc}: {value}")
|
||||||
|
|
||||||
|
|||||||
2
main.py
2
main.py
@@ -23,7 +23,7 @@ db = DataBaseInflux(
|
|||||||
url="http://192.168.1.146:8086",
|
url="http://192.168.1.146:8086",
|
||||||
token="Cw_naEZyvJ3isiAh1P4Eq3TsjcHmzzDFS7SlbKDsS6ZWL04fMEYixWqtNxGThDdG27S9aW5g7FP9eiq5z1rsGA==",
|
token="Cw_naEZyvJ3isiAh1P4Eq3TsjcHmzzDFS7SlbKDsS6ZWL04fMEYixWqtNxGThDdG27S9aW5g7FP9eiq5z1rsGA==",
|
||||||
org="allmende",
|
org="allmende",
|
||||||
bucket="allmende_db"
|
bucket="allmende_db_v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
hp_master = HeatPump(device_name='hp_master', ip_address='10.0.0.10', port=502)
|
hp_master = HeatPump(device_name='hp_master', ip_address='10.0.0.10', port=502)
|
||||||
|
|||||||
Reference in New Issue
Block a user