15 Commits

Author SHA1 Message Date
Nils Reiners
98302b9af5 heat pump slave added 2025-09-28 20:21:54 +02:00
Nils Reiners
f3de1f9280 mode as binary 2025-09-25 21:45:09 +02:00
Nils Reiners
ecd0180483 debug 2025-09-25 21:30:42 +02:00
Nils Reiners
1784b7c283 storing sg ready mode to db 2025-09-25 21:24:45 +02:00
Nils Reiners
b066658eb0 controller implemented and tested 2025-09-25 21:16:51 +02:00
Nils Reiners
0bcf8a2d8c inverter and meter seem to run 2025-09-18 14:14:53 +02:00
Nils Reiners
397935f51a minor changes 2025-09-16 22:55:13 +02:00
Nils Reiners
8eda3bc954 reading out registers corrected 2025-09-16 22:46:42 +02:00
Nils Reiners
b9cba11be7 cleaned up 2025-09-16 12:57:37 +02:00
Nils Reiners
5319a299be inverter was included 2025-09-16 12:52:27 +02:00
Nils Reiners
2186c4d7db inverter integrated for testing 2025-09-14 10:52:50 +02:00
Nils Reiners
7df61fd6c1 Shelly updated 2025-05-26 21:31:28 +02:00
Nils Reiners
0734f7a810 Shelly added 2025-05-26 21:08:16 +02:00
Nils Reiners
65a75e061b it runs 2025-04-26 22:31:14 +01:00
Nils Reiners
974ec43f10 influx database added 2025-04-26 23:13:22 +02:00
34 changed files with 585 additions and 238 deletions

.gitignore (vendored, deleted): 178 lines

@@ -1,178 +0,0 @@
# Allmende EMS specifics
terminal_log
modbus_log.csv
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc

.idea/.gitignore (generated, vendored, new file): 3 lines

@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml

.idea inspection profile (new file): 15 lines

@@ -0,0 +1,15 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ignoredPackages">
        <value>
          <list size="2">
            <item index="0" class="java.lang.String" itemvalue="pandas" />
            <item index="1" class="java.lang.String" itemvalue="Pyomo" />
          </list>
        </value>
      </option>
    </inspection_tool>
  </profile>
</component>

.idea inspection profile settings (new file): 6 lines

@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>

.idea/misc.xml (generated, new file): 6 lines

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Black">
    <option name="sdkName" value="Python 3.12 (waermepumpen_logger)" />
  </component>
</project>

.idea/modules.xml (generated, new file): 8 lines

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/waermepumpen_logger.iml" filepath="$PROJECT_DIR$/.idea/waermepumpen_logger.iml" />
    </modules>
  </component>
</project>

.idea/vcs.xml (generated, new file): 6 lines

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

.idea/waermepumpen_logger.iml (generated, new file): 8 lines

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="jdk" jdkName="Python 3.12 (waermepumpen_logger)" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>

README: 38 lines

@@ -11,10 +11,42 @@ What needs to be done on the Raspberry Pi before the tool can run.
- pip install -r requirements.txt
How to run the script:
3) How to run the script for testing:
- nohup python main.py > terminal_log 2>&1 &
nohup python main.py > terminal_log 2>&1 &
To read the terminal_log while the script is running:
- tail -f terminal_log
tail -f terminal_log
4) Set up and run the EMS as a systemd service:
create:
/etc/systemd/system/allmende_ems.service
insert:
[Unit]
Description=Allmende EMS Python Script
After=network.target
[Service]
WorkingDirectory=/home/pi/projects/allmende_ems
ExecStart=/home/pi/allmende_ems/bin/python3.11 /home/pi/projects/allmende_ems/main.py
Restart=always
RestartSec=5
StandardOutput=journal
StandardError=journal
[Install]
WantedBy=multi-user.target
manage the service with the following commands:
Once:
sudo systemctl daemon-reload
sudo systemctl start allmende_ems.service
sudo systemctl enable allmende_ems.service
While running:
sudo systemctl status allmende_ems.service
sudo systemctl restart allmende_ems.service
sudo systemctl stop allmende_ems.service
journalctl -u allmende_ems.service

12 binary files changed (contents not shown).

data_base_csv.py (deleted): 46 lines

@@ -1,46 +0,0 @@
import csv
import os
import tempfile
import shutil
class DataBaseCsv:
    def __init__(self, filename: str):
        self.filename = filename
    def store_data(self, data: dict):
        new_fields = list(data.keys())
        # If file does not exist or is empty → create new file with header
        if not os.path.exists(self.filename) or os.path.getsize(self.filename) == 0:
            with open(self.filename, mode='w', newline='') as csv_file:
                writer = csv.DictWriter(csv_file, fieldnames=new_fields)
                writer.writeheader()
                writer.writerow(data)
            return
        # If file exists → read existing header and data
        with open(self.filename, mode='r', newline='') as csv_file:
            reader = csv.DictReader(csv_file)
            existing_fields = reader.fieldnames
            existing_data = list(reader)
        # Merge old and new fields (keep original order, add new ones)
        all_fields = existing_fields.copy()
        for field in new_fields:
            if field not in all_fields:
                all_fields.append(field)
        # Write to a temporary file with updated header
        with tempfile.NamedTemporaryFile(mode='w', delete=False, newline='', encoding='utf-8') as tmp_file:
            writer = csv.DictWriter(tmp_file, fieldnames=all_fields)
            writer.writeheader()
            # Write old rows with updated field list
            for row in existing_data:
                writer.writerow({field: row.get(field, '') for field in all_fields})
            # Write new data row
            writer.writerow({field: data.get(field, '') for field in all_fields})
        # Replace original file with updated temporary file
        shutil.move(tmp_file.name, self.filename)

data_base_influx.py (new file): 28 lines

@@ -0,0 +1,28 @@
from influxdb_client import InfluxDBClient, Point, WritePrecision
from datetime import datetime
class DataBaseInflux:
    def __init__(self, url: str, token: str, org: str, bucket: str):
        self.url = url
        self.token = token
        self.org = org
        self.bucket = bucket
        self.client = InfluxDBClient(url=self.url, token=self.token, org=self.org)
        self.write_api = self.client.write_api()
    def store_data(self, device_name: str, data: dict):
        measurement = device_name  # use the device name as the measurement name
        point = Point(measurement)
        # store all key/value pairs as fields
        for key, value in data.items():
            point = point.field(key, value)
        # set the timestamp to now
        point = point.time(datetime.utcnow(), WritePrecision.NS)
        # write the point to InfluxDB
        self.write_api.write(bucket=self.bucket, org=self.org, record=point)
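Note on the writer above: in the influxdb-client library, write_api() called without arguments returns a batching writer, so points may sit in a buffer briefly before they show up in the bucket. A minimal sketch of a synchronous alternative (only the constructor line changes; URL and org taken from main.py, token replaced by a placeholder):

from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS

# Sketch: a synchronous write API flushes every store_data() call immediately.
client = InfluxDBClient(url="http://192.168.1.146:8086", token="<token>", org="allmende")
write_api = client.write_api(write_options=SYNCHRONOUS)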

energysystem.py (new file): 25 lines

@@ -0,0 +1,25 @@
class EnergySystem:
    def __init__(self):
        self.components = []
    def add_components(self, *args):
        for comp in args:
            self.components.append(comp)
    def get_state_and_store_to_database(self, db):
        state = {}
        for comp in self.components:
            component_state = comp.get_state()
            state[comp.device_name] = component_state
            db.store_data(comp.device_name, component_state)
        return state
    def get_component_by_name(self, name):
        for comp in self.components:
            if comp.device_name == name:
                return comp
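A minimal usage sketch of EnergySystem with hypothetical stand-ins for a component and a database (a real run wires in HeatPump, SolaredgeMeter and DataBaseInflux as main.py does):

class DummyMeter:
    # Hypothetical component: exposes device_name and get_state() like the real classes.
    device_name = 'dummy_meter'
    def get_state(self):
        return {'Zeit': '2025-09-25 21:00:00', 'power_w': 1500.0}

class PrintDb:
    # Hypothetical database stand-in with the same store_data() signature as DataBaseInflux.
    def store_data(self, device_name, data):
        print(device_name, data)

es = EnergySystem()
es.add_components(DummyMeter())
state = es.get_state_and_store_to_database(PrintDb())
# state == {'dummy_meter': {'Zeit': '2025-09-25 21:00:00', 'power_w': 1500.0}}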

heat_pump.py

@@ -3,15 +3,17 @@ import pandas as pd
import time
class HeatPump:
    def __init__(self, ip_address: str):
    def __init__(self, device_name: str, ip_address: str, port: int=502):
        self.device_name = device_name
        self.ip = ip_address
        self.port = port
        self.client = None
        self.connect_to_modbus()
        self.registers = None
        self.get_registers()
    def connect_to_modbus(self):
        port = 502
        port = self.port
        self.client = ModbusTcpClient(self.ip, port=port)
        try:
            if not self.client.connect():
@@ -25,7 +27,7 @@ class HeatPump:
    def get_registers(self):
        # Excel file with the input register information
        excel_path = "data/ModBus TCPIP 1.17(1).xlsx"
        excel_path = "modbus_registers/heat_pump_registers.xlsx"
        xls = pd.ExcelFile(excel_path)
        df_input_registers = xls.parse('04 Input Register')
@@ -42,7 +44,7 @@
            for _, row in df_clean.iterrows()
        }
    def get_data(self):
    def get_state(self):
        data = {}
        data['Zeit'] = time.strftime('%Y-%m-%d %H:%M:%S')
        for address, info in self.registers.items():

main.py: 41 lines

@@ -1,17 +1,46 @@
import time
from datetime import datetime
from data_base_csv import DataBaseCsv
from data_base_influx import DataBaseInflux
from heat_pump import HeatPump
from pv_inverter import PvInverter
from solaredge_meter import SolaredgeMeter
from shelly_pro_3m import ShellyPro3m
from energysystem import EnergySystem
from sg_ready_controller import SgReadyController
interval = 10  # e.g. every 10 seconds
# For the dev system, run in a terminal: ssh -N -L 127.0.0.1:8111:10.0.0.10:502 pi@192.168.1.146
# For the productive system, change the IP address in the heat pump to '10.0.0.10' and the port to 502
db = DataBaseCsv('modbus_log.csv')
hp = HeatPump(ip_address='10.0.0.10')
interval_seconds = 10
es = EnergySystem()
db = DataBaseInflux(
    url="http://192.168.1.146:8086",
    token="Cw_naEZyvJ3isiAh1P4Eq3TsjcHmzzDFS7SlbKDsS6ZWL04fMEYixWqtNxGThDdG27S9aW5g7FP9eiq5z1rsGA==",
    org="allmende",
    bucket="allmende_db"
)
hp_master = HeatPump(device_name='hp_master', ip_address='10.0.0.10', port=502)
hp_slave = HeatPump(device_name='hp_slave', ip_address='10.0.0.11', port=502)
shelly = ShellyPro3m(device_name='wohnung_2_6', ip_address='192.168.1.121')
wr = PvInverter(device_name='solaredge_master', ip_address='192.168.1.112')
meter = SolaredgeMeter(device_name='solaredge_meter', ip_address='192.168.1.112')
es.add_components(hp_master, hp_slave, shelly, wr, meter)
controller = SgReadyController(es)
while True:
    now = datetime.now()
    if now.second % interval == 0 and now.microsecond < 100_000:
        db.store_data(hp.get_data())
    if now.second % interval_seconds == 0 and now.microsecond < 100_000:
        state = es.get_state_and_store_to_database(db)
        mode = controller.perform_action(heat_pump_name='hp_master', meter_name='solaredge_meter', state=state)
        if mode == 'mode1':
            mode_as_binary = 0
        else:
            mode_as_binary = 1
        db.store_data('sg_ready', {'mode': mode_as_binary})
    time.sleep(0.1)
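One detail worth noting in the loop above: perform_action() returns None whenever the exported power stays between 0 W and 10 kW, and the if/else then logs that case as 1. A small sketch of an explicit mapping; skipping the write on None is an assumption, not the author's logic:

# Hypothetical mapping; mode is 'mode1', 'mode2' or None at this point.
MODE_TO_BINARY = {'mode1': 0, 'mode2': 1}
mode_as_binary = MODE_TO_BINARY.get(mode)
if mode_as_binary is not None:  # skip the database write when no mode was set
    db.store_data('sg_ready', {'mode': mode_as_binary})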

2 binary files changed (contents not shown).

pv_inverter.py (new file): 139 lines

@@ -0,0 +1,139 @@
import time
import struct
import pandas as pd
from typing import Dict, Any, List, Tuple, Optional
from pymodbus.client import ModbusTcpClient
EXCEL_PATH = "modbus_registers/pv_inverter_registers.xlsx"
# Upper bound: only addresses below this value (exclusive) are read
MAX_ADDR_EXCLUSIVE = 40121
class PvInverter:
    def __init__(self, device_name: str, ip_address: str, port: int = 502, unit: int = 1):
        self.device_name = device_name
        self.ip = ip_address
        self.port = port
        self.unit = unit
        self.client: Optional[ModbusTcpClient] = None
        self.registers: Dict[int, Dict[str, Any]] = {}  # addr -> {"desc": ..., "type": ...}
        self.connect_to_modbus()
        self.load_registers(EXCEL_PATH)
    # ---------- Connection ----------
    def connect_to_modbus(self):
        self.client = ModbusTcpClient(self.ip, port=self.port, timeout=3.0, retries=3)
        if not self.client.connect():
            print("❌ Verbindung zu Wechselrichter fehlgeschlagen.")
            raise SystemExit(1)
        print("✅ Verbindung zu Wechselrichter hergestellt.")
    def close(self):
        if self.client:
            self.client.close()
            self.client = None
    # ---------- Register list ----------
    def load_registers(self, excel_path: str):
        xls = pd.ExcelFile(excel_path)
        df = xls.parse()
        # Adjust column names here if necessary:
        cols = ["MB Adresse", "Beschreibung", "Variabel Typ"]
        df = df[cols].dropna()
        df["MB Adresse"] = df["MB Adresse"].astype(int)
        # 1) Pre-filter: only keep addresses below MAX_ADDR_EXCLUSIVE
        df = df[df["MB Adresse"] < MAX_ADDR_EXCLUSIVE]
        self.registers = {
            int(row["MB Adresse"]): {
                "desc": str(row["Beschreibung"]).strip(),
                "type": str(row["Variabel Typ"]).strip()
            }
            for _, row in df.iterrows()
        }
    # ---------- Low-level reading ----------
    def _try_read(self, fn_name: str, address: int, count: int) -> Optional[List[int]]:
        fn = getattr(self.client, fn_name)
        # pymodbus 3.8.x uses 'slave='; the fallbacks do no harm
        for kwargs in (dict(address=address, count=count, slave=self.unit),
                       dict(address=address, count=count)):
            try:
                res = fn(**kwargs)
                if res is None or (hasattr(res, "isError") and res.isError()):
                    continue
                return res.registers
            except TypeError:
                continue
        return None
    def _read_any(self, address: int, count: int) -> Optional[List[int]]:
        regs = self._try_read("read_holding_registers", address, count)
        if regs is None:
            regs = self._try_read("read_input_registers", address, count)
        return regs
    # ---------- Decoding ----------
    @staticmethod
    def _to_i16(u16: int) -> int:
        return struct.unpack(">h", struct.pack(">H", u16))[0]
    @staticmethod
    def _to_f32_from_two(u16_hi: int, u16_lo: int, msw_first: bool = True) -> float:
        b = struct.pack(">HH", u16_hi, u16_lo) if msw_first else struct.pack(">HH", u16_lo, u16_hi)
        return struct.unpack(">f", b)[0]
    # Helper: how many 16-bit registers does this type need?
    @staticmethod
    def _word_count_for_type(rtype: str) -> int:
        rt = (rtype or "").lower()
        # Adjust to the types used in your Excel sheet:
        if "uint32" in rt or "real" in rt or "float" in rt or "string(32)" in rt:
            return 2
        # Default: 1 word (e.g. int16/uint16)
        return 1
    def read_one(self, address_excel: int, rtype: str) -> Optional[float]:
        """
        Reads one value according to its type ('INT' or 'REAL' etc.).
        Only registers below MAX_ADDR_EXCLUSIVE are read.
        """
        addr = int(address_excel)
        words = self._word_count_for_type(rtype)
        # 2) Hard limit: the highest register touched must stay below MAX_ADDR_EXCLUSIVE
        if addr + words - 1 >= MAX_ADDR_EXCLUSIVE:
            # Skip, because the read would touch registers at or above the limit
            return None
        if words == 2:
            regs = self._read_any(addr, 2)
            if not regs or len(regs) < 2:
                return None
            # The existing logic interprets two words as a float32:
            return self._to_f32_from_two(regs[0], regs[1])
        else:
            regs = self._read_any(addr, 1)
            if not regs:
                return None
            return float(self._to_i16(regs[0]))
    def get_state(self) -> Dict[str, Any]:
        """
        Reads ALL registers from self.registers and returns a dict.
        Makes sure that no address (including multi-word reads) at or above
        MAX_ADDR_EXCLUSIVE is read.
        """
        data = {"Zeit": time.strftime("%Y-%m-%d %H:%M:%S")}
        for address, meta in sorted(self.registers.items()):
            words = self._word_count_for_type(meta["type"])
            # 3) Additional guard at iteration level:
            if address + words - 1 >= MAX_ADDR_EXCLUSIVE:
                continue
            val = self.read_one(address, meta["type"])
            if val is None:
                continue
            key = f"{address} - {meta['desc']}"
            data[key] = val
        return data
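The two-word decode above packs the first register as the most-significant word. A short worked example of what _to_f32_from_two does, with illustrative register values:

import struct

# 230.5 encoded as IEEE-754 float32 is 0x43668000, i.e. the two 16-bit
# registers 0x4366 (MSW) and 0x8000 (LSW).
hi, lo = 0x4366, 0x8000
value = struct.unpack(">f", struct.pack(">HH", hi, lo))[0]
print(value)  # 230.5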

requirements.txt

@@ -1,3 +1,4 @@
pymodbus~=3.8.6
pandas
openpyxl
openpyxl
sshtunnel

sg_ready_controller.py (new file): 65 lines

@@ -0,0 +1,65 @@
from pymodbus.client import ModbusTcpClient
class SgReadyController:
    def __init__(self, es):
        self.es = es
    def perform_action(self, heat_pump_name, meter_name, state):
        hp = self.es.get_component_by_name(heat_pump_name)
        meter_values = state[meter_name]
        power_to_grid = meter_values['40206 - M_AC_Power'] * 10 ** meter_values['40210 - M_AC_Power_SF']
        mode = None
        if power_to_grid > 10000:
            mode = 'mode2'
            self.switch_sg_ready_mode(hp.ip, hp.port, mode)
        elif power_to_grid < 0:
            mode = 'mode1'
            self.switch_sg_ready_mode(hp.ip, hp.port, mode)
        return mode
    def switch_sg_ready_mode(self, ip, port, mode):
        """
        Register 300: 1 = bus control, 0 = hardware contacts
        Registers 301 & 302:
            0-0 = no offset
            0-1 = boiler and heating offset
            1-1 = boiler offset + electric heater setpoint raised
            1-0 = SG utility lock (EVU-Sperre)
        :param ip:
        :param mode:
            'mode1' = [True, False, False] => SG Ready deactivated
            'mode2' = [True, False, True]  => SG Ready activated for heat pump only
            'mode3' = [True, True, True]   => SG Ready activated for heat pump and heating rod
        :return:
        """
        client = ModbusTcpClient(ip, port=port)
        if not client.connect():
            print("Verbindung zur Wärmepumpe fehlgeschlagen.")
            return
        mode_code = None
        if mode == 'mode1':
            mode_code = [True, False, False]
        elif mode == 'mode2':
            mode_code = [True, False, True]
        elif mode == 'mode3':
            mode_code = [True, True, True]
        else:
            print('Incorrect or no string for mode!')
            client.close()
            return  # nothing is written for an unknown mode
        try:
            response_300 = client.write_coil(300, mode_code[0])
            response_301 = client.write_coil(301, mode_code[1])
            response_302 = client.write_coil(302, mode_code[2])
            # Optional: check the responses
            for addr, resp in zip([300, 301, 302], [response_300, response_301, response_302]):
                if resp.isError():
                    print(f"Fehler beim Schreiben von Coil {addr}: {resp}")
                else:
                    print(f"Coil {addr} erfolgreich geschrieben.")
        finally:
            client.close()
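The threshold check in perform_action() relies on SunSpec-style scale factors: the raw power register is multiplied by ten raised to the signed scale-factor register. A worked example with illustrative values:

raw_power = 1234      # '40206 - M_AC_Power' (raw register value)
scale_factor = 1      # '40210 - M_AC_Power_SF' (signed exponent)
power_to_grid = raw_power * 10 ** scale_factor
print(power_to_grid)  # 12340 W, above the 10 kW threshold, so 'mode2' would be set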

shelly_pro_3m.py (new file): 64 lines

@@ -0,0 +1,64 @@
import struct
from pymodbus.client import ModbusTcpClient
import pandas as pd
import time
class ShellyPro3m:
    def __init__(self, device_name: str, ip_address: str, port: int=502):
        self.device_name = device_name
        self.ip = ip_address
        self.port = port
        self.client = None
        self.connect_to_modbus()
        self.registers = None
        self.get_registers()
    def connect_to_modbus(self):
        port = self.port
        self.client = ModbusTcpClient(self.ip, port=port)
        try:
            if not self.client.connect():
                print("Verbindung zum Shelly-Logger fehlgeschlagen.")
                exit(1)
            print("Verbindung zum Shelly-Logger erfolgreich.")
        except KeyboardInterrupt:
            print("Beendet durch Benutzer (Ctrl+C).")
        finally:
            self.client.close()
    def get_registers(self):
        # Excel file with the input register information
        excel_path = "modbus_registers/shelly_pro_3m_registers.xlsx"
        xls = pd.ExcelFile(excel_path)
        df_input_registers = xls.parse()
        # Clean up the relevant columns
        df_clean = df_input_registers[['MB Adresse', 'Beschreibung', 'Variabel Typ']].dropna()
        df_clean['MB Adresse'] = df_clean['MB Adresse'].astype(int)
        # Build a dictionary from the Excel sheet
        self.registers = {
            row['MB Adresse']: {
                'desc': row['Beschreibung'],
                'type': 'REAL' if row['Variabel Typ'] == 'REAL' else 'INT'
            }
            for _, row in df_clean.iterrows()
        }
    def get_state(self):
        data = {}
        data['Zeit'] = time.strftime('%Y-%m-%d %H:%M:%S')
        for address, info in self.registers.items():
            reg_type = info['type']
            result = self.client.read_input_registers(address, count=2 if reg_type == 'REAL' else 1)
            if result.isError():
                print(f"Fehler beim Lesen von Adresse {address}: {result}")
                continue
            packed = struct.pack(">HH", result.registers[1], result.registers[0])
            value = round(struct.unpack(">f", packed)[0], 2)
            print(f"Adresse {address} - {info['desc']}: {value}")
            data[f"{address} - {info['desc']}"] = value
        return data
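A caveat on get_state() above: for an entry typed 'INT' only one register is read, but the decode still indexes result.registers[1], which would raise an IndexError. This never triggers as long as every row in the Shelly sheet is typed 'REAL'; a guarded decode could look like this sketch (treating INT entries as plain unsigned 16-bit values is an assumption):

# Hypothetical guard inside the loop of get_state():
if reg_type == 'REAL':
    # the existing code packs registers[1] as the high word (word-swapped float)
    packed = struct.pack(">HH", result.registers[1], result.registers[0])
    value = round(struct.unpack(">f", packed)[0], 2)
else:
    value = result.registers[0]  # single 16-bit register, taken as-is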

solaredge_meter.py (new file): 134 lines

@@ -0,0 +1,134 @@
import time
import struct
import pandas as pd
from typing import Dict, Any, List, Tuple, Optional
from pymodbus.client import ModbusTcpClient
EXCEL_PATH = "modbus_registers/pv_inverter_registers.xlsx"
# Lower bound: only addresses at or above this value are taken from the register sheet
MIN_ADDR_INCLUSIVE = 40121
ADDRESS_SHIFT = 50
class SolaredgeMeter:
    def __init__(self, device_name: str, ip_address: str, port: int = 502, unit: int = 1):
        self.device_name = device_name
        self.ip = ip_address
        self.port = port
        self.unit = unit
        self.client: Optional[ModbusTcpClient] = None
        self.registers: Dict[int, Dict[str, Any]] = {}  # addr -> {"desc": ..., "type": ...}
        self.connect_to_modbus()
        self.load_registers(EXCEL_PATH)
    # ---------- Connection ----------
    def connect_to_modbus(self):
        self.client = ModbusTcpClient(self.ip, port=self.port, timeout=3.0, retries=3)
        if not self.client.connect():
            print("❌ Verbindung zu Zähler fehlgeschlagen.")
            raise SystemExit(1)
        print("✅ Verbindung zu Zähler hergestellt.")
    def close(self):
        if self.client:
            self.client.close()
            self.client = None
    # ---------- Register list ----------
    def load_registers(self, excel_path: str):
        xls = pd.ExcelFile(excel_path)
        df = xls.parse()
        # Adjust column names here if necessary:
        cols = ["MB Adresse", "Beschreibung", "Variabel Typ"]
        df = df[cols].dropna()
        df["MB Adresse"] = df["MB Adresse"].astype(int)
        # 1) Pre-filter: only keep addresses at or above MIN_ADDR_INCLUSIVE
        df = df[df["MB Adresse"] >= MIN_ADDR_INCLUSIVE]
        self.registers = {
            int(row["MB Adresse"]): {
                "desc": str(row["Beschreibung"]).strip(),
                "type": str(row["Variabel Typ"]).strip()
            }
            for _, row in df.iterrows()
        }
    # ---------- Low-level reading ----------
    def _try_read(self, fn_name: str, address: int, count: int) -> Optional[List[int]]:
        fn = getattr(self.client, fn_name)
        # pymodbus 3.8.x uses 'slave='; the fallbacks do no harm
        shifted_addr = address + ADDRESS_SHIFT
        for kwargs in (dict(address=shifted_addr, count=count, slave=self.unit),
                       dict(address=shifted_addr, count=count)):
            try:
                res = fn(**kwargs)
                if res is None or (hasattr(res, "isError") and res.isError()):
                    continue
                return res.registers
            except TypeError:
                continue
        return None
    def _read_any(self, address: int, count: int) -> Optional[List[int]]:
        regs = self._try_read("read_holding_registers", address, count)
        if regs is None:
            regs = self._try_read("read_input_registers", address, count)
        return regs
    # ---------- Decoding ----------
    @staticmethod
    def _to_i16(u16: int) -> int:
        return struct.unpack(">h", struct.pack(">H", u16))[0]
    @staticmethod
    def _to_f32_from_two(u16_hi: int, u16_lo: int, msw_first: bool = True) -> float:
        b = struct.pack(">HH", u16_hi, u16_lo) if msw_first else struct.pack(">HH", u16_lo, u16_hi)
        return struct.unpack(">f", b)[0]
    # Helper: how many 16-bit registers does this type need?
    @staticmethod
    def _word_count_for_type(rtype: str) -> int:
        rt = (rtype or "").lower()
        # Adjust to the types used in your Excel sheet:
        if "uint32" in rt or "real" in rt or "float" in rt or "string(32)" in rt:
            return 2
        # Default: 1 word (e.g. int16/uint16)
        return 1
    def read_one(self, address_excel: int, rtype: str) -> Optional[float]:
        """
        Reads one value according to its type ('INT' or 'REAL' etc.).
        """
        addr = int(address_excel)
        words = self._word_count_for_type(rtype)
        if words == 2:
            regs = self._read_any(addr, 2)
            if not regs or len(regs) < 2:
                return None
            # The existing logic interprets two words as a float32:
            return self._to_f32_from_two(regs[0], regs[1])
        else:
            regs = self._read_any(addr, 1)
            if not regs:
                return None
            return float(self._to_i16(regs[0]))
    def get_state(self) -> Dict[str, Any]:
        """
        Reads ALL registers from self.registers and returns a dict.
        """
        data = {"Zeit": time.strftime("%Y-%m-%d %H:%M:%S")}
        for address, meta in sorted(self.registers.items()):
            words = self._word_count_for_type(meta["type"])
            val = self.read_one(address, meta["type"])
            if val is None:
                continue
            key = f"{address} - {meta['desc']}"
            data[key] = val
        return data
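Unlike PvInverter, this class reuses the inverter register sheet and shifts every address by ADDRESS_SHIFT = 50 at read time, while the keys returned by get_state() keep the unshifted sheet address. That is exactly the key format SgReadyController looks up. A small illustration:

ADDRESS_SHIFT = 50
sheet_address = 40206                   # address as listed in the Excel sheet
read_address = sheet_address + ADDRESS_SHIFT
print(read_address)                     # 40256, the register actually requested
print(f"{sheet_address} - M_AC_Power")  # '40206 - M_AC_Power', the key stored in the state dict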