Compare commits
8 Commits
98302b9af5...standard_r

| Author | SHA1 | Date |
|---|---|---|
| | 8642a057f0 | |
| | ce14d59d51 | |
| | 4727364048 | |
| | 666eb211a3 | |
| | ba6ff9f6c3 | |
| | 9ccb1e042b | |
| | a5bcfca39a | |
| | a1f9e29134 | |
@@ -0,0 +1,7 @@
from heat_pump import HeatPump

hp_master = HeatPump(device_name='hp_master', ip_address='10.0.0.10', port=502, excel_path="../modbus_registers/heat_pump_registers.xlsx")

state = hp_master.get_state()

print(state)
49
component_test_connectors/heat_pump_connection_sg_ready.py
Normal file
@@ -0,0 +1,49 @@
from pymodbus.client import ModbusTcpClient


def switch_sg_ready_mode(ip, port, mode):
    """
    Register 300: 1 = BUS, 0 = hardware contacts
    Registers 301 & 302:
        0-0 = no offset
        0-1 = boiler and heating offset
        1-1 = boiler offset + electric heating element setpoint raised
        1-0 = SG utility (EVU) lock
    :param ip:
    :param mode:
        'mode1' = [True, False, False] => SG Ready deactivated
        'mode2' = [True, False, True] => SG Ready activated for heat pump only
        'mode3' = [True, True, True] => SG Ready activated for heat pump and heating rod
    :return:
    """
    client = ModbusTcpClient(ip, port=port)
    if not client.connect():
        print("Verbindung zur Wärmepumpe fehlgeschlagen.")
        return

    mode_code = None
    if mode == 'mode1':
        mode_code = [True, False, False]
    elif mode == 'mode2':
        mode_code = [True, False, True]
    elif mode == 'mode3':
        mode_code = [True, True, True]
    else:
        print('Incorrect or missing string for mode!')
        return

    try:
        response_300 = client.write_coil(300, mode_code[0])
        response_301 = client.write_coil(301, mode_code[1])
        response_302 = client.write_coil(302, mode_code[2])

        # Optional: check the write responses
        for addr, resp in zip([300, 301, 302], [response_300, response_301, response_302]):
            if resp.isError():
                print(f"Fehler beim Schreiben von Coil {addr}: {resp}")
            else:
                print(f"Coil {addr} erfolgreich geschrieben.")

    finally:
        client.close()


if __name__ == '__main__':
    switch_sg_ready_mode(ip='10.0.0.10', port=502, mode='mode2')
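A minimal read-back sketch to verify the switch, assuming the device also exposes coils 300-302 as readable (the diff only shows writes, so treat this as an unverified assumption):

```python
from pymodbus.client import ModbusTcpClient

client = ModbusTcpClient('10.0.0.10', port=502)
if client.connect():
    rr = client.read_coils(300, count=3)
    if not rr.isError():
        # rr.bits is padded to a multiple of 8; only the first three bits matter here.
        print("SG-Ready coils 300-302:", rr.bits[:3])
    client.close()
```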
@@ -1,5 +1,7 @@
 from influxdb_client import InfluxDBClient, Point, WritePrecision
 from datetime import datetime
+import datetime as dt
+import pandas as pd

 class DataBaseInflux:
     def __init__(self, url: str, token: str, org: str, bucket: str):
@@ -25,4 +27,22 @@ class DataBaseInflux:
         # Write the point to InfluxDB
         self.write_api.write(bucket=self.bucket, org=self.org, record=point)

+    def store_forecasts(self, forecast_name: str, data: pd.Series):
+
+        measurement = forecast_name
+        run_tag = dt.datetime.now(dt.timezone.utc).replace(second=0, microsecond=0).isoformat(timespec="minutes")
+
+        pts = []
+
+        series = pd.to_numeric(data, errors="coerce").dropna()
+
+        for ts, val in series.items():
+            pts.append(
+                Point(measurement)
+                .tag("run", run_tag)
+                .field("value", float(val))
+                .time(ts.to_pydatetime(), WritePrecision.S)
+            )
+
+        self.write_api.write(bucket=self.bucket, org=self.org, record=pts)
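For orientation, a minimal sketch of calling the new store_forecasts method; the connection parameters below are placeholders, not values taken from this diff:

```python
import pandas as pd
from data_base_influx import DataBaseInflux

# Placeholder credentials (assumed, not part of the diff)
db = DataBaseInflux(url="http://localhost:8086", token="my-token", org="my-org", bucket="my-bucket")

# Hourly forecast values indexed by timezone-aware timestamps
idx = pd.date_range("2025-06-21 12:00", periods=3, freq="h", tz="Europe/Berlin")
forecast = pd.Series([1200.0, 1350.5, 980.0], index=idx)

db.store_forecasts("pv_forecast", forecast)
```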
BIN
forecaster/__pycache__/weather_forecaster.cpython-312.pyc
Normal file
Binary file not shown.
61
forecaster/weather_forecaster.py
Normal file
@@ -0,0 +1,61 @@
#!/usr/bin/env python3
import time
import datetime as dt
import requests
from zoneinfo import ZoneInfo
from matplotlib import pyplot as plt
import pandas as pd

TZ = "Europe/Berlin"
DAYS = 2

OPEN_METEO_URL = "https://api.open-meteo.com/v1/forecast"


class WeatherForecaster:
    def __init__(self, latitude, longitude):
        self.lat = latitude
        self.lon = longitude

    def get_hourly_forecast(self, start_hour, days):
        start_hour_local = start_hour
        end_hour_local = start_hour_local + dt.timedelta(days=days)

        params = {
            "latitude": self.lat,
            "longitude": self.lon,
            "hourly": ["temperature_2m", "shortwave_radiation", "wind_speed_10m"],
            "timezone": TZ,
            "start_hour": start_hour_local.strftime("%Y-%m-%dT%H:%M"),
            "end_hour": end_hour_local.strftime("%Y-%m-%dT%H:%M")
        }

        h = requests.get(OPEN_METEO_URL, params=params).json()["hourly"]

        time_stamps = h["time"]
        time_stamps = [
            dt.datetime.fromisoformat(t).replace(tzinfo=ZoneInfo(TZ))
            for t in time_stamps
        ]

        weather = pd.DataFrame(index=time_stamps)
        weather["ghi"] = h["shortwave_radiation"]
        weather["temp_air"] = h["temperature_2m"]
        weather["wind_speed"] = h["wind_speed_10m"]

        return weather


if __name__ == '__main__':

    weather_forecast = WeatherForecaster(latitude=48.041, longitude=7.862)
    while True:
        now = dt.datetime.now()
        secs = 60 - now.second  # (60 - now.minute) * 60 - now.second  # seconds until the full hour
        time.sleep(secs)

        now_local = dt.datetime.now()
        start_hour_local = (now_local + dt.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
        weather = weather_forecast.get_hourly_forecast(start_hour_local, DAYS)
        plt.plot(weather.index, weather["temp_air"])
        plt.show()
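A minimal one-off call, assuming network access to the Open-Meteo endpoint above; the coordinates are the ones used in the module's __main__ block:

```python
import datetime as dt
from forecaster.weather_forecaster import WeatherForecaster

wf = WeatherForecaster(latitude=48.041, longitude=7.862)
start = (dt.datetime.now() + dt.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
weather = wf.get_hourly_forecast(start, days=2)

# Columns consumed downstream by the PVWatts simulator: ghi, temp_air, wind_speed
print(weather.head())
```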
203
heat_pump.py
@@ -1,64 +1,177 @@
 from pymodbus.client import ModbusTcpClient
 import pandas as pd
 import time
+import struct
+import math


 class HeatPump:
-    def __init__(self, device_name: str, ip_address: str, port: int=502):
+    def __init__(self, device_name: str, ip_address: str, port: int = 502,
+                 excel_path: str = "modbus_registers/heat_pump_registers_modbus.xlsx",
+                 sheet_name: str = "Register_Map"):
         self.device_name = device_name
         self.ip = ip_address
         self.port = port
-        self.client = None
-        self.connect_to_modbus()
-        self.registers = None
-        self.get_registers()
+        self.client = ModbusTcpClient(self.ip, port=self.port)

-    def connect_to_modbus(self):
-        port = self.port
-        self.client = ModbusTcpClient(self.ip, port=port)
+        self.excel_path = excel_path
+        self.sheet_name = sheet_name
+        self.registers = self.get_registers()

+    # -------------
+    # Connection
+    # -------------
+    def connect(self) -> bool:
+        ok = self.client.connect()
+        if not ok:
+            print("Verbindung zur Wärmepumpe fehlgeschlagen.")
+        return ok

+    def close(self):
         try:
-            if not self.client.connect():
-                print("Verbindung zur Wärmepumpe fehlgeschlagen.")
-                exit(1)
-            print("Verbindung zur Wärmepumpe erfolgreich.")
-        except KeyboardInterrupt:
-            print("Beendet durch Benutzer (Ctrl+C).")
-        finally:
             self.client.close()
+        except Exception:
+            pass

-    def get_registers(self):
-        # Excel file with the input register information
-        excel_path = "modbus_registers/heat_pump_registers.xlsx"
-        xls = pd.ExcelFile(excel_path)
-        df_input_registers = xls.parse('04 Input Register')
+    # -------------
+    # Excel parsing
+    # -------------
+    def get_registers(self) -> dict:
+        df = pd.read_excel(self.excel_path, sheet_name=self.sheet_name)
+        df = df[df["Register_Type"].astype(str).str.upper() == "IR"].copy()

-        # Clean up the relevant columns
-        df_clean = df_input_registers[['MB Adresse', 'Variable', 'Beschreibung', 'Variabel Typ']].dropna()
-        df_clean['MB Adresse'] = df_clean['MB Adresse'].astype(int)
+        df["Address"] = df["Address"].astype(int)
+        df["Length"] = df["Length"].astype(int)
+        df["Data_Type"] = df["Data_Type"].astype(str).str.upper()
+        df["Byteorder"] = df["Byteorder"].astype(str).str.upper()

-        # Build the dictionary from the Excel sheet
-        self.registers = {
-            row['MB Adresse']: {
-                'desc': row['Beschreibung'],
-                'type': 'REAL' if row['Variabel Typ'] == 'REAL' else 'INT'
+        df["Scaling"] = df.get("Scaling", 1.0)
+        df["Scaling"] = df["Scaling"].fillna(1.0).astype(float)

+        df["Offset"] = df.get("Offset", 0.0)
+        df["Offset"] = df["Offset"].fillna(0.0).astype(float)

+        regs = {}
+        for _, row in df.iterrows():
+            regs[int(row["Address"])] = {
+                "length": int(row["Length"]),
+                "data_type": row["Data_Type"],
+                "byteorder": row["Byteorder"],
+                "scaling": float(row["Scaling"]),
+                "offset": float(row["Offset"]),
+                "tag": str(row.get("Tag_Name", "")).strip(),
+                "desc": "" if pd.isna(row.get("Description")) else str(row.get("Description")).strip(),
             }
-            for _, row in df_clean.iterrows()
-        }
+        return regs

-    def get_state(self):
-        data = {}
-        data['Zeit'] = time.strftime('%Y-%m-%d %H:%M:%S')
-        for address, info in self.registers.items():
-            reg_type = info['type']
-            result = self.client.read_input_registers(address, count=2 if reg_type == 'REAL' else 1)
-            if result.isError():
-                print(f"Fehler beim Lesen von Adresse {address}: {result}")
-                continue
+    # -------------
+    # Byteorder handling
+    # -------------
+    @staticmethod
+    def _registers_to_bytes(registers: list[int], byteorder_code: str) -> bytes:
+        """
+        registers: list of uint16 (0..65535), as returned by pymodbus.
+        byteorder_code: AB, ABCD, CDAB, BADC, DCBA (as defined in the template)
+        Returns: bytes in the order required by struct.unpack.
+        """
+        code = (byteorder_code or "ABCD").upper()

-            if reg_type == 'REAL':
-                value = result.registers[0] / 10.0
-            else:
-                value = result.registers[0]
+        # Per register: 16 bit => two bytes (MSB, LSB)
+        words = [struct.pack(">H", r & 0xFFFF) for r in registers]  # big endian per word

+        if len(words) == 1:
+            w = words[0]  # b'\xAA\xBB'
+            if code in ("AB", "ABCD", "CDAB"):
+                return w
+            if code == "BADC":  # byte swap
+                return w[::-1]
+            if code == "DCBA":  # byte swap (identical to BADC for 16 bit)
+                return w[::-1]
+            return w

+        # 32 bit (2 words) or 64 bit (4 words): apply word/byte swaps
+        # words[0] = high word bytes, words[1] = low word bytes (as read in Modbus order)
+        if code == "ABCD":
+            ordered = words
+        elif code == "CDAB":
+            # word swap
+            ordered = words[1:] + words[:1]
+        elif code == "BADC":
+            # byte swap within each word
+            ordered = [w[::-1] for w in words]
+        elif code == "DCBA":
+            # word + byte swap
+            ordered = [w[::-1] for w in (words[1:] + words[:1])]
+        else:
+            ordered = words

+        return b"".join(ordered)

+    @staticmethod
+    def _decode_by_type(raw_bytes: bytes, data_type: str):
+        dt = (data_type or "").upper()

+        # struct: > = big endian, < = little endian
+        # raw_bytes already arrives in the right order, so ">" is used consistently.
+        if dt == "UINT16":
+            return struct.unpack(">H", raw_bytes[:2])[0]
+        if dt == "INT16":
+            return struct.unpack(">h", raw_bytes[:2])[0]
+        if dt == "UINT32":
+            return struct.unpack(">I", raw_bytes[:4])[0]
+        if dt == "INT32":
+            return struct.unpack(">i", raw_bytes[:4])[0]
+        if dt == "FLOAT32":
+            return struct.unpack(">f", raw_bytes[:4])[0]
+        if dt == "FLOAT64":
+            return struct.unpack(">d", raw_bytes[:8])[0]

+        raise ValueError(f"Unbekannter Data_Type: {dt}")

+    def _decode_value(self, registers: list[int], meta: dict):
+        raw = self._registers_to_bytes(registers, meta["byteorder"])
+        val = self._decode_by_type(raw, meta["data_type"])
+        return (val * meta["scaling"]) + meta["offset"]

+    # -------------
+    # Reading
+    # -------------
+    def get_state(self) -> dict:
+        data = {"Zeit": time.strftime("%Y-%m-%d %H:%M:%S")}

+        if not self.connect():
+            data["error"] = "connect_failed"
+            return data

+        try:
+            for address, meta in self.registers.items():
+                count = int(meta["length"])
+                result = self.client.read_input_registers(address, count=count)
+                if result.isError():
+                    print(f"Fehler beim Lesen von Adresse {address}: {result}")
+                    continue

+                try:
+                    value = self._decode_value(result.registers, meta)
+                except Exception as e:
+                    print(f"Decode-Fehler an Adresse {address} ({meta.get('tag','')}): {e}")
+                    continue

+                # Optional filter
+                # if self._is_invalid_sentinel(value):
+                #     continue

+                desc = meta.get("desc") or ""
+                label = f"{address} - {desc}".strip(" -")

+                data[label] = value
+                tag = meta.get("tag")
+                if tag:
+                    data[tag] = value

+                print(f"Adresse {address} - {desc}: {value}")

+        finally:
+            self.close()

-            print(f"Adresse {address} - {info['desc']}: {value}")
-            data[f"{address} - {info['desc']}"] = value
         return data
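A minimal sketch of what the new byte-order helpers do, with register values constructed by hand instead of read from the device (the word split below assumes the common CDAB convention named in the docstring):

```python
import struct
from heat_pump import HeatPump

# The float 21.5 encoded as FLOAT32 and transported as two Modbus registers
# in CDAB word order (low word first).
abcd = struct.pack(">f", 21.5)              # canonical big-endian bytes A B C D
registers_cdab = [
    struct.unpack(">H", abcd[2:4])[0],      # low word (C D) sent first
    struct.unpack(">H", abcd[0:2])[0],      # high word (A B) sent second
]

raw = HeatPump._registers_to_bytes(registers_cdab, "CDAB")
value = HeatPump._decode_by_type(raw, "FLOAT32")
print(value)  # -> 21.5
```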
36
main.py
@@ -1,12 +1,16 @@
 import time
 from datetime import datetime
 from data_base_influx import DataBaseInflux
+from forecaster.weather_forecaster import WeatherForecaster
 from heat_pump import HeatPump
 from pv_inverter import PvInverter
+from simulators.pv_plant_simulator import PvWattsSubarrayConfig, PvWattsPlant
 from solaredge_meter import SolaredgeMeter
 from shelly_pro_3m import ShellyPro3m
 from energysystem import EnergySystem
 from sg_ready_controller import SgReadyController
+from pvlib.location import Location
+import datetime as dt

 # For the dev system, run in a terminal: ssh -N -L 127.0.0.1:8111:10.0.0.10:502 pi@192.168.1.146
 # For the production system, change the IP address in the heat pump to '10.0.0.10' and the port to 502
@@ -31,6 +35,24 @@ meter = SolaredgeMeter(device_name='solaredge_meter', ip_address='192.168.1.112'
 es.add_components(hp_master, hp_slave, shelly, wr, meter)
 controller = SgReadyController(es)

+# FORECASTING
+latitude = 48.041
+longitude = 7.862
+TZ = "Europe/Berlin"
+HORIZON_DAYS = 2
+weather_forecaster = WeatherForecaster(latitude=latitude, longitude=longitude)
+site = Location(latitude=latitude, longitude=longitude, altitude=35, tz=TZ, name="Gundelfingen")

+p_module = 435
+upper_roof_north = PvWattsSubarrayConfig(name="north", pdc0_w=(29+29+21)*p_module, tilt_deg=10, azimuth_deg=20, dc_loss=0.02, ac_loss=0.01)
+upper_roof_south = PvWattsSubarrayConfig(name="south", pdc0_w=(29+21+20)*p_module, tilt_deg=10, azimuth_deg=200, dc_loss=0.02, ac_loss=0.01)
+upper_roof_east = PvWattsSubarrayConfig(name="east", pdc0_w=7*p_module, tilt_deg=10, azimuth_deg=110, dc_loss=0.02, ac_loss=0.01)
+upper_roof_west = PvWattsSubarrayConfig(name="west", pdc0_w=7*p_module, tilt_deg=10, azimuth_deg=290, dc_loss=0.02, ac_loss=0.01)
+cfgs = [upper_roof_north, upper_roof_south, upper_roof_east, upper_roof_west]
+pv_plant = PvWattsPlant(site, cfgs)

+now = datetime.now()
+next_forecast_at = (now + dt.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
 while True:
     now = datetime.now()
     if now.second % interval_seconds == 0 and now.microsecond < 100_000:
@@ -42,5 +64,19 @@ while True:
         else:
             mode_as_binary = 1
         db.store_data('sg_ready', {'mode': mode_as_binary})

+    if now >= next_forecast_at:
+        # Forecast start: from the next full hour
+        start_hour_local = (now + dt.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
+        weather = weather_forecaster.get_hourly_forecast(start_hour_local, HORIZON_DAYS)
+        total = pv_plant.get_power(weather)
+        db.store_forecasts('pv_forecast', total)

+        # Schedule the next run (always on the full hour).
+        # If delays caused several hours to be missed, catch up:
+        while next_forecast_at <= now:
+            next_forecast_at = (next_forecast_at + dt.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)

     time.sleep(0.1)
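A standalone illustration of the catch-up logic above: if the loop stalls past several full hours, next_forecast_at is advanced until it lies in the future again (the timestamps below are made up):

```python
import datetime as dt

next_forecast_at = dt.datetime(2025, 6, 21, 14, 0)
now = dt.datetime(2025, 6, 21, 16, 20)

while next_forecast_at <= now:
    next_forecast_at = (next_forecast_at + dt.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)

print(next_forecast_at)  # 2025-06-21 17:00:00
```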
BIN
modbus_registers/_modbus_register_template.xlsx
Normal file
Binary file not shown.
BIN
modbus_registers/raw_register_tables/heat_pump_registers.xlsx
Normal file
Binary file not shown.
@@ -1,4 +1,5 @@
 pymodbus~=3.8.6
 pandas
 openpyxl
-sshtunnel
+sshtunnel
+pvlib
BIN
simulators/__pycache__/pv_plant_simulator.cpython-312.pyc
Normal file
Binary file not shown.
210
simulators/pv_plant_simulator.py
Normal file
@@ -0,0 +1,210 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional, Dict, List, Literal, Tuple, Union

import numpy as np
import pandas as pd
import pvlib
import matplotlib.pyplot as plt
from pvlib.location import Location
from pvlib.pvsystem import PVSystem
from pvlib.modelchain import ModelChain

SeriesOrArray = Union[pd.Series, np.ndarray]

# ----------------------------- Configuration -----------------------------

@dataclass
class PvWattsSubarrayConfig:
    name: str
    pdc0_w: float              # STC DC power [W]
    tilt_deg: float            # tilt (0 = horizontal)
    azimuth_deg: float         # azimuth (180 = south)
    gamma_pdc: float = -0.004  # temperature coefficient [1/K]
    eta_inv_nom: float = 0.96  # nominal inverter efficiency
    albedo: float = 0.2        # ground reflectance

    # Flat losses (PVWatts losses)
    dc_loss: float = 0.0
    ac_loss: float = 0.0
    soiling: float = 0.0

    # Model
    transposition_model: Literal["perez","haydavies","isotropic","klucher","reindl"] = "perez"


# ------------------------------ Subarray ---------------------------------

class PvWattsSubarray:
    """
    A single subarray modelled with pvlib.ModelChain (PVWatts).
    Automatically derives DNI/DHI from GHI (ERBS method)
    and uses a SAPM temperature model.
    """
    def __init__(self, cfg: PvWattsSubarrayConfig, location: Location):
        self.cfg = cfg
        self.location = location
        self._mc: Optional[ModelChain] = None

    # ---------------------------------------------------------------------
    def _create_modelchain(self) -> ModelChain:
        """Create a pvlib.ModelChain instance with PVWatts parameters."""
        temp_params = pvlib.temperature.TEMPERATURE_MODEL_PARAMETERS["sapm"]["open_rack_glass_polymer"]

        system = PVSystem(
            surface_tilt=self.cfg.tilt_deg,
            surface_azimuth=self.cfg.azimuth_deg,
            module_parameters={"pdc0": self.cfg.pdc0_w, "gamma_pdc": self.cfg.gamma_pdc},
            inverter_parameters={"pdc0": self.cfg.pdc0_w, "eta_inv_nom": self.cfg.eta_inv_nom},
            albedo=self.cfg.albedo,
            temperature_model_parameters=temp_params,
            module_type="glass_polymer",
            racking_model="open_rack",
        )

        mc = ModelChain(
            system, self.location,
            transposition_model=self.cfg.transposition_model,
            solar_position_method="nrel_numpy",
            airmass_model="kastenyoung1989",
            dc_model="pvwatts",
            ac_model="pvwatts",
            aoi_model="physical",
            spectral_model=None,
            losses_model="pvwatts",
            temperature_model="sapm",
        )

        mc.losses_parameters = {
            "dc_loss": float(self.cfg.dc_loss),
            "ac_loss": float(self.cfg.ac_loss),
            "soiling": float(self.cfg.soiling),
        }

        self._mc = mc
        return mc

    # ---------------------------------------------------------------------
    def calc_dni_and_dhi(self, weather: pd.DataFrame) -> pd.DataFrame:
        """
        Derives DNI & DHI from GHI using the ERBS method.
        Returns a new DataFrame with 'ghi', 'dni', 'dhi'.
        """
        if "ghi" not in weather:
            raise ValueError("Wetterdaten benötigen mindestens 'ghi'.")
        # Determine solar position
        sp = self.location.get_solarposition(weather.index)
        erbs = pvlib.irradiance.erbs(weather["ghi"], sp["zenith"], weather.index)
        out = weather.copy()
        out["dni"] = erbs["dni"].clip(lower=0)
        out["dhi"] = erbs["dhi"].clip(lower=0)
        return out

    # ---------------------------------------------------------------------
    def _prepare_weather(self, weather: pd.DataFrame) -> pd.DataFrame:
        """Ensures complete columns (ghi, dni, dhi, temp_air, wind_speed)."""
        if "ghi" not in weather or "temp_air" not in weather:
            raise ValueError("weather benötigt Spalten: 'ghi' und 'temp_air'.")

        w = weather.copy()

        # Check the time zone
        if w.index.tz is None:
            w.index = w.index.tz_localize(self.location.tz)
        else:
            if str(w.index.tz) != str(self.location.tz):
                w = w.tz_convert(self.location.tz)

        # Wind default
        if "wind_speed" not in w:
            w["wind_speed"] = 1.0

        # Add DNI/DHI if missing (always via ERBS)
        if "dni" not in w or "dhi" not in w:
            w = self.calc_dni_and_dhi(w)

        return w

    # ---------------------------------------------------------------------
    def get_power(self, weather: pd.DataFrame) -> pd.Series:
        """
        Computes AC power from weather data.
        """
        w = self._prepare_weather(weather)
        mc = self._create_modelchain()
        mc.run_model(weather=w)
        return mc.results.ac.rename(self.cfg.name)


# ------------------------------- Plant ------------------------------------

class PvWattsPlant:
    """
    A PV plant consisting of several subarrays that share one weather DataFrame.
    """
    def __init__(self, site: Location, subarray_cfgs: List[PvWattsSubarrayConfig]):
        self.site = site
        self.subs: Dict[str, PvWattsSubarray] = {c.name: PvWattsSubarray(c, site) for c in subarray_cfgs}

    def get_power(
        self,
        weather: pd.DataFrame,
        *,
        return_breakdown: bool = False
    ) -> pd.Series | Tuple[pd.Series, Dict[str, pd.Series]]:
        """Compute the total power and optionally the per-subarray breakdown."""
        parts: Dict[str, pd.Series] = {name: sub.get_power(weather) for name, sub in self.subs.items()}

        # Build a common index
        idx = list(parts.values())[0].index
        for s in parts.values():
            idx = idx.intersection(s.index)
        parts = {k: v.reindex(idx).fillna(0.0) for k, v in parts.items()}

        total = sum(parts.values())
        total.name = "total_ac"

        if return_breakdown:
            return total, parts
        return total


# --------------------------- Example usage -----------------------------
if __name__ == "__main__":
    # Site
    site = Location(latitude=52.52, longitude=13.405, altitude=35, tz="Europe/Berlin", name="Berlin")

    # Time axis: 1 day, 15-minute resolution
    times = pd.date_range("2025-06-21 00:00", "2025-06-21 23:45", freq="15min", tz=site.tz)

    # Dummy weather
    ghi = 1000 * np.clip(np.sin(np.linspace(0, np.pi, len(times)))**1.2, 0, None)
    temp_air = 16 + 8 * np.clip(np.sin(np.linspace(-np.pi/2, np.pi/2, len(times))), 0, None)
    wind = np.full(len(times), 1.0)
    weather = pd.DataFrame(index=times)
    weather["ghi"] = ghi
    weather["temp_air"] = temp_air
    weather["wind_speed"] = wind

    # Two subarrays
    cfgs = [
        PvWattsSubarrayConfig(name="Sued_30", pdc0_w=6000, tilt_deg=30, azimuth_deg=180, dc_loss=0.02, ac_loss=0.01),
        PvWattsSubarrayConfig(name="West_20", pdc0_w=4000, tilt_deg=20, azimuth_deg=270, soiling=0.02),
    ]
    plant = PvWattsPlant(site, cfgs)

    # Simulation
    total, parts = plant.get_power(weather, return_breakdown=True)

    # Plot
    plt.figure(figsize=(10, 6))
    plt.plot(total.index, total / 1000, label="Gesamtleistung (AC)", linewidth=2, color="black")
    for name, s in parts.items():
        plt.plot(s.index, s / 1000, label=name)
    plt.title("PV-Leistung (PVWatts, ERBS-Methode für DNI/DHI)")
    plt.ylabel("Leistung [kW]")
    plt.xlabel("Zeit")
    plt.legend()
    plt.grid(True, linestyle="--", alpha=0.5)
    plt.tight_layout()
    plt.show()