Finish solar server prototype

parent 40eba650da
commit c52268be2c
194  server/apsystems_ecur/APSystemsECUR.py → server/APSystemsECUR.py  (Executable file → Normal file)
@@ -1,21 +1,14 @@
-#!/usr/bin/env python3
+# Stolen from https://github.com/ksheumaker/homeassistant-apsystems_ecur
+# cheers to ksheumaker and HAEdwin
 
-import asyncio
 import socket
 import binascii
-import datetime
-import json
 import logging
-
-_LOGGER = logging.getLogger(__name__)
-
-from pprint import pprint
 
 class APSystemsInvalidData(Exception):
     pass
 
 class APSystemsECUR:
 
     def __init__(self, ipaddr, port=8899, raw_ecu=None, raw_inverter=None):
         self.ipaddr = ipaddr
         self.port = port
@@ -30,18 +23,18 @@ class APSystemsECUR:
         self.cmd_attempts = 3
 
         # how big of a buffer to read at a time from the socket
-        self.recv_size = 4096
+        self.recv_size = 4096*16
 
-        self.qs1_ids = [ "802", "801" ]
-        self.yc600_ids = [ "406", "407", "408", "409" ]
-        self.yc1000_ids = [ "501", "502", "503", "504" ]
+        self.qs1_ids = [ '802', '801' ]
+        self.yc600_ids = [ '406', '407', '408', '409' ]
+        self.yc1000_ids = [ '501', '502', '503', '504' ]
 
-        self.cmd_suffix = "END\n"
-        self.ecu_query = "APS1100160001" + self.cmd_suffix
-        self.inverter_query_prefix = "APS1100280002"
+        self.cmd_suffix = 'END\n'
+        self.ecu_query = 'APS1100160001' + self.cmd_suffix
+        self.inverter_query_prefix = 'APS1100280002'
         self.inverter_query_suffix = self.cmd_suffix
 
-        self.inverter_signal_prefix = "APS1100280030"
+        self.inverter_signal_prefix = 'APS1100280030'
         self.inverter_signal_suffix = self.cmd_suffix
 
         self.inverter_byte_start = 26
@@ -65,94 +58,54 @@ class APSystemsECUR:
         self.reader = None
         self.writer = None
 
-    def dump(self):
-        print(f"ECU : {self.ecu_id}")
-        print(f"Firmware : {self.firmware}")
-        print(f"TZ : {self.timezone}")
-        print(f"Qty of inverters : {self.qty_of_inverters}")
-
-    async def async_read_from_socket(self):
-        self.read_buffer = b''
-        end_data = None
-
-        while end_data != self.recv_suffix:
-            self.read_buffer += await self.reader.read(self.recv_size)
-            size = len(self.read_buffer)
-            end_data = self.read_buffer[size-4:]
-
-        return self.read_buffer
-
-    async def async_send_read_from_socket(self, cmd):
-        current_attempt = 0
-        while current_attempt < self.cmd_attempts:
-            current_attempt += 1
-
-            self.writer.write(cmd.encode('utf-8'))
-            await self.writer.drain()
-
-            try:
-                return await asyncio.wait_for(self.async_read_from_socket(),
-                    timeout=self.timeout)
-            except Exception as err:
-                pass
-
-        self.writer.close()
-        raise APSystemsInvalidData(f"Incomplete data from ECU after {current_attempt} attempts, cmd='{cmd.rstrip()}' data={self.read_buffer}")
-
-    async def async_query_ecu(self):
-        self.reader, self.writer = await asyncio.open_connection(self.ipaddr, self.port)
-        _LOGGER.info(f"Connected to {self.ipaddr} {self.port}")
-
-        cmd = self.ecu_query
-        self.ecu_raw_data = await self.async_send_read_from_socket(cmd)
-
-        self.process_ecu_data()
-
-        cmd = self.inverter_query_prefix + self.ecu_id + self.inverter_query_suffix
-        self.inverter_raw_data = await self.async_send_read_from_socket(cmd)
-
-        cmd = self.inverter_signal_prefix + self.ecu_id + self.inverter_signal_suffix
-        self.inverter_raw_signal = await self.async_send_read_from_socket(cmd)
-
-        self.writer.close()
-
-        data = self.process_inverter_data()
-        data["ecu_id"] = self.ecu_id
-        data["today_energy"] = self.today_energy
-        data["lifetime_energy"] = self.lifetime_energy
-        data["current_power"] = self.current_power
-
-        return(data)
-
     def query_ecu(self):
 
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         sock.connect((self.ipaddr,self.port))
 
+        logging.debug('ecu query: %s', self.ecu_query)
         sock.sendall(self.ecu_query.encode('utf-8'))
         self.ecu_raw_data = sock.recv(self.recv_size)
 
+        logging.debug('raw ecu data: %s', self.ecu_raw_data)
+        logging.debug('ecu id: %s', self.ecu_raw_data[13:25])
+        power = self.ecu_raw_data[31:35]
+        logging.debug('current power: %s', int.from_bytes(power, byteorder='big'))
+
         self.process_ecu_data()
 
+        sock.shutdown(socket.SHUT_RDWR)
+        sock.close()
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        sock.connect((self.ipaddr,self.port))
+
         cmd = self.inverter_query_prefix + self.ecu_id + self.inverter_query_suffix
+        logging.debug('inverter data cmd: %s', cmd)
         sock.sendall(cmd.encode('utf-8'))
         self.inverter_raw_data = sock.recv(self.recv_size)
 
+        logging.debug('raw inverter data: %s', self.inverter_raw_data)
+
+        sock.shutdown(socket.SHUT_RDWR)
+        sock.close()
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        sock.connect((self.ipaddr,self.port))
+
         cmd = self.inverter_signal_prefix + self.ecu_id + self.inverter_signal_suffix
+        logging.debug('inverter signal cmd: %s', cmd)
         sock.sendall(cmd.encode('utf-8'))
         self.inverter_raw_signal = sock.recv(self.recv_size)
 
+        logging.debug('raw signal data: %s', self.inverter_raw_signal)
+
         sock.shutdown(socket.SHUT_RDWR)
         sock.close()
 
         data = self.process_inverter_data()
-        data["ecu_id"] = self.ecu_id
-        data["today_energy"] = self.today_energy
-        data["lifetime_energy"] = self.lifetime_energy
-        data["current_power"] = self.current_power
+        data['ecu_id'] = self.ecu_id
+        data['today_energy'] = self.today_energy
+        data['lifetime_energy'] = self.lifetime_energy
+        data['current_power'] = self.current_power
 
         return(data)
@@ -161,21 +114,21 @@ class APSystemsECUR:
             return int(binascii.b2a_hex(codec[(start):(start+2)]), 16)
         except ValueError as err:
             debugdata = binascii.b2a_hex(codec)
-            raise APSystemsInvalidData(f"Unable to convert binary to int location={start} data={debugdata}")
+            raise APSystemsInvalidData(f'Unable to convert binary to int location={start} data={debugdata}')
 
     def aps_short(self, codec, start):
         try:
             return int(binascii.b2a_hex(codec[(start):(start+1)]), 8)
         except ValueError as err:
             debugdata = binascii.b2a_hex(codec)
-            raise APSystemsInvalidData(f"Unable to convert binary to short int location={start} data={debugdata}")
+            raise APSystemsInvalidData(f'Unable to convert binary to short int location={start} data={debugdata}')
 
     def aps_double(self, codec, start):
         try:
             return int (binascii.b2a_hex(codec[(start):(start+4)]), 16)
         except ValueError as err:
             debugdata = binascii.b2a_hex(codec)
-            raise APSystemsInvalidData(f"Unable to convert binary to double location={start} data={debugdata}")
+            raise APSystemsInvalidData(f'Unable to convert binary to double location={start} data={debugdata}')
 
     def aps_bool(self, codec, start):
         return bool(binascii.b2a_hex(codec[(start):(start+2)]))
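Note: the aps_int/aps_double helpers above decode fixed-width big-endian fields by hex-encoding a byte slice and parsing it as base 16. A minimal worked example of that decoding step (the two raw bytes are invented for illustration):

    import binascii

    raw = b"\x01\x2c"                        # two bytes taken from an ECU response
    value = int(binascii.b2a_hex(raw), 16)   # b"012c" parsed as hex, same as aps_int does
    assert value == 300                      # e.g. a 300 W power field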
@@ -188,38 +141,41 @@ class APSystemsECUR:
     def aps_timestamp(self, codec, start, amount):
         timestr=str(binascii.b2a_hex(codec[start:(start+amount)]))[2:(amount+2)]
-        return timestr[0:4]+"-"+timestr[4:6]+"-"+timestr[6:8]+" "+timestr[8:10]+":"+timestr[10:12]+":"+timestr[12:14]
+        return timestr[0:4]+'-'+timestr[4:6]+'-'+timestr[6:8]+' '+timestr[8:10]+':'+timestr[10:12]+':'+timestr[12:14]
 
     def check_ecu_checksum(self, data, cmd):
         datalen = len(data) - 1
+        logging.debug('datalen: %s', datalen)
         try:
             checksum = int(data[5:9])
+            logging.debug('checksum: %s', checksum)
         except ValueError as err:
             debugdata = binascii.b2a_hex(data)
-            raise APSystemsInvalidData(f"Error getting checksum int from '{cmd}' data={debugdata}")
+            raise APSystemsInvalidData(f'Error getting checksum int from {cmd} data={debugdata}')
 
         if datalen != checksum:
             debugdata = binascii.b2a_hex(data)
-            raise APSystemsInvalidData(f"Checksum on '{cmd}' failed checksum={checksum} datalen={datalen} data={debugdata}")
+            raise APSystemsInvalidData(f'Checksum on {cmd} failed checksum={checksum} datalen={datalen} data={debugdata}')
 
         start_str = self.aps_str(data, 0, 3)
         end_str = self.aps_str(data, len(data) - 4, 3)
 
         if start_str != 'APS':
             debugdata = binascii.b2a_hex(data)
-            raise APSystemsInvalidData(f"Result on '{cmd}' incorrect start signature '{start_str}' != APS data={debugdata}")
+            raise APSystemsInvalidData(f'Result on {cmd} incorrect start signature {start_str} != APS data={debugdata}')
 
         if end_str != 'END':
             debugdata = binascii.b2a_hex(data)
-            raise APSystemsInvalidData(f"Result on '{cmd}' incorrect end signature '{end_str}' != END data={debugdata}")
+            raise APSystemsInvalidData(f'Result on {cmd} incorrect end signature {end_str} != END data={debugdata}')
 
         return True
 
     def process_ecu_data(self, data=None):
+        logging.debug('Processing ECU data...')
         if not data:
             data = self.ecu_raw_data
 
-        self.check_ecu_checksum(data, "ECU Query")
+        self.check_ecu_checksum(data, 'ECU Query')
 
         self.ecu_id = self.aps_str(data, 13, 12)
         self.qty_of_inverters = self.aps_int(data, 46)
@@ -231,12 +187,13 @@ class APSystemsECUR:
 
     def process_signal_data(self, data=None):
+        logging.debug('Processing signal data...')
         signal_data = {}
 
         if not data:
             data = self.inverter_raw_signal
 
-        self.check_ecu_checksum(data, "Signal Query")
+        self.check_ecu_checksum(data, 'Signal Query')
 
         if not self.qty_of_inverters:
             return signal_data
@@ -255,10 +212,11 @@ class APSystemsECUR:
         return signal_data
 
     def process_inverter_data(self, data=None):
+        logging.debug('Processing inverter data...')
         if not data:
             data = self.inverter_raw_data
 
-        self.check_ecu_checksum(data, "Inverter data")
+        self.check_ecu_checksum(data, 'Inverter data')
 
         output = {}
 
@@ -266,9 +224,9 @@ class APSystemsECUR:
         inverter_qty = self.aps_int(data, 17)
 
         self.last_update = timestamp
-        output["timestamp"] = timestamp
-        output["inverter_qty"] = inverter_qty
-        output["inverters"] = {}
+        output['timestamp'] = timestamp
+        output['inverter_qty'] = inverter_qty
+        output['inverters'] = {}
 
         # this is the start of the loop of inverters
         location = self.inverter_byte_start
@@ -281,22 +239,22 @@ class APSystemsECUR:
             inv={}
 
             inverter_uid = self.aps_uid(data, location)
-            inv["uid"] = inverter_uid
+            inv['uid'] = inverter_uid
             location += 6
 
-            inv["online"] = self.aps_bool(data, location)
+            inv['online'] = self.aps_bool(data, location)
             location += 1
 
-            inv["unknown"] = self.aps_str(data, location, 2)
+            inv['unknown'] = self.aps_str(data, location, 2)
             location += 2
 
-            inv["frequency"] = self.aps_int(data, location) / 10
+            inv['frequency'] = self.aps_int(data, location) / 10
             location += 2
 
-            inv["temperature"] = self.aps_int(data, location) - 100
+            inv['temperature'] = self.aps_int(data, location) - 100
             location += 2
 
-            inv["signal"] = signal.get(inverter_uid, 0)
+            inv['signal'] = signal.get(inverter_uid, 0)
 
             # the first 3 digits determine the type of inverter
             inverter_type = inverter_uid[0:3]
@@ -313,11 +271,11 @@ class APSystemsECUR:
                 inv.update(channel_data)
 
             else:
-                raise APSystemsInvalidData(f"Unsupported inverter type {inverter_type}")
+                raise APSystemsInvalidData(f'Unsupported inverter type {inverter_type}')
 
             inverters[inverter_uid] = inv
 
-        output["inverters"] = inverters
+        output['inverters'] = inverters
         return (output)
 
     def process_yc1000(self, data, location):
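Note: for reference, process_inverter_data() returns a dict keyed by inverter UID, which query_ecu() then tops up with the ECU-level totals. An illustrative sketch of the shape (all values here are made up; the keys come from the code above):

    output = {
        'timestamp': '2021-06-20 12:35:00',
        'inverter_qty': 2,
        'inverters': {
            '406000001234': {
                'uid': '406000001234', 'online': True, 'unknown': '01',
                'frequency': 60.0, 'temperature': 35, 'signal': 90,
                'model': 'YC600', 'channel_qty': 2,
                'power': [120, 118], 'voltage': [240, 239],
            },
        },
    }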
@@ -349,10 +307,10 @@ class APSystemsECUR:
             voltages.append(voltage)
 
         output = {
-            "model" : "YC1000",
-            "channel_qty" : 4,
-            "power" : power,
-            "voltage" : voltages
+            'model' : 'YC1000',
+            'channel_qty' : 4,
+            'power' : power,
+            'voltage' : voltages
         }
 
         return (output, location)
@@ -381,10 +339,10 @@ class APSystemsECUR:
             voltages.append(voltage)
 
         output = {
-            "model" : "QS1",
-            "channel_qty" : 4,
-            "power" : power,
-            "voltage" : voltages
+            'model' : 'QS1',
+            'channel_qty' : 4,
+            'power' : power,
+            'voltage' : voltages
         }
 
         return (output, location)
@@ -402,12 +360,10 @@ class APSystemsECUR:
         location += 2
 
         output = {
-            "model" : "YC600",
-            "channel_qty" : 2,
-            "power" : power,
-            "voltage" : voltages,
+            'model' : 'YC600',
+            'channel_qty' : 2,
+            'power' : power,
+            'voltage' : voltages,
         }
 
         return (output, location)
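Note: with the async path removed, the class is now driven through the blocking query_ecu() call, which is how the rewritten main.py below uses it. A minimal usage sketch (the address matches the ECU_IP used later in main.py, but is otherwise just an example):

    from APSystemsECUR import APSystemsECUR, APSystemsInvalidData

    ecu = APSystemsECUR('192.168.69.103')   # example ECU address
    try:
        data = ecu.query_ecu()              # blocking TCP exchange on port 8899
        print(data['timestamp'], data['current_power'])
    except APSystemsInvalidData as err:
        print(f'Bad response from ECU: {err}')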
@@ -1,154 +0,0 @@
-import logging
-
-import voluptuous as vol
-import traceback
-from datetime import timedelta
-
-from .APSystemsECUR import APSystemsECUR, APSystemsInvalidData
-from homeassistant.helpers.discovery import load_platform
-import homeassistant.helpers.config_validation as cv
-from homeassistant.const import CONF_HOST
-from homeassistant.helpers.entity import Entity
-from homeassistant.util import Throttle
-
-from homeassistant.helpers.update_coordinator import (
-    CoordinatorEntity,
-    DataUpdateCoordinator,
-    UpdateFailed,
-)
-
-_LOGGER = logging.getLogger(__name__)
-
-from .const import DOMAIN
-
-CONF_INTERVAL = "interval"
-
-CONFIG_SCHEMA = vol.Schema({
-    DOMAIN : vol.Schema({
-        vol.Required(CONF_HOST): cv.string,
-        vol.Optional(CONF_INTERVAL) : cv.time_period_seconds
-    })
-}, extra=vol.ALLOW_EXTRA)
-
-PLATFORMS = [ "sensor", "binary_sensor" ]
-
-## handle all the communications with the ECUR class and deal with our need for caching, etc
-class ECUR():
-
-    def __init__(self, ipaddr):
-        self.ecu = APSystemsECUR(ipaddr)
-        self.cache_count = 0
-        self.cache_max = 5
-        self.data_from_cache = False
-        self.querying = True
-        self.error_messge = ""
-        self.cached_data = {}
-
-    async def stop_query(self):
-        self.querying = False
-
-    async def start_query(self):
-        self.querying = True
-
-    async def update(self):
-        data = {}
-
-        # if we aren't actively quering data, pull data form the cache
-        # this is so we can stop querying after sunset
-        if not self.querying:
-
-            _LOGGER.debug("Not querying ECU due to stopped")
-            data = self.cached_data
-            self.data_from_cache = True
-
-            data["data_from_cache"] = self.data_from_cache
-            data["querying"] = self.querying
-            return self.cached_data
-
-        _LOGGER.debug("Querying ECU")
-        try:
-            data = await self.ecu.async_query_ecu()
-            _LOGGER.debug("Got data from ECU")
-
-            # we got good results, so we store it and set flags about our
-            # cache state
-            self.cached_data = data
-            self.cache_count = 0
-            self.data_from_cache = False
-            self.error_message = ""
-
-        except APSystemsInvalidData as err:
-
-            msg = f"Using cached data from last successful communication from ECU. Error: {err}"
-            _LOGGER.warning(msg)
-
-            # we got invalid data, so we need to pull from cache
-            self.error_msg = msg
-            self.cache_count += 1
-            self.data_from_cache = True
-            data = self.cached_data
-
-            if self.cache_count > self.cache_max:
-                raise Exception(f"Error using cached data for more than {self.cache_max} times.")
-
-        except Exception as err:
-
-            msg = f"Using cached data from last successful communication from ECU. Error: {err}"
-            _LOGGER.warning(msg)
-
-            # we got invalid data, so we need to pull from cache
-            self.error_msg = msg
-            self.cache_count += 1
-            self.data_from_cache = True
-            data = self.cached_data
-
-            if self.cache_count > self.cache_max:
-                raise Exception(f"Error using cached data for more than {self.cache_max} times.")
-
-        data["data_from_cache"] = self.data_from_cache
-        data["querying"] = self.querying
-        _LOGGER.debug(f"Returning {data}")
-        return data
-
-async def async_setup(hass, config):
-    """ Setup the APsystems platform """
-    hass.data.setdefault(DOMAIN, {})
-
-    host = config[DOMAIN].get(CONF_HOST)
-    interval = config[DOMAIN].get(CONF_INTERVAL)
-    if not interval:
-        interval = timedelta(seconds=60)
-
-    ecu = ECUR(host)
-
-    coordinator = DataUpdateCoordinator(
-        hass,
-        _LOGGER,
-        name=DOMAIN,
-        update_method=ecu.update,
-        update_interval=interval,
-    )
-
-    await coordinator.async_refresh()
-
-    hass.data[DOMAIN] = {
-        "ecu" : ecu,
-        "coordinator" : coordinator
-    }
-
-    async def handle_stop_query(call):
-        await ecu.stop_query()
-        coordinator.async_refresh()
-
-    async def handle_start_query(call):
-        await ecu.start_query()
-        coordinator.async_refresh()
-
-    hass.services.async_register(DOMAIN, "start_query", handle_start_query)
-    hass.services.async_register(DOMAIN, "stop_query", handle_stop_query)
-
-    for component in PLATFORMS:
-        load_platform(hass, component, DOMAIN, {}, config)
-
-    return True
@@ -1,86 +0,0 @@
-"""Example integration using DataUpdateCoordinator."""
-
-from datetime import timedelta
-import logging
-
-import async_timeout
-
-from homeassistant.components.binary_sensor import (
-    BinarySensorEntity,
-)
-
-from homeassistant.helpers.update_coordinator import (
-    CoordinatorEntity,
-    DataUpdateCoordinator,
-    UpdateFailed,
-)
-
-from .const import (
-    DOMAIN,
-    RELOAD_ICON,
-    CACHE_ICON
-)
-
-_LOGGER = logging.getLogger(__name__)
-
-async def async_setup_platform(hass, config, add_entities, discovery_info=None):
-
-    ecu = hass.data[DOMAIN].get("ecu")
-    coordinator = hass.data[DOMAIN].get("coordinator")
-
-    sensors = [
-        APSystemsECUBinarySensor(coordinator, ecu, "data_from_cache",
-            label="Using Cached Data", icon=CACHE_ICON),
-        APSystemsECUBinarySensor(coordinator, ecu, "querying",
-            label="Querying Enabled", icon=RELOAD_ICON),
-    ]
-
-    add_entities(sensors)
-
-class APSystemsECUBinarySensor(CoordinatorEntity, BinarySensorEntity):
-
-    def __init__(self, coordinator, ecu, field, label=None, devclass=None, icon=None):
-
-        super().__init__(coordinator)
-
-        self.coordinator = coordinator
-
-        self._ecu = ecu
-        self._field = field
-        self._label = label
-        if not label:
-            self._label = field
-        self._icon = icon
-
-        self._name = f"ECU {self._label}"
-        self._state = None
-
-    @property
-    def unique_id(self):
-        return f"{self._ecu.ecu.ecu_id}_{self._field}"
-
-    @property
-    def name(self):
-        return self._name
-
-    @property
-    def is_on(self):
-        return self.coordinator.data.get(self._field)
-
-    @property
-    def icon(self):
-        return self._icon
-
-    @property
-    def device_state_attributes(self):
-
-        attrs = {
-            "ecu_id" : self._ecu.ecu.ecu_id,
-            "firmware" : self._ecu.ecu.firmware,
-            "last_update" : self._ecu.ecu.last_update
-        }
-        return attrs
@@ -1,7 +0,0 @@
-DOMAIN = 'apsystems_ecur'
-SOLAR_ICON = "mdi:solar-power"
-FREQ_ICON = "mdi:sine-wave"
-SIGNAL_ICON = "mdi:signal"
-RELOAD_ICON = "mdi:reload"
-CACHE_ICON = "mdi:cached"
-
@@ -1,10 +0,0 @@
-{
-  "codeowners": ["@ksheumaker"],
-  "config_flow": false,
-  "dependencies": [],
-  "documentation": "https://github.com/ksheumaker/homeassistant-apsystems_ecur",
-  "domain": "apsystems_ecur",
-  "name": "APSystems PV solar ECU-R",
-  "version": "1.0.1",
-  "requirements": []
-}
@@ -1,208 +0,0 @@
-"""Example integration using DataUpdateCoordinator."""
-
-from datetime import timedelta
-import logging
-
-import async_timeout
-
-from homeassistant.helpers.entity import Entity
-from homeassistant.helpers.update_coordinator import (
-    CoordinatorEntity,
-    DataUpdateCoordinator,
-    UpdateFailed,
-)
-
-from .const import (
-    DOMAIN,
-    SOLAR_ICON,
-    FREQ_ICON,
-    SIGNAL_ICON
-)
-
-from homeassistant.const import (
-    DEVICE_CLASS_TEMPERATURE,
-    DEVICE_CLASS_POWER,
-    DEVICE_CLASS_ENERGY,
-    DEVICE_CLASS_VOLTAGE,
-    DEVICE_CLASS_ENERGY,
-    ENERGY_KILO_WATT_HOUR,
-    POWER_WATT,
-    VOLT,
-    TEMP_CELSIUS,
-    PERCENTAGE,
-    FREQUENCY_HERTZ
-)
-
-_LOGGER = logging.getLogger(__name__)
-
-async def async_setup_platform(hass, config, add_entities, discovery_info=None):
-
-    ecu = hass.data[DOMAIN].get("ecu")
-    coordinator = hass.data[DOMAIN].get("coordinator")
-
-    sensors = [
-        APSystemsECUSensor(coordinator, ecu, "current_power",
-            label="Current Power", unit=POWER_WATT,
-            devclass=DEVICE_CLASS_POWER, icon=SOLAR_ICON),
-        APSystemsECUSensor(coordinator, ecu, "today_energy",
-            label="Today Energy", unit=ENERGY_KILO_WATT_HOUR,
-            devclass=DEVICE_CLASS_ENERGY, icon=SOLAR_ICON),
-        APSystemsECUSensor(coordinator, ecu, "lifetime_energy",
-            label="Lifetime Energy", unit=ENERGY_KILO_WATT_HOUR,
-            devclass=DEVICE_CLASS_ENERGY, icon=SOLAR_ICON),
-    ]
-
-    inverters = coordinator.data.get("inverters", {})
-    for uid,inv_data in inverters.items():
-        #_LOGGER.warning(f"Inverter {uid} {inv_data.get('channel_qty')}")
-        sensors.extend([
-            APSystemsECUInverterSensor(coordinator, ecu, uid,
-                "temperature", label="Temperature",
-                devclass=DEVICE_CLASS_TEMPERATURE, unit=TEMP_CELSIUS),
-            APSystemsECUInverterSensor(coordinator, ecu, uid,
-                "frequency", label="Frequency", unit=FREQUENCY_HERTZ,
-                devclass=None, icon=FREQ_ICON),
-            APSystemsECUInverterSensor(coordinator, ecu, uid,
-                "voltage", label="Voltage", unit=VOLT,
-                devclass=DEVICE_CLASS_VOLTAGE),
-            APSystemsECUInverterSensor(coordinator, ecu, uid,
-                "signal", label="Signal", unit=PERCENTAGE,
-                icon=SIGNAL_ICON)
-
-        ])
-        for i in range(0, inv_data.get("channel_qty", 0)):
-            sensors.append(
-                APSystemsECUInverterSensor(coordinator, ecu, uid, f"power",
-                    index=i, label=f"Power Ch {i+1}", unit=POWER_WATT,
-                    devclass=DEVICE_CLASS_POWER, icon=SOLAR_ICON)
-            )
-
-    add_entities(sensors)
-
-class APSystemsECUInverterSensor(CoordinatorEntity, Entity):
-    def __init__(self, coordinator, ecu, uid, field, index=0, label=None, icon=None, unit=None, devclass=None):
-
-        super().__init__(coordinator)
-
-        self.coordinator = coordinator
-
-        self._index = index
-        self._uid = uid
-        self._ecu = ecu
-        self._field = field
-        self._devclass = devclass
-        self._label = label
-        if not label:
-            self._label = field
-        self._icon = icon
-        self._unit = unit
-
-        self._name = f"Inverter {self._uid} {self._label}"
-        self._state = None
-
-    @property
-    def unique_id(self):
-        field = self._field
-        if self._index != None:
-            field = f"{field}_{self._index}"
-        return f"{self._ecu.ecu.ecu_id}_{self._uid}_{field}"
-
-    @property
-    def device_class(self):
-        return self._devclass
-
-    @property
-    def name(self):
-        return self._name
-
-    @property
-    def state(self):
-        #_LOGGER.warning(f"State called for {self._field}")
-        if self._field == "voltage":
-            return self.coordinator.data.get("inverters", {}).get(self._uid, {}).get("voltage", [])[0]
-        elif self._field == "power":
-            #_LOGGER.warning(f"POWER {self._uid} {self._index}")
-            return self.coordinator.data.get("inverters", {}).get(self._uid, {}).get("power", [])[self._index]
-        else:
-            return self.coordinator.data.get("inverters", {}).get(self._uid, {}).get(self._field)
-
-    @property
-    def icon(self):
-        return self._icon
-
-    @property
-    def unit_of_measurement(self):
-        return self._unit
-
-    @property
-    def device_state_attributes(self):
-
-        attrs = {
-            "ecu_id" : self._ecu.ecu.ecu_id,
-            "last_update" : self._ecu.ecu.last_update,
-        }
-        return attrs
-
-class APSystemsECUSensor(CoordinatorEntity, Entity):
-
-    def __init__(self, coordinator, ecu, field, label=None, icon=None, unit=None, devclass=None):
-
-        super().__init__(coordinator)
-
-        self.coordinator = coordinator
-
-        self._ecu = ecu
-        self._field = field
-        self._label = label
-        if not label:
-            self._label = field
-        self._icon = icon
-        self._unit = unit
-        self._devclass = devclass
-
-        self._name = f"ECU {self._label}"
-        self._state = None
-
-    @property
-    def unique_id(self):
-        return f"{self._ecu.ecu.ecu_id}_{self._field}"
-
-    @property
-    def name(self):
-        return self._name
-
-    @property
-    def device_class(self):
-        return self._devclass
-
-    @property
-    def state(self):
-        #_LOGGER.warning(f"State called for {self._field}")
-        return self.coordinator.data.get(self._field)
-
-    @property
-    def icon(self):
-        return self._icon
-
-    @property
-    def unit_of_measurement(self):
-        return self._unit
-
-    @property
-    def device_state_attributes(self):
-
-        attrs = {
-            "ecu_id" : self._ecu.ecu.ecu_id,
-            "firmware" : self._ecu.ecu.firmware,
-            "last_update" : self._ecu.ecu.last_update
-        }
-        return attrs
@@ -1,5 +0,0 @@
-start_query:
-  description: Start querying the ECU for new data (i.e. when sunrise)
-
-stop_query:
-  description: Stop querying the ECU for new data (i.e. when sunset)
@@ -1,25 +0,0 @@
-#!/usr/bin/env python3
-
-from APSystemsECUR import APSystemsECUR
-import time
-import asyncio
-from pprint import pprint
-
-ecu_ip = "192.168.0.251"
-sleep = 60
-
-loop = asyncio.get_event_loop()
-ecu = APSystemsECUR(ecu_ip)
-
-while True:
-    try:
-        data = loop.run_until_complete(ecu.async_query_ecu())
-        print(f"[OK] Timestamp: {data.get('timestamp')} Current Power: {data.get('current_power')}")
-        pprint(data)
-    except Exception as err:
-        print(f"[ERROR] {err}")
-
-    print(f"Sleeping for {sleep} sec")
-    time.sleep(sleep)
151  server/main.py
@@ -1,30 +1,165 @@
+import os
+import logging
+logging.basicConfig(
+    format='[%(asctime)s] %(levelname)s %(module)s/%(funcName)s: - %(message)s',
+    level=logging.DEBUG if os.environ.get('DEBUG') else logging.INFO)
+logging.getLogger('aiohttp').setLevel(logging.DEBUG if os.environ.get('DEBUG') else logging.WARNING)
+
 import asyncio
+from aiohttp import web
 from datetime import datetime, timedelta
+import pytz
+
+from APSystemsECUR import APSystemsECUR
+
+ECU_IP = '192.168.69.103'
+LISTEN_IP = '192.168.69.100'
+ecu = APSystemsECUR(ECU_IP)
+app = web.Application()
+prev_ecu_timestamp = None
+solar_data = {}
+
+from influxdb import InfluxDBClient
+client = InfluxDBClient('localhost', 8086, database='solar2')
 
 async def proxy(reader, writer):
     message = await reader.read(1024)
     addr = writer.get_extra_info('peername')
 
     try:
-        print('Recvd from {}: {}'.format(addr[0], message))
+        logging.debug('Recvd from {}: {}'.format(addr[0], message))
         offset = 32
         date_time_obj = datetime.strptime(str(message)[offset:offset+14], '%Y%m%d%H%M%S') + timedelta(minutes=-5)
         send_str = '101' + datetime.strftime(date_time_obj, '%Y%m%d%H%M%S')
         send_data = send_str.encode()
-        print('Sending to {}: {}'.format(addr[0], send_data))
+        logging.debug('Sending to {}: {}'.format(addr[0], send_data))
         writer.write(send_data)
         await writer.drain()
     except ValueError:
-        print('Ignored unnecessary data')
+        logging.debug('Ignored unnecessary data')
 
     writer.close()
 
-async def main():
+async def run_proxies():
     for port in [8995, 8996, 8997]:
-        server = await asyncio.start_server(proxy, '192.168.69.69', port)
+        server = await asyncio.start_server(proxy, LISTEN_IP, port)
+        logging.info('Started TCP listener server on %s:%s', LISTEN_IP, port)
         task = asyncio.create_task(server.serve_forever())
 
-        # block here for now
-        await task
-
-asyncio.run(main())
+async def get_data():
+    global prev_ecu_timestamp
+    global solar_data
+
+    await asyncio.sleep(1)
+
+    while True:
+        try:
+            logging.debug('Grabbing ECU data...')
+            data = ecu.query_ecu()
+            logging.debug('Good read, timestamp: %s Mountain Time, current power: %s', data['timestamp'], data['current_power'])
+
+            if data['timestamp'] != prev_ecu_timestamp:
+                total = 0
+                timestamp = datetime.utcnow()
+
+                for i in data['inverters'].values():
+                    total += i['power'][0]
+                    total += i['power'][1]
+                data['actual_total'] = total
+
+                solar_data = data
+                logging.info('Solar data updated, ecu time: %s Mountain, ecu total: %s, actual total: %s', data['timestamp'], data['current_power'], total)
+
+                points = []
+                for i in data['inverters'].values():
+                    points.append({
+                        'time': timestamp,
+                        'measurement': 'inverter',
+                        'tags': {'ecu': data['ecu_id'], 'inverter': i['uid']},
+                        'fields': {
+                            'ecu_time': data['timestamp'],
+                            'online': i['online'],
+                            'frequency': i['frequency'],
+                            'temperature': i['temperature'],
+                        }
+                    })
+
+                    points.append({
+                        'time': timestamp,
+                        'measurement': 'panel',
+                        'tags': {'ecu': data['ecu_id'], 'inverter': i['uid'], 'channel': '0'},
+                        'fields': {
+                            'ecu_time': data['timestamp'],
+                            'power': i['power'][0],
+                            'voltage': i['voltage'][0]
+                        }
+                    })
+
+                    points.append({
+                        'time': timestamp,
+                        'measurement': 'panel',
+                        'tags': {'ecu': data['ecu_id'], 'inverter': i['uid'], 'channel': '1'},
+                        'fields': {
+                            'ecu_time': data['timestamp'],
+                            'power': i['power'][1],
+                            'voltage': i['voltage'][1]
+                        }
+                    })
+
+                points.append({
+                    'time': timestamp,
+                    'measurement': 'ecu',
+                    'tags': {'ecu': data['ecu_id']},
+                    'fields': {
+                        'ecu_total': data['current_power'],
+                        'ecu_time': data['timestamp'],
+                        'actual_total': data['actual_total'],
+                        'today_energy': data['today_energy'],
+                        'lifetime_energy': data['lifetime_energy'],
+                    }
+                })
+
+                client.write_points(points)
+
+                logging.info('Wrote %s points to InfluxDB', len(points))
+
+            prev_ecu_timestamp = data['timestamp']
+
+        except Exception as err:
+            logging.error('Error: ' + str(err))
+
+        await asyncio.sleep(120)
+
+async def index(request):
+    return web.Response(text='hello world', content_type='text/html')
+
+async def data(request):
+    return web.json_response(solar_data)
+
+async def history(request):
+    try:
+        date = datetime.strptime(request.match_info['date'], '%Y-%m-%d')
+        tz = pytz.timezone('America/Edmonton')
+        date = tz.localize(date)
+    except ValueError:
+        raise web.HTTPNotFound
+
+    start = int(date.timestamp())
+    end = date + timedelta(days=1)
+    end = int(end.timestamp())
+
+    q = 'select * from ecu where time >= {}s and time < {}s'.format(start, end)
+    history = list(client.query(q).get_points())
+
+    return web.json_response(history)
+
+if __name__ == '__main__':
+    app.router.add_get('/', index)
+    app.router.add_get('/data', data)
+    app.router.add_get('/history/{date}', history)
+
+    loop = asyncio.get_event_loop()
+    loop.create_task(run_proxies())
+    loop.create_task(get_data())
+    web.run_app(app, port=6901)
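Note: the rewritten main.py serves the cached readings over HTTP on port 6901: /data returns the latest sample and /history/YYYY-MM-DD returns that day's InfluxDB "ecu" points. A quick client sketch, assuming the server is reachable on localhost (the date is just an example):

    import requests  # pinned in requirements.txt below

    latest = requests.get('http://localhost:6901/data').json()
    print(latest.get('current_power'), latest.get('actual_total'))

    day = requests.get('http://localhost:6901/history/2021-06-20').json()
    print(len(day), 'ECU points recorded that day')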
18  server/requirements.txt  (Normal file)
@@ -0,0 +1,18 @@
+aiohttp==3.7.4.post0
+async-timeout==3.0.1
+attrs==21.2.0
+certifi==2021.5.30
+chardet==4.0.0
+ciso8601==2.1.3
+idna==2.10
+influxdb==5.3.1
+msgpack==1.0.2
+multidict==5.1.0
+pkg-resources==0.0.0
+python-dateutil==2.8.1
+pytz==2021.1
+requests==2.25.1
+six==1.16.0
+typing-extensions==3.10.0.0
+urllib3==1.26.5
+yarl==1.6.3