Compare commits
6 Commits
863841bd74
...
f2e9b389b2
Author | SHA1 | Date |
---|---|---|
Tanner Collin | f2e9b389b2 | 2 years ago |
Tanner Collin | 8b5643d57d | 2 years ago |
Tanner Collin | c52268be2c | 3 years ago |
Tanner Collin | 40eba650da | 3 years ago |
Tanner Collin | 9e1f55f7dc | 3 years ago |
Tanner Collin | 483d054b2f | 3 years ago |
5 changed files with 686 additions and 0 deletions
@ -0,0 +1,18 @@ |
||||
# Server Setup |
||||
|
||||
``` |
||||
sudo apt install dnsmasq |
||||
``` |
||||
|
||||
Edit `/etc/dnsmasq.conf`: |
||||
|
||||
``` |
||||
no-resolv # for offline / isolated networks only |
||||
address=/.apsystemsema.com/192.168.69.100 |
||||
address=/.apsema.com/192.168.69.100 |
||||
listen-address=192.168.69.100 |
||||
``` |
||||
|
||||
Replace `192.168.69.100` with the server's IP address. |
||||
|
||||
Edit router's DHCP DNS settings to use 192.168.69.100 as the name server. |
@ -0,0 +1,111 @@ |
||||
# Byte-compiled / optimized / DLL files |
||||
__pycache__/ |
||||
*.py[cod] |
||||
*$py.class |
||||
|
||||
# C extensions |
||||
*.so |
||||
|
||||
# Distribution / packaging |
||||
.Python |
||||
build/ |
||||
develop-eggs/ |
||||
dist/ |
||||
downloads/ |
||||
eggs/ |
||||
.eggs/ |
||||
lib/ |
||||
lib64/ |
||||
parts/ |
||||
sdist/ |
||||
var/ |
||||
wheels/ |
||||
*.egg-info/ |
||||
.installed.cfg |
||||
*.egg |
||||
|
||||
# PyInstaller |
||||
# Usually these files are written by a python script from a template |
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it. |
||||
*.manifest |
||||
*.spec |
||||
|
||||
# Installer logs |
||||
pip-log.txt |
||||
pip-delete-this-directory.txt |
||||
|
||||
# Unit test / coverage reports |
||||
htmlcov/ |
||||
.tox/ |
||||
.coverage |
||||
.coverage.* |
||||
.cache |
||||
nosetests.xml |
||||
coverage.xml |
||||
*.cover |
||||
.hypothesis/ |
||||
|
||||
# Translations |
||||
*.mo |
||||
*.pot |
||||
|
||||
# Django stuff: |
||||
*.log |
||||
local_settings.py |
||||
|
||||
# Flask stuff: |
||||
instance/ |
||||
.webassets-cache |
||||
|
||||
# Scrapy stuff: |
||||
.scrapy |
||||
|
||||
# Sphinx documentation |
||||
docs/_build/ |
||||
|
||||
# PyBuilder |
||||
target/ |
||||
|
||||
# Jupyter Notebook |
||||
.ipynb_checkpoints |
||||
|
||||
# pyenv |
||||
.python-version |
||||
|
||||
# celery beat schedule file |
||||
celerybeat-schedule |
||||
|
||||
# SageMath parsed files |
||||
*.sage.py |
||||
|
||||
# Environments |
||||
.env |
||||
.venv |
||||
env/ |
||||
venv/ |
||||
ENV/ |
||||
|
||||
# Spyder project settings |
||||
.spyderproject |
||||
.spyproject |
||||
|
||||
# Rope project settings |
||||
.ropeproject |
||||
|
||||
# mkdocs documentation |
||||
/site |
||||
|
||||
# mypy |
||||
.mypy_cache/ |
||||
|
||||
# Editor |
||||
*.swp |
||||
*.swo |
||||
|
||||
*.session |
||||
*.session-journal |
||||
settings.py |
||||
|
||||
caseta.crt |
||||
caseta-bridge.crt |
||||
caseta.key |
@ -0,0 +1,369 @@ |
||||
# Stolen from https://github.com/ksheumaker/homeassistant-apsystems_ecur |
||||
# cheers to ksheumaker and HAEdwin |
||||
|
||||
import socket |
||||
import binascii |
||||
import logging |
||||
|
||||
class APSystemsInvalidData(Exception):
    """Raised when an ECU response cannot be parsed or fails frame validation."""
||||
|
||||
class APSystemsECUR:
    """Minimal TCP client for the APSystems ECU-R solar gateway.

    Protocol details reverse-engineered; adapted from
    https://github.com/ksheumaker/homeassistant-apsystems_ecur
    (credit to ksheumaker and HAEdwin).
    """

    def __init__(self, ipaddr, port=8899, raw_ecu=None, raw_inverter=None):
        # network endpoint of the ECU-R unit
        self.ipaddr = ipaddr
        self.port = port

        # socket behaviour: responses are expected to end in this suffix
        self.recv_suffix = b'END\n'
        # how long to wait on socket commands until we get our recv_suffix
        self.timeout = 5
        # how many times the same command may be retried within one update
        self.cmd_attempts = 3
        # how big of a buffer to read at a time from the socket
        self.recv_size = 4096 * 16

        # inverter model families, keyed on the first 3 digits of the UID
        self.qs1_ids = ['802', '801']
        self.yc600_ids = ['406', '407', '408', '409']
        self.yc1000_ids = ['501', '502', '503', '504']

        # wire-protocol command strings
        self.cmd_suffix = 'END\n'
        self.ecu_query = 'APS1100160001' + self.cmd_suffix
        self.inverter_query_prefix = 'APS1100280002'
        self.inverter_query_suffix = self.cmd_suffix
        self.inverter_signal_prefix = 'APS1100280030'
        self.inverter_signal_suffix = self.cmd_suffix

        # byte offset where the per-inverter records begin in a data response
        self.inverter_byte_start = 26

        # parsed ECU-level state, filled in by process_ecu_data()
        self.ecu_id = None
        self.qty_of_inverters = 0
        self.lifetime_energy = 0
        self.current_power = 0
        self.today_energy = 0
        self.inverters = []
        self.firmware = None
        self.timezone = None
        self.last_update = None

        # raw protocol payloads (kept for debugging / offline parsing)
        self.ecu_raw_data = raw_ecu
        self.inverter_raw_data = raw_inverter
        self.inverter_raw_signal = None

        self.read_buffer = b''

        self.reader = None
        self.writer = None
||||
|
||||
def _request(self, cmd):
    """Open a fresh TCP connection to the ECU, send *cmd*, return one read.

    Fixes vs. the original inline code: the socket is shut down and
    closed even when connect/send/recv fails (it previously leaked on
    any exception), and self.timeout is actually applied so a dead ECU
    cannot block the caller forever.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(self.timeout)
        sock.connect((self.ipaddr, self.port))
        logging.debug('ecu cmd: %s', cmd)
        sock.sendall(cmd.encode('utf-8'))
        return sock.recv(self.recv_size)
    finally:
        try:
            sock.shutdown(socket.SHUT_RDWR)
        except OSError:
            pass  # peer already gone; close() below is what matters
        sock.close()

def query_ecu(self):
    """Poll the ECU with its three commands and return the combined result.

    Returns the dict from process_inverter_data() augmented with
    'ecu_id', 'today_energy', 'lifetime_energy' and 'current_power'.
    Raises APSystemsInvalidData on malformed responses and OSError /
    socket.timeout on network failures.
    """
    # 1) ECU-level query: id, firmware, energy counters
    self.ecu_raw_data = self._request(self.ecu_query)
    logging.debug('raw ecu data: %s', self.ecu_raw_data)
    self.process_ecu_data()

    # 2) per-inverter power/voltage data (needs ecu_id from step 1)
    cmd = self.inverter_query_prefix + self.ecu_id + self.inverter_query_suffix
    self.inverter_raw_data = self._request(cmd)
    logging.debug('raw inverter data: %s', self.inverter_raw_data)

    # 3) per-inverter zigbee signal strength
    cmd = self.inverter_signal_prefix + self.ecu_id + self.inverter_signal_suffix
    self.inverter_raw_signal = self._request(cmd)
    logging.debug('raw signal data: %s', self.inverter_raw_signal)

    data = self.process_inverter_data()

    data['ecu_id'] = self.ecu_id
    data['today_energy'] = self.today_energy
    data['lifetime_energy'] = self.lifetime_energy
    data['current_power'] = self.current_power

    return data
||||
|
||||
def aps_int(self, codec, start):
    """Decode the two bytes at *start* as a big-endian unsigned integer.

    Raises APSystemsInvalidData when the slice cannot be hex-parsed
    (e.g. an empty slice past the end of *codec*).
    """
    chunk = codec[start:start + 2]
    try:
        return int(binascii.b2a_hex(chunk), 16)
    except ValueError:
        debugdata = binascii.b2a_hex(codec)
        raise APSystemsInvalidData(f'Unable to convert binary to int location={start} data={debugdata}')
||||
|
||||
def aps_short(self, codec, start):
    """Decode the single byte at *start* as an unsigned integer (0-255).

    Bug fix: the original parsed the two hex characters with base 8.
    That raised ValueError (surfacing as APSystemsInvalidData) for any
    byte whose hex form contains a digit >= 8 or a letter, and silently
    mis-read the remaining values as octal.  b2a_hex output must be
    parsed base 16.
    """
    try:
        return int(binascii.b2a_hex(codec[(start):(start + 1)]), 16)
    except ValueError:
        debugdata = binascii.b2a_hex(codec)
        raise APSystemsInvalidData(f'Unable to convert binary to short int location={start} data={debugdata}')
||||
|
||||
def aps_double(self, codec, start):
    """Decode the four bytes at *start* as a big-endian unsigned integer.

    Raises APSystemsInvalidData when the slice cannot be hex-parsed.
    """
    chunk = codec[start:start + 4]
    try:
        return int(binascii.b2a_hex(chunk), 16)
    except ValueError:
        debugdata = binascii.b2a_hex(codec)
        raise APSystemsInvalidData(f'Unable to convert binary to double location={start} data={debugdata}')
||||
|
||||
def aps_bool(self, codec, start):
    """Read the two bytes at *start* as a flag.

    NOTE(review): bool() is applied to the hex *string*, which is
    non-empty for any in-range slice — so this returns True even for
    b'\\x00\\x00' and is only False when the slice is empty (start past
    the end of *codec*).  Quirk kept as-is; confirm intent before fixing.
    """
    hex_repr = binascii.b2a_hex(codec[start:start + 2])
    return bool(hex_repr)
||||
|
||||
def aps_uid(self, codec, start):
    """Return the 12-character hex UID starting at byte *start*.

    Only the first 6 bytes contribute: a 12-byte slice produces 24 hex
    characters and the original kept characters 0-11 of them.
    """
    return binascii.b2a_hex(codec[start:start + 12]).decode('ascii')[0:12]
||||
|
||||
def aps_str(self, codec, start, amount):
    """Extract *amount* characters of text starting at byte *start*.

    NOTE(review): this slices the repr() of the bytes object, so any
    non-printable byte expands to an escape sequence and skews the
    result; correct only for plain-ASCII fields.  Kept as-is because
    all callers pass ASCII protocol fields.
    """
    field = codec[start:(start + amount)]
    return str(field)[2:(amount + 2)]
||||
|
||||
def aps_timestamp(self, codec, start, amount):
    """Decode a packed-BCD timestamp into 'YYYY-MM-DD HH:MM:SS'.

    The bytes at [start, start+amount) hex-encode the digit string
    YYYYMMDDHHMMSS; callers pass amount=14 (7 bytes -> 14 hex digits).
    """
    digits = binascii.b2a_hex(codec[start:(start + amount)]).decode('ascii')[0:amount]
    return (digits[0:4] + '-' + digits[4:6] + '-' + digits[6:8]
            + ' ' + digits[8:10] + ':' + digits[10:12] + ':' + digits[12:14])
||||
|
||||
def check_ecu_checksum(self, data, cmd): |
||||
datalen = len(data) - 1 |
||||
logging.debug('datalen: %s', datalen) |
||||
try: |
||||
checksum = int(data[5:9]) |
||||
logging.debug('checksum: %s', checksum) |
||||
except ValueError as err: |
||||
debugdata = binascii.b2a_hex(data) |
||||
raise APSystemsInvalidData(f'Error getting checksum int from {cmd} data={debugdata}') |
||||
|
||||
if datalen != checksum: |
||||
debugdata = binascii.b2a_hex(data) |
||||
raise APSystemsInvalidData(f'Checksum on {cmd} failed checksum={checksum} datalen={datalen} data={debugdata}') |
||||
|
||||
start_str = self.aps_str(data, 0, 3) |
||||
end_str = self.aps_str(data, len(data) - 4, 3) |
||||
|
||||
if start_str != 'APS': |
||||
debugdata = binascii.b2a_hex(data) |
||||
raise APSystemsInvalidData(f'Result on {cmd} incorrect start signature {start_str} != APS data={debugdata}') |
||||
|
||||
if end_str != 'END': |
||||
debugdata = binascii.b2a_hex(data) |
||||
raise APSystemsInvalidData(f'Result on {cmd} incorrect end signature {end_str} != END data={debugdata}') |
||||
|
||||
return True |
||||
|
||||
def process_ecu_data(self, data=None): |
||||
logging.debug('Processing ECU data...') |
||||
if not data: |
||||
data = self.ecu_raw_data |
||||
|
||||
self.check_ecu_checksum(data, 'ECU Query') |
||||
|
||||
self.ecu_id = self.aps_str(data, 13, 12) |
||||
self.qty_of_inverters = self.aps_int(data, 46) |
||||
self.firmware = self.aps_str(data, 55, 15) |
||||
self.timezone = self.aps_str(data, 70, 9) |
||||
self.lifetime_energy = self.aps_double(data, 27) / 10 |
||||
self.today_energy = self.aps_double(data, 35) / 100 |
||||
self.current_power = self.aps_double(data, 31) |
||||
|
||||
|
||||
def process_signal_data(self, data=None): |
||||
logging.debug('Processing signal data...') |
||||
signal_data = {} |
||||
|
||||
if not data: |
||||
data = self.inverter_raw_signal |
||||
|
||||
self.check_ecu_checksum(data, 'Signal Query') |
||||
|
||||
if not self.qty_of_inverters: |
||||
return signal_data |
||||
|
||||
location = 15 |
||||
for i in range(0, self.qty_of_inverters): |
||||
uid = self.aps_uid(data, location) |
||||
location += 6 |
||||
|
||||
strength = data[location] |
||||
location += 1 |
||||
|
||||
strength = int((strength / 255) * 100) |
||||
signal_data[uid] = strength |
||||
|
||||
return signal_data |
||||
|
||||
def process_inverter_data(self, data=None):
    """Parse the per-inverter data response into a result dict.

    Falls back to self.inverter_raw_data when *data* is not given.
    Returns {'timestamp': str, 'inverter_qty': int,
             'inverters': {uid: {...per-inverter fields...}}}.
    Raises APSystemsInvalidData on bad framing or an unknown
    inverter model.
    """
    logging.debug('Processing inverter data...')
    if not data:
        data = self.inverter_raw_data

    self.check_ecu_checksum(data, 'Inverter data')

    output = {}

    # frame header: ECU-side timestamp and how many inverter records follow
    timestamp = self.aps_timestamp(data, 19, 14)
    inverter_qty = self.aps_int(data, 17)

    self.last_update = timestamp
    output['timestamp'] = timestamp
    output['inverter_qty'] = inverter_qty
    output['inverters'] = {}

    # this is the start of the loop of inverters
    location = self.inverter_byte_start

    # signal strengths come from the separate signal query (keyed by uid);
    # uses self.inverter_raw_signal captured earlier
    signal = self.process_signal_data()

    inverters = {}
    for i in range(0, inverter_qty):

        inv={}

        # fixed-size record prefix: uid, online flag, 2 unknown chars,
        # frequency (deci-Hz) and temperature (offset by +100 on the wire)
        inverter_uid = self.aps_uid(data, location)
        inv['uid'] = inverter_uid
        location += 6

        inv['online'] = self.aps_bool(data, location)
        location += 1

        inv['unknown'] = self.aps_str(data, location, 2)
        location += 2

        inv['frequency'] = self.aps_int(data, location) / 10
        location += 2

        inv['temperature'] = self.aps_int(data, location) - 100
        location += 2

        inv['signal'] = signal.get(inverter_uid, 0)

        # the first 3 digits determine the type of inverter
        # (each model parser consumes a different number of channel bytes
        # and returns the advanced location)
        inverter_type = inverter_uid[0:3]
        if inverter_type in self.yc600_ids:
            (channel_data, location) = self.process_yc600(data, location)
            inv.update(channel_data)

        elif inverter_type in self.qs1_ids:
            (channel_data, location) = self.process_qs1(data, location)
            inv.update(channel_data)

        elif inverter_type in self.yc1000_ids:
            (channel_data, location) = self.process_yc1000(data, location)
            inv.update(channel_data)

        else:
            raise APSystemsInvalidData(f'Unsupported inverter type {inverter_type}')

        inverters[inverter_uid] = inv

    output['inverters'] = inverters
    return (output)
||||
|
||||
def process_yc1000(self, data, location): |
||||
|
||||
power = [] |
||||
voltages = [] |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
voltage = self.aps_int(data, location) |
||||
location += 2 |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
voltage = self.aps_int(data, location) |
||||
location += 2 |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
voltage = self.aps_int(data, location) |
||||
location += 2 |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
voltages.append(voltage) |
||||
|
||||
output = { |
||||
'model' : 'YC1000', |
||||
'channel_qty' : 4, |
||||
'power' : power, |
||||
'voltage' : voltages |
||||
} |
||||
|
||||
return (output, location) |
||||
|
||||
|
||||
def process_qs1(self, data, location): |
||||
|
||||
power = [] |
||||
voltages = [] |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
voltage = self.aps_int(data, location) |
||||
location += 2 |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
voltages.append(voltage) |
||||
|
||||
output = { |
||||
'model' : 'QS1', |
||||
'channel_qty' : 4, |
||||
'power' : power, |
||||
'voltage' : voltages |
||||
} |
||||
|
||||
return (output, location) |
||||
|
||||
|
||||
def process_yc600(self, data, location): |
||||
power = [] |
||||
voltages = [] |
||||
|
||||
for i in range(0, 2): |
||||
power.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
voltages.append(self.aps_int(data, location)) |
||||
location += 2 |
||||
|
||||
output = { |
||||
'model' : 'YC600', |
||||
'channel_qty' : 2, |
||||
'power' : power, |
||||
'voltage' : voltages, |
||||
} |
||||
|
||||
return (output, location) |
@ -0,0 +1,170 @@ |
||||
import os |
||||
import logging |
||||
logging.basicConfig( |
||||
format='[%(asctime)s] %(levelname)s %(module)s/%(funcName)s: - %(message)s', |
||||
level=logging.DEBUG if os.environ.get('DEBUG') else logging.INFO) |
||||
logging.getLogger('aiohttp').setLevel(logging.DEBUG if os.environ.get('DEBUG') else logging.WARNING) |
||||
|
||||
import asyncio |
||||
from aiohttp import web |
||||
from datetime import datetime, timedelta |
||||
import pytz |
||||
|
||||
from APSystemsECUR import APSystemsECUR |
||||
|
||||
# --- deployment configuration ---
ECU_IP = '192.168.69.153'     # LAN address of the APSystems ECU-R
LISTEN_IP = '192.168.69.100'  # address the fake-time TCP listeners bind to
ecu = APSystemsECUR(ECU_IP)
app = web.Application()

# shared state between the poller task and the HTTP handlers:
# last ECU timestamp seen (dedupe) and the last successful poll result
prev_ecu_timestamp = None
solar_data = {}

from influxdb import InfluxDBClient
client = InfluxDBClient('localhost', 8086, database='solar2')
||||
|
||||
async def proxy(reader, writer):
    """Impersonate the apsystemsema.com time service for the ECU.

    Reads one request, extracts the 14-digit timestamp at a fixed
    offset, shifts it back five minutes (presumably to keep the ECU's
    clock slightly behind — TODO confirm) and replies with it prefixed
    by '101'.  Requests without a parseable timestamp are ignored.

    Fix: the connection is now closed in a finally block, so the writer
    is not leaked when an unexpected exception escapes the handler.
    """
    message = await reader.read(1024)
    addr = writer.get_extra_info('peername')

    try:
        logging.debug('Recvd from {}: {}'.format(addr[0], message))
        offset = 32
        date_time_obj = datetime.strptime(str(message)[offset:offset+14], '%Y%m%d%H%M%S') + timedelta(minutes=-5)
        send_str = '101' + datetime.strftime(date_time_obj, '%Y%m%d%H%M%S')
        send_data = send_str.encode()
        logging.debug('Sending to {}: {}'.format(addr[0], send_data))
        writer.write(send_data)
        await writer.drain()
    except ValueError:
        # request did not contain a timestamp where we expected one
        logging.debug('Ignored unnecessary data')
    finally:
        writer.close()
||||
|
||||
# strong references to the listener tasks; asyncio's loop only keeps weak
# references, so without this the serve_forever tasks could be GC'd mid-run
_proxy_tasks = []

async def run_proxies():
    """Start the fake-time TCP listeners the ECU firmware phones home to.

    One listener per known ECU port; each serve_forever() task is
    retained in _proxy_tasks (fix: the original discarded the task
    handle, risking garbage collection of a running server task).
    """
    for port in [8995, 8996, 8997]:
        server = await asyncio.start_server(proxy, LISTEN_IP, port)
        logging.info('Started TCP listener server on %s:%s', LISTEN_IP, port)
        _proxy_tasks.append(asyncio.create_task(server.serve_forever()))
||||
|
||||
async def get_data():
    """Background poller: read the ECU every 2 minutes and record results.

    On each new ECU timestamp it recomputes the actual total from the
    per-channel power readings, publishes the snapshot in the global
    solar_data (served by the HTTP handlers), and writes inverter /
    panel / ecu measurement points to InfluxDB.  All errors are logged
    and the loop keeps running.

    NOTE(review): the original indentation was lost in this view; the
    per-inverter 'panel' points are placed inside the inverter loop
    (they reference the loop variable i) and the single 'ecu' point
    after it — verify against the original file.
    """
    global prev_ecu_timestamp
    global solar_data

    # give the web app / listeners a moment to come up first
    await asyncio.sleep(1)

    while True:
        try:
            logging.debug('Grabbing ECU data...')
            data = ecu.query_ecu()
            logging.debug('Good read, timestamp: %s Mountain Time, current power: %s', data['timestamp'], data['current_power'])

            # only process when the ECU reports a fresh sample
            if data['timestamp'] != prev_ecu_timestamp:
                total = 0
                timestamp = datetime.utcnow()

                # sum both channels of every inverter for the real total
                for i in data['inverters'].values():
                    total += i['power'][0]
                    total += i['power'][1]
                data['actual_total'] = total

                solar_data = data
                logging.info('Solar data updated, ecu time: %s Mountain, ecu total: %s, actual total: %s', data['timestamp'], data['current_power'], total)

                points = []
                for i in data['inverters'].values():
                    # one point per inverter: health / environment fields
                    points.append({
                        'time': timestamp,
                        'measurement': 'inverter',
                        'tags': {'ecu': data['ecu_id'], 'inverter': i['uid']},
                        'fields': {
                            'ecu_time': data['timestamp'],
                            'online': i['online'],
                            'frequency': i['frequency'],
                            'temperature': i['temperature'],
                        }
                    })

                    # one point per panel channel: power and voltage
                    points.append({
                        'time': timestamp,
                        'measurement': 'panel',
                        'tags': {'ecu': data['ecu_id'], 'inverter': i['uid'], 'channel': '0'},
                        'fields': {
                            'ecu_time': data['timestamp'],
                            'power': i['power'][0],
                            'voltage': i['voltage'][0]
                        }
                    })

                    points.append({
                        'time': timestamp,
                        'measurement': 'panel',
                        'tags': {'ecu': data['ecu_id'], 'inverter': i['uid'], 'channel': '1'},
                        'fields': {
                            'ecu_time': data['timestamp'],
                            'power': i['power'][1],
                            'voltage': i['voltage'][1]
                        }
                    })

                # one summary point for the whole ECU
                points.append({
                    'time': timestamp,
                    'measurement': 'ecu',
                    'tags': {'ecu': data['ecu_id']},
                    'fields': {
                        'ecu_total': data['current_power'],
                        'ecu_time': data['timestamp'],
                        'actual_total': data['actual_total'],
                        'today_energy': data['today_energy'],
                        'lifetime_energy': data['lifetime_energy'],
                    }
                })

                client.write_points(points)

                logging.info('Wrote %s points to InfluxDB', len(points))

                prev_ecu_timestamp = data['timestamp']

        except Exception as err:
            logging.error('Error: ' + str(err))

        await asyncio.sleep(120)
||||
|
||||
async def index(request):
    """Root endpoint: serve a static greeting (doubles as a health check)."""
    body = 'hello world'
    return web.Response(text=body, content_type='text/html')
||||
|
||||
async def data(request):
    """Expose the most recent ECU poll result as JSON (empty dict until first read)."""
    snapshot = solar_data
    return web.json_response(snapshot)
||||
|
||||
async def display(request):
    """Compact payload for a display client: current production + brightness.

    NOTE(review): raises KeyError (-> HTTP 500) until the first successful
    ECU poll populates solar_data — behavior kept as-is.
    """
    payload = {'power': solar_data['actual_total'], 'brightness': 5}
    return web.json_response(payload)
||||
|
||||
async def history(request):
    """Return one local calendar day of 'ecu' measurements from InfluxDB.

    The {date} path segment is YYYY-MM-DD, interpreted in
    America/Edmonton time; a malformed date yields 404.
    """
    try:
        day = datetime.strptime(request.match_info['date'], '%Y-%m-%d')
        day = pytz.timezone('America/Edmonton').localize(day)
    except ValueError:
        raise web.HTTPNotFound

    # half-open [start, end) window covering the requested local day
    start = int(day.timestamp())
    end = int((day + timedelta(days=1)).timestamp())

    q = 'select * from ecu where time >= {}s and time < {}s'.format(start, end)
    rows = list(client.query(q).get_points())

    return web.json_response(rows)
||||
|
||||
if __name__ == '__main__':
    # HTTP API routes
    app.router.add_get('/', index)
    app.router.add_get('/data', data)
    app.router.add_get('/display', display)
    app.router.add_get('/history/{date}', history)

    # schedule the background jobs on the loop aiohttp will run
    # NOTE(review): get_event_loop() at module level is deprecated on
    # newer Python — fine with the aiohttp version pinned here
    loop = asyncio.get_event_loop()
    loop.create_task(run_proxies())
    loop.create_task(get_data())
    web.run_app(app, port=6901)
@ -0,0 +1,18 @@ |
||||
aiohttp==3.7.4.post0 |
||||
async-timeout==3.0.1 |
||||
attrs==21.2.0 |
||||
certifi==2021.5.30 |
||||
chardet==4.0.0 |
||||
ciso8601==2.1.3 |
||||
idna==2.10 |
||||
influxdb==5.3.1 |
||||
msgpack==1.0.2 |
||||
multidict==5.1.0 |
||||
pkg-resources==0.0.0 |
||||
python-dateutil==2.8.1 |
||||
pytz==2021.1 |
||||
requests==2.25.1 |
||||
six==1.16.0 |
||||
typing-extensions==3.10.0.0 |
||||
urllib3==1.26.5 |
||||
yarl==1.6.3 |
Loading…
Reference in new issue