Python Forum

Full Version: How can I only receive data from selected serial numbers
You're currently viewing a stripped down version of our content. View the full version with proper formatting.
How can I only receive data from selected serial numbers
or alternatively add a list of serial numbers to exclude ?
ie)"serialNumber": "121718037628", has been dead for about 2 months now
or maybe auto exclude data that has not been updated for a long period of time
using "lastReportDate": 1549920259,

the stream from then enphase microinverter controller looks like this
http://envoy/api/v1/production/inverters (local network )
Quote:[
{
"serialNumber": "121718037628",
"lastReportDate": 1549920259,
"devType": 1,
"lastReportWatts": 18,
"maxReportWatts": 18
},
{
"serialNumber": "121718037534",
"lastReportDate": 1555635154,
"devType": 1,
"lastReportWatts": 108,
"maxReportWatts": 187
},
{
"serialNumber": "121718037683",
"lastReportDate": 1555635148,
"devType": 1,
"lastReportWatts": 117,
"maxReportWatts": 197
},

#!/usr/bin/env python
"""Prometheus exporter that scrapes an Enphase Envoy solar gateway.

Exposes meter-stream, production/consumption, and per-inverter metrics
on an HTTP endpoint for Prometheus to scrape.
"""

import json
import os
import threading
import time

import requests
from requests.auth import HTTPDigestAuth
from prometheus_client import start_http_server, Gauge


# Connection settings come from the environment so no secrets live in code.
host = os.getenv('ENVOY_HOST')      # hostname or IP of the Envoy gateway
password = os.getenv('ENVOY_PASS')  # installer password for digest auth

user = 'installer'
auth = HTTPDigestAuth(user, password)
marker = b'data: '  # SSE payload prefix on each line of /stream/meter


# Maps inverter serial number (int) -> location label used on the Prometheus
# metrics.  '#1'/'#2' presumably identify the string/array each microinverter
# belongs to — TODO confirm against the physical installation.
serials = {
    121718037663: '#1',
    121718037534: '#1',
    121718037513: '#1',
    121718037414: '#1',
    121718037683: '#1',
    121718037593: '',  
    121718037695: '#1',
    121718037872: '#1',
    121718037601: '#1',
    121718037876: '#2',
    121718037698: '#2',
    121718037881: '#2',
    121718037584: '#2',
    121718037703: '#2',
    
}


# Gauges fed by /stream/meter.  Keys match the short field names the Envoy
# emits per phase ('p', 'q', 's', ...); labelled by meter type and phase.
stream_gauges = {
    'p': Gauge('meter_active_power_watts', 'Active Power', ['type', 'phase']),
    'q': Gauge('meter_reactive_power_watts', 'Reactive Power', ['type', 'phase']),
    's': Gauge('meter_apparent_power_watts', 'Apparent Power', ['type', 'phase']),
    'v': Gauge('meter_voltage_volts', 'Voltage', ['type', 'phase']),
    'i': Gauge('meter_current_amps', 'Current', ['type', 'phase']),
    'f': Gauge('meter_frequency_hertz', 'Frequency', ['type', 'phase']),
    'pf': Gauge('meter_power_factor_ratio', 'Power Factor', ['type', 'phase']),
}

# Gauges fed by the 'production' section of /production.json.
# Keys match the JSON field names so scrape_production_json can index directly.
production_gauges = {
    'activeCount': Gauge('production_active_count', 'Active Count', ['type']),
    # Help text was a copy-paste of 'Active Count'; this gauge carries watts.
    'wNow': Gauge('power_now_watts', 'Current production power', ['type']),
    'whToday': Gauge('production_today_watthours', 'Total production today', ['type']),
    'whLastSevenDays': Gauge('production_7days_watthours', 'Total production last seven days', ['type']),
    'whLifetime': Gauge('production_lifetime_watthours', 'Total production lifetime', ['type']),
}

# Gauges fed by the 'consumption' section of /production.json.
consumption_gauges = {
    # Help text was a copy-paste of 'Active Count'; this gauge carries watts.
    'wNow': Gauge('consumption_now_watts', 'Current consumption power', ['type']),
    'whToday': Gauge('consumption_today_watthours', 'Total consumption today', ['type']),
    'whLastSevenDays': Gauge('consumption_7days_watthours', 'Total consumption last seven days', ['type']),
    'whLifetime': Gauge('consumption_lifetime_watthours', 'Total consumption lifetime', ['type']),
}

# Per-microinverter gauges fed by /api/v1/production/inverters, labelled by
# serial number and the location string looked up in `serials`.
inverter_gauges = {
    'last': Gauge('inverter_last_report_watts', 'Last reported watts', ['serial', 'location']),
    'max': Gauge('inverter_max_report_watts', 'Max reported watts', ['serial', 'location']),
}


def scrape_stream():
    """Follow the Envoy's live meter SSE stream forever, updating stream_gauges.

    Runs in a daemon thread.  Reconnects (after a 5 s pause) on any request
    failure; a malformed payload line is skipped rather than killing the
    thread.
    """
    url = 'http://%s/stream/meter' % host
    while True:
        try:
            # Context manager ensures the streaming connection is closed on
            # every exit path instead of leaking a socket per reconnect.
            with requests.get(url, auth=auth, stream=True, timeout=5) as stream:
                for line in stream.iter_lines():
                    if not line.startswith(marker):
                        continue
                    try:
                        # Slice off the prefix; replace() would also mangle any
                        # occurrence of b'data: ' inside the payload itself.
                        data = json.loads(line[len(marker):])
                    except ValueError as e:
                        # Previously this raised past the except below and
                        # silently killed the whole stream thread.
                        print('Skipping malformed stream line: %s' % e)
                        continue
                    print(data)
                    for meter_type in ('production', 'net-consumption', 'total-consumption'):
                        # NOTE(review): only two phases are read here, matching
                        # the original; confirm whether 'ph-c' is ever present.
                        for phase in ('ph-a', 'ph-b'):
                            for key, value in data.get(meter_type, {}).get(phase, {}).items():
                                if key in stream_gauges:
                                    stream_gauges[key].labels(type=meter_type, phase=phase).set(value)
        except requests.exceptions.RequestException as e:
            print('Exception fetching stream data: %s' % e)
            time.sleep(5)


def scrape_production_json():
    """Scrape /production.json once, updating production and consumption gauges.

    Fields absent from an entry (e.g. systems without consumption CTs) are
    skipped for that entry.  Raises requests exceptions to the caller, which
    logs and retries on the next cycle.
    """
    url = 'http://%s/production.json' % host
    # Timeout keeps the main loop from hanging forever on a wedged Envoy;
    # the original call had none.
    data = requests.get(url, timeout=5).json()

    # Missing top-level sections yield an empty loop instead of a KeyError.
    production = data.get('production', [])
    print(production)
    for entry in production:
        mtype = entry['type']
        for key in ('activeCount', 'wNow', 'whLifetime', 'whToday', 'whLastSevenDays'):
            value = entry.get(key)
            if value is not None:
                production_gauges[key].labels(type=mtype).set(value)

    consumption = data.get('consumption', [])
    print(consumption)
    for entry in consumption:
        mtype = entry['measurementType']
        for key in ('wNow', 'whLifetime', 'whToday', 'whLastSevenDays'):
            value = entry.get(key)
            if value is not None:
                consumption_gauges[key].labels(type=mtype).set(value)


def scrape_inverters(ignored=frozenset()):
    """Scrape per-inverter stats and update inverter_gauges.

    Args:
        ignored: collection of serial numbers, as the *strings* the API
            returns (e.g. {'121718037628'}), to skip entirely.  Useful for
            dead inverters the Envoy keeps reporting.  Defaults to skipping
            nothing, preserving the original behaviour.
    """
    url = 'http://%s/api/v1/production/inverters' % host
    # Timeout added so a wedged Envoy cannot stall the main loop.
    data = requests.get(url, auth=auth, timeout=5).json()
    print(data)
    for inverter in data:
        # Compare before the int() conversion: the JSON serials are strings,
        # so the ignore set must hold strings too.
        if inverter['serialNumber'] in ignored:
            continue
        serial = int(inverter['serialNumber'])
        location = serials.get(serial, '')
        inverter_gauges['last'].labels(serial=serial, location=location).set(inverter['lastReportWatts'])
        inverter_gauges['max'].labels(serial=serial, location=location).set(inverter['maxReportWatts'])


def main():
    """Run the exporter: metrics server, stream thread, and a 60 s poll loop."""
    start_http_server(8000)
    # daemon=True in the constructor: setDaemon() is deprecated since
    # Python 3.10.  A daemon thread lets the process exit without joining
    # the infinite stream loop.
    stream_thread = threading.Thread(target=scrape_stream, daemon=True)
    stream_thread.start()
    while True:
        try:
            scrape_production_json()
            scrape_inverters()
        except Exception as e:
            # Broad catch is deliberate: keep the exporter alive through any
            # transient scrape failure, but log it.
            print('Exception fetching scrape data: %s' % e)
        time.sleep(60)


if __name__ == '__main__':
    main()
and I have tried adding this

}

ignorelist = {
    121718037628,
}

for inverter in data:
     if inverter['serialNumber'] in ignorelist:
        continue
         serial = int(inverter['serialNumber'])
but then it skips all of the inverters
Unless you are able to filter serial numbers in the API request itself, your approach is correct. The problem is that in the JSON the serialNumber is a str, while in your ignore list it is an int.

# assuming you have properly converted the json response
json_response = [
{
"serialNumber": "121718037628",
"lastReportDate": 1549920259,
"devType": 1,
"lastReportWatts": 18,
"maxReportWatts": 18
},
{
"serialNumber": "121718037534",
"lastReportDate": 1555635154,
"devType": 1,
"lastReportWatts": 108,
"maxReportWatts": 187
},
{
"serialNumber": "121718037683",
"lastReportDate": 1555635148,
"devType": 1,
"lastReportWatts": 117,
"maxReportWatts": 197
}
]

ignore_set = {'121718037628',} # that is actually a set

for inverter in json_response:
    if inverter['serialNumber'] not in ignore_set:
        print(inverter)
Output:
{'serialNumber': '121718037534', 'lastReportDate': 1555635154, 'devType': 1, 'lastReportWatts': 108, 'maxReportWatts': 187} {'serialNumber': '121718037683', 'lastReportDate': 1555635148, 'devType': 1, 'lastReportWatts': 117, 'maxReportWatts': 197} >>>
OK, thanks for clearing that up — that explains why everything I thought should work skipped the entire section. How can I get
json_response = [
]{
to be read from the API stream ?
you already have it
 data = requests.get(url, auth=auth).json()
data in your code is json_response in mine
thanks, with your hint I got something to work


I just got it working using

ignorelist = {
'121718037628','121718037534'
}

for inverter in data:
    if inverter['serialNumber'] not in ignorelist:
        print(inverter)
        serial = int(inverter['serialNumber'])
        location = serials.get(serial, '')
Now I do not have to put up with all the flat lines in the charts from dead inverters, which the API still outputs even after they have been deleted from the system. Why it keeps deleted inverters on the output list is beyond me (half of the micro inverters are now dead).
instead of converting serialNumber to int, why not make keys in serials to be str?
because I am a noob trying to tinker with someone else's work :P

by all means if you can improve

https://github.com/MasterCATZ/solar-obse.../scrape.py

something is causing the envoy to lag down a lot when I am scraping it but pretty sure its wifi related