Data Logging - mjdeventer/AQ-JDMB GitHub Wiki

Timer in Python

We want to call our sensors and store the readings in a continuously growing DataFrame with a specified time step (sampling interval) between samples, e.g. once every 3 seconds:

Make sure pandas (which pulls in NumPy) is installed the proper way on your Raspberry Pi. Assuming you run Python 3, use
sudo apt-get install python3-pandas
Also make sure all of the following dependencies are installed and up to date:
pandas, pyserial (imported as serial), bme280, influxdb_client
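
The remaining modules are not part of the default Raspberry Pi OS image. One possible way to install them, assuming pip3 is available (package names as published on PyPI), is
sudo pip3 install pyserial influxdb-client
The bme280 module imported below is assumed here to be the standalone bme280.py script that provides readBME280All(), placed in the same folder as the logger script.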

import time, serial, struct
import pandas as pd
import datetime as dt

# Pre-allocate an empty DataFrame for logger storage; adapt the columns to your sensors and readouts
dfaq = pd.DataFrame(columns=['DateTime', 'PM25', 'PM10'])

starttime = time.time()
loggstep = 3  # sampling interval in seconds
while True:
    pmout = getsds011(comsds011)  # insert a function that reads any sensor here (getsds011 is defined further down)
    newdata = {'DateTime': dt.datetime.now(), 'PM25': pmout[0], 'PM10': pmout[1]}
    # DataFrame.append() was removed in pandas 2.x, so use pd.concat instead
    dfaq = pd.concat([dfaq, pd.DataFrame([newdata])], ignore_index=True)
    print("appended new data to Logger DF")
    # sleep until the next multiple of loggstep so the sampling grid does not drift
    time.sleep(loggstep - ((time.time() - starttime) % loggstep))
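
The modulo-based sleep in the last line is what keeps the samples on a fixed grid: it always sleeps until the next multiple of loggstep after starttime, so the timing does not drift even when a sensor read takes a variable amount of time. A minimal standalone sketch of just the timer (no sensors attached; the 0.7 s pause only stands in for a slow sensor read):

import time
import datetime as dt

starttime = time.time()
loggstep = 3  # seconds between ticks

for _ in range(5):
    print("tick at", dt.datetime.now())
    time.sleep(0.7)  # stand-in for a sensor read of variable duration
    # sleep for the remainder of the current 3-second slot
    time.sleep(loggstep - ((time.time() - starttime) % loggstep))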

Work-in-progress example: logging into a pandas DataFrame and exporting it as a .csv every minute

import time, struct
import pandas as pd
import serial
import bme280 as bme
import datetime as dt

def getsds011(comport):
    """Opens the serial port of an SDS011 and returns one tuple of PM2.5 and PM10.
    input  - string with the serial port the SDS011 is connected to
    output - a tuple with entry [0] being PM2.5 and entry [1] being PM10 in μg/m^3"""

    ser = serial.Serial()
    ser.port = comport  # set this to your serial port
    ser.baudrate = 9600
    ser.open()
    ser.flushInput()
    byte, lastbyte = b'\x00', b'\x00'
    while True:
        lastbyte = byte
        byte = ser.read(size=1)

        # We got a valid packet header (0xAA 0xC0)
        if lastbyte == b'\xaa' and byte == b'\xc0':
            sentence = ser.read(size=8)  # read the remaining 8 bytes of the frame
            # Decode the packet - little endian: 2 shorts for PM2.5 and PM10,
            # 2 device-ID bytes (skipped), checksum, message tail
            readings = struct.unpack('<hhxxcc', sentence)

            pm_25 = readings[0] / 10
            pm_10 = readings[1] / 10
            # the checksum and message tail are ignored here

            print("PM 2.5:", pm_25, "μg/m^3  PM 10:", pm_10, "μg/m^3")
            ser.close()
            return pm_25, pm_10
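
# Hedged sketch (not in the original script): the function above ignores the SDS011
# checksum. If you want to verify it before trusting a reading, the checksum byte is
# the low 8 bits of the sum of the six data bytes (PM2.5 low/high, PM10 low/high and
# the two device-ID bytes), i.e. bytes 0-5 of the 8-byte payload compared with byte 6.
# sds011_checksum_ok is a hypothetical helper name, not part of the wiki's code.
def sds011_checksum_ok(sentence):
    """Return True if an 8-byte SDS011 payload has a consistent checksum."""
    return (sum(sentence[0:6]) & 0xFF) == sentence[6]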

comsds011 = "/dev/ttyUSB0"
# Pre-allocate an empty DataFrame for logger storage; adapt the columns to your sensors and readouts
dfaq = pd.DataFrame(columns=['DateTime', 'PM25', 'PM10', 'Ta', 'Pa', 'RH'])

starttime = time.time()
starttimestr = dt.datetime.now().strftime("%Y-%m-%d_%H-%M")  # used in the .csv filename
loggstep = 3  # sampling interval in seconds
i = 0
while True:
    pmout = getsds011(comsds011)   # SDS011 readout: (PM2.5, PM10)
    bmeout = bme.readBME280All()   # BME280 readout: (temperature, pressure, humidity)
    newdata = {'DateTime': dt.datetime.now(), 'PM25': pmout[0], 'PM10': pmout[1],
               'Ta': round(bmeout[0], 1), 'Pa': round(bmeout[1], 3), 'RH': round(bmeout[2], 1)}
    # DataFrame.append() was removed in pandas 2.x, so use pd.concat instead
    dfaq = pd.concat([dfaq, pd.DataFrame([newdata])], ignore_index=True)
    print("appended new data to Logger DF")
    # sleep until the next multiple of loggstep so the sampling grid does not drift
    time.sleep(loggstep - ((time.time() - starttime) % loggstep))
    newtime = time.time()
    if newtime > (starttime + 60 * i):   # once a minute, write the full DataFrame to disk
        dfaq.to_csv(starttimestr + '_logger.csv', index=False)
        print("wrote file")
        i = i + 1
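
To inspect the logged data afterwards, the .csv can be read back into pandas on any machine; the filename below is only an illustration of the pattern produced by starttimestr + '_logger.csv':

import pandas as pd

df = pd.read_csv('2024-05-01_12-00_logger.csv', parse_dates=['DateTime'])
print(df.describe())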

Work-in-progress example 2: logging to a .csv and pushing the readings to InfluxDB Cloud

import time, struct
import pandas as pd
import serial
import bme280 as bme
import datetime as dt
from influxdb_client import InfluxDBClient, Point, WritePrecision
from influxdb_client.client.write_api import SYNCHRONOUS

### Instantiate the connection to the InfluxDB 2.x client
user = 'yourusername'; org = user
token = "yourtoken"  # API token for the bucket; never publish a real token in a public wiki
bucket = "AQ-JDMB"
client = InfluxDBClient(url="https://westeurope-1.azure.cloud2.influxdata.com", token=token)
write_api = client.write_api(write_options=SYNCHRONOUS)


def getsds011(comport):
    """Opens the serial port of an SDS011 and returns one tuple of PM2.5 and PM10.
    input  - string with the serial port the SDS011 is connected to
    output - a tuple with entry [0] being PM2.5 and entry [1] being PM10 in μg/m^3"""

    ser = serial.Serial()
    ser.port = comport  # set this to your serial port
    ser.baudrate = 9600
    ser.open()
    ser.flushInput()
    byte, lastbyte = b'\x00', b'\x00'
    while True:
        lastbyte = byte
        byte = ser.read(size=1)

        # We got a valid packet header (0xAA 0xC0)
        if lastbyte == b'\xaa' and byte == b'\xc0':
            sentence = ser.read(size=8)  # read the remaining 8 bytes of the frame
            # Decode the packet - little endian: 2 shorts for PM2.5 and PM10,
            # 2 device-ID bytes (skipped), checksum, message tail
            readings = struct.unpack('<hhxxcc', sentence)

            pm_25 = readings[0] / 10
            pm_10 = readings[1] / 10
            # the checksum and message tail are ignored here

            print("PM 2.5:", pm_25, "μg/m^3  PM 10:", pm_10, "μg/m^3")
            ser.close()
            return pm_25, pm_10

comsds011 = "/dev/ttyUSB0"
# Pre-allocate an empty DataFrame for logger storage; adapt the columns to your sensors and readouts
dfaq = pd.DataFrame(columns=['DateTime', 'PM25', 'PM10', 'Ta', 'Pa', 'RH'])

starttime = time.time()
starttimestr = dt.datetime.now().strftime("%Y-%m-%d_%H-%M")  # used in the .csv filename
loggstep = 3  # sampling interval in seconds
i = 0
while True:
    pmout = getsds011(comsds011)   # SDS011 readout: (PM2.5, PM10)
    bmeout = bme.readBME280All()   # BME280 readout: (temperature, pressure, humidity)
    newdata = {'DateTime': dt.datetime.now(), 'PM25': pmout[0], 'PM10': pmout[1],
               'Ta': round(bmeout[0], 1), 'Pa': round(bmeout[1], 3), 'RH': round(bmeout[2], 1)}
    # DataFrame.append() was removed in pandas 2.x, so use pd.concat instead
    dfaq = pd.concat([dfaq, pd.DataFrame([newdata])], ignore_index=True)
    print("appended new data to Logger DF")
    # sleep until the next multiple of loggstep so the sampling grid does not drift
    time.sleep(loggstep - ((time.time() - starttime) % loggstep))
    newtime = time.time()
    if newtime > (starttime + 60 * i):   # once a minute: write the .csv and push the latest sample to InfluxDB
        dfaq.to_csv(starttimestr + '_logger.csv', index=False)
        print("wrote file")
        i = i + 1
        # push the most recent readings as line protocol: measurement,tag field=value
        # (pmout and bmeout are still in scope here, so they must not be deleted earlier in the loop)
        data = "Temperature,host=bme280 Air=" + str(round(bmeout[0], 1))
        write_api.write(bucket, org, data)
        data = "Humidity,host=bme280 Air=" + str(round(bmeout[2], 0))
        write_api.write(bucket, org, data)
        data = "Pressure,host=bme280 Air=" + str(round(bmeout[1], 0))
        write_api.write(bucket, org, data)
        data = "PM10,host=SDS011 Air=" + str(round(pmout[1], 1))
        write_api.write(bucket, org, data)
        data = "PM25,host=SDS011 Air=" + str(round(pmout[0], 1))
        write_api.write(bucket, org, data)
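
The script above imports Point and WritePrecision but never uses them, building raw line-protocol strings by hand instead. As a hedged alternative sketch (write_sample is a hypothetical helper, reusing the write_api, bucket and org defined above), the same data can be written with the Point builder from influxdb_client, which handles escaping and lets you attach an explicit timestamp:

from datetime import datetime, timezone
from influxdb_client import Point, WritePrecision

def write_sample(write_api, bucket, org, pmout, bmeout):
    """Push one logger sample to InfluxDB using Point objects instead of raw line protocol."""
    now = datetime.now(timezone.utc)
    points = [
        Point("Temperature").tag("host", "bme280").field("Air", round(bmeout[0], 1)).time(now, WritePrecision.S),
        Point("Humidity").tag("host", "bme280").field("Air", round(bmeout[2], 0)).time(now, WritePrecision.S),
        Point("Pressure").tag("host", "bme280").field("Air", round(bmeout[1], 0)).time(now, WritePrecision.S),
        Point("PM10").tag("host", "SDS011").field("Air", round(pmout[1], 1)).time(now, WritePrecision.S),
        Point("PM25").tag("host", "SDS011").field("Air", round(pmout[0], 1)).time(now, WritePrecision.S),
    ]
    write_api.write(bucket=bucket, org=org, record=points)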