Jump to content

ngblume

Members
  • Content Count

    6
  • Joined

  • Last visited

Community Reputation

0 Neutral
  1. Hallo Richard, vielen Dank für deine Antwort! ja, ich habe mittlerweile eine Differenzbildung für den Regenwert implementiert und bekommen nun Werte für Regen in den letzten 24h und in der letzten Woche angezeigt... Das Zurücksetzen war auch eher für einen "wirklichen" Reset gedacht; nicht für Tages- oder Wochenwerte. Bzgl. der Datenbank: Der eigentliche Hauptthread zur Software ist hier: https://www.tinkerunity.org/forum/index.php/topic,4389.0.html Dort geht es vor allem um die Frage, wie man das ganze als Daemon laufen lassen kann, quasi als Dienst (in Windows-Sprache)... Ich habe mittlerweile die DB gewechselt und benutze InfluxDB, welche explizit für Timeseries-Data gedacht ist (also für Daten, die gegen die Zeit aufgezeichnet werden). ist nicht unbedingt nötig bei diesen Datenmengen, aber ich wollte es eh mal ausprobieren. Zum Darstellen benutze ich Grafana, welche direkt InfluxDB als Datenquelle einbinden kann. Den ganzen aktuellen Code vom Python Daemon findest du direkt hier.. #!/usr/bin/env python3 # -*- coding: utf-8 -*- # Author: NIELS GÖRAN BLUME # eMail: ngblume@gmail.com # DAEMON VERISON HOST = "localhost" PORT = 4223 UID = "Er2" # Outdoor Weather Bricklet ID #InfluxDB access data DB_HOST = "########" DB_USER = "########" DB_PASSWD = "########" DB_DB = "########" from tinkerforge.ip_connection import IPConnection from tinkerforge.bricklet_outdoor_weather import BrickletOutdoorWeather import sys import os import datetime import time import logging import argparse import daemon import signal from pidlockfile import PIDLockFile # Import for InfluxDB connection from influxdb import InfluxDBClient # CONSTANTS # Stations are 10000 + identifier; Sensors are 20000 + identifier !!! 
info_data = {
    # Outdoor-001 station.  last_update is backdated 60 s so the very first
    # packet after startup is not rejected by the 45 s duplicate filter.
    10086: {'last_update': datetime.datetime.now() - datetime.timedelta(seconds=60),
            'nodes_id': 1002,
            'locations_id': 2002,
            'sensors_id': [4003, 4004, 4005, 4006, 4007, 4008, 4009],
            'types_id': [3001, 3002, 3003, 3004, 3005, 3006, 3007],
            },
    # Indoor-001 station
    20038: {'last_update': datetime.datetime.now() - datetime.timedelta(seconds=60),
            'nodes_id': 1001,
            'locations_id': 2001,
            'sensors_id': [4001, 4002],
            'types_id': [3001, 3002],
            },
}

# Module-level handles shared between the daemon main loop and the callbacks.
logger, ipcon, ow = None, None, None


def cb_station_data(identifier, temperature, humidity, wind_speed, gust_speed,
                    rain, wind_direction, battery_low):
    """Callback for outdoor-station packets: log them and hand off to the DB writer."""
    global logger
    # Get current timestamp
    ts = datetime.datetime.now()
    timestamp = ts.strftime('%Y-%m-%d %H:%M:%S.%f')
    # Stations are mapped into the 10000+ identifier range (see CONSTANTS note)
    identifier = 10000 + identifier
    # The bricklet reports tenths of a unit for temperature/wind/rain
    values = [temperature / 10.0, humidity, wind_speed / 10.0, gust_speed / 10.0,
              rain / 10.0, wind_direction, battery_low]
    logger.info(str(identifier) + " - TS: " + str(timestamp) + " - Data: " +
                str(values[0]) + "°C, " + str(values[1]) + "%RH, " +
                str(values[2]) + "m/s, " + str(values[3]) + "m/s, " +
                str(values[4]) + "mm, " + str(values[5]) + ", " + str(values[6]))
    # Insert data via separate shared function
    insert_values_into_db(identifier, ts, values)


def cb_sensor_data(identifier, temperature, humidity):
    """Callback for indoor-sensor packets: log them and hand off to the DB writer."""
    global logger
    ts = datetime.datetime.now()
    timestamp = ts.strftime('%Y-%m-%d %H:%M:%S.%f')
    # Sensors are mapped into the 20000+ identifier range
    identifier = 20000 + identifier
    values = [temperature / 10.0, humidity]
    logger.info(str(identifier) + " - TS: " + str(timestamp) + " - Data: " +
                str(values[0]) + "°C, " + str(values[1]) + "%RH")
    insert_values_into_db(identifier, ts, values)


def insert_values_into_db(identifier, ts, values):
    """Write one measurement set for *identifier* to InfluxDB.

    Unknown identifiers are ignored, and data arriving less than 45 s after
    the previously accepted packet is dropped (the radio link delivers the
    same packet more than once in quick succession).
    """
    global logger
    global info_data
    # FIX: use a None sentinel instead of the 'NA' string, and drop the
    # unused `timestamp`/`types` locals left over from the MySQL variant.
    sensor_info = info_data.get(identifier)
    if sensor_info is None:
        logger.info("Unknown sensor in use !!")
        return
    # Check if at least 45 seconds have elapsed since the last update
    # to avoid double postings.
    if ts < sensor_info['last_update'] + datetime.timedelta(seconds=45):
        logger.info("Data not new enough !!!")
        return
    logger.info("New data received")
    # NOTE(review): a fresh client per packet matches the original behaviour
    # but could be hoisted into do_some_work() — confirm before changing.
    dbclient = InfluxDBClient(DB_HOST, 8086, DB_USER, DB_PASSWD, DB_DB)
    sensors = sensor_info['sensors_id']
    json_body = [{
        "measurement": "weather",
        "tags": {
            "sensor-type": "Tinkerforge",
            "node_id": sensor_info['nodes_id'],
            "locations_id": sensor_info['locations_id'],
        },
        # Field keys in the InfluxDB line protocol are strings; the sensor
        # ids are ints, so convert explicitly instead of relying on the
        # client to coerce them.
        "fields": {str(sensor_id): value
                   for sensor_id, value in zip(sensors, values)},
    }]
    logger.info("Write points: {0}".format(json_body))
    dbclient.write_points(json_body, time_precision='ms')
    # Remember when this station/sensor was last accepted.
    info_data[identifier]['last_update'] = ts
    logger.info("New entry sent to DB")


def do_some_work(args):
    """Daemon main loop: set up logging, connect to brickd, register the data
    callbacks, then emit a heartbeat log line once a minute forever."""
    global logger
    global ipcon
    global ow
    logger = logging.getLogger(args.daemon_name)
    logger.setLevel(logging.INFO)
    fh = logging.FileHandler(args.log_file)
    fh.setLevel(logging.INFO)
    log_format = '%(asctime)s|%(levelname)s|%(message)s'
    fh.setFormatter(logging.Formatter(log_format))
    logger.addHandler(fh)
    # List known stations and sensors
    logger.info("Listing all known stations and sensors including their node_ids")
    for key, val in info_data.items():
        logger.info("{} = {}".format(key, val))
    # Open connection to Bricks and Bricklets
    ipcon = IPConnection()      # Create IP connection
    ipcon.connect(HOST, PORT)   # Don't use the device before ipcon is connected
    ow = BrickletOutdoorWeather(UID, ipcon)  # Create device object
    # Enable and register both data callbacks
    ow.set_station_callback_configuration(True)
    ow.set_sensor_callback_configuration(True)
    ow.register_callback(ow.CALLBACK_STATION_DATA, cb_station_data)
    ow.register_callback(ow.CALLBACK_SENSOR_DATA, cb_sensor_data)
    while True:
        logger.info("Heartbeat (1/min)")
        time.sleep(60)


def f_start(args):
    """'start' command: detach into a daemon context and run the main loop."""
    if args.verbose:
        print("{0}: starting...".format(args.daemon_name))
        print("{0}: pid_file = {1}".format(args.daemon_name, args.pid_file))
        print("{0}: log_file = {1}".format(args.daemon_name, args.log_file))
    with daemon.DaemonContext(
            working_directory=args.working_directory,
            umask=0o002,
            pidfile=PIDLockFile(args.pid_file, timeout=2.0),
            stdout=open(args.stdout_file, "a"),
            stderr=open(args.stderr_file, "a")):
        # Main loop of the daemon
        do_some_work(args)


def f_stop(args):
    """'stop' command: SIGTERM the process recorded in the pid lock file."""
    if args.verbose:
        print("{0}: stopping...".format(args.daemon_name))
    plf = PIDLockFile(args.pid_file)
    pid = plf.is_locked()
    if pid:
        os.kill(pid, signal.SIGTERM)
    else:
        print("{0}: NOT running".format(args.daemon_name))


def f_restart(args):
    """'restart' command: stop, then start again.

    NOTE(review): no wait between stop and start — the old pid lock may still
    be held when f_start runs; the 2 s PIDLockFile timeout covers short gaps.
    """
    f_stop(args)
    f_start(args)


def f_status(args):
    """'status' command: report whether the pid lock file is held."""
    plf = PIDLockFile(args.pid_file)
    pid = plf.is_locked()
    if pid:
        print("{0}: running, PID = {1}".format(args.daemon_name, pid))
    else:
        print("{0}: NOT running".format(args.daemon_name))


if __name__ == "__main__":
    here = os.path.abspath(os.path.dirname(__file__))
    base_name = os.path.basename(__file__).split('.')[0]
    # To avoid dealing with permissions and to simplify this example
    # setting working directory, pid file and log file location etc.
    # to the directory where the script is located.
Normally these files # go to various subdirectories of /var # working directory, normally /var/lib/<daemon_name> working_directory = here # log file, normally /var/log/<daemon_name>.log log_file = os.path.join(here, base_name + ".log") # pid lock file, normally /var/run/<daemon_name>.pid pid_file = os.path.join(here, base_name + ".pid") # stdout, normally /var/log/<daemon_name>.stdout stdout_file = os.path.join(here, base_name + ".stdout") # stderr, normally /var/log/<daemon_name>.stderr stderr_file = os.path.join(here, base_name + ".stderr") parser = argparse.ArgumentParser( description="Minimalistic example of using python-daemon with pidlockfile" ) parser.add_argument( "-v", "--verbose", help="print additional messages to stdout", action="store_true" ) parser.set_defaults(working_directory=working_directory) parser.set_defaults(log_file=log_file) parser.set_defaults(pid_file=pid_file) parser.set_defaults(stdout_file=stdout_file) parser.set_defaults(stderr_file=stderr_file) parser.set_defaults(daemon_name=base_name) subparsers = parser.add_subparsers(title="commands") sp_start = subparsers.add_parser("start", description="start daemon") sp_start.set_defaults(func=f_start) sp_stop = subparsers.add_parser("stop", description="stop daemon") sp_stop.set_defaults(func=f_stop) sp_restart = subparsers.add_parser("restart", description="restart daemon") sp_restart.set_defaults(func=f_restart) sp_status = subparsers.add_parser("status", description="check daemon status") sp_status.set_defaults(func=f_status) # Parse as entered args = parser.parse_args() args.func(args) Dieses Programm ist wie folgt strukturiert: 1. Mittels "python3 script.py start" kann es gestartet werden. "python3 script.py status" und "python3 script.py stop" bieten einfache Optionen im laufenden Betrieb. 2. Für beide Typen von Stationen werden Call-Back Funktionen erstellt, die jeweils bei Empfang eine Pakets von einem dieser beiden Stationstypen aufgelöst wird. 
Diese Funktionen formatieren und loggen die empfangenen Daten und übergeben sie an die Funktion "insert_values_into_db". Diese Funktion prüft, ob die Daten ausreichend aktuell sind und die entsprechenden Sensoren registriert sind. Wenn dies zutrifft, werden die Daten in die DB eingefügt. 3. Die main Funktion des Programms ist am Ende in einer While-Loop, die einmal pro Minute läuft und einen Heartbeat in die Logfiles schreibt. Du kannst die Funktion "insert_values_into_db" beliebig modifizieren um die Daten bspw. nur lokal zu speichern, oder lokal zu buffern. Zum Aufsetzen von InfluxDB und Grafana kann ich folgende Tutorials empfehlen: https://thingsmatic.com/2017/03/02/influxdb-and-grafana-for-sensor-time-series/ https://www.circuits.dk/datalogger-example-using-sense-hat-influxdb-grafana/ http://www.andremiller.net/content/grafana-and-influxdb-quickstart-on-ubuntu Hoffe, dass das hilft.. Grüße Niels Göran P.S.: Hier ein paar Screenshots von Grafana im Anhang...
  2. Hallo Nic, hallo zusammen, ich habe jetzt mal das ganze in einen Daemon gegossen, auch wenn dafür ein paar Global Variables in Python nötig waren, damit es funktioniert... Würde mich freuen, wenn ihr mal drüber schaut und vielleicht noch ein paar Ideen zur Verbesserung oder Vereinfachung habt... Das Ergebniss des Loggings zur DB könnt ihr euch hier anschauen: http://ngblume.com/weather/ (momentan sehr simples Design.. History Data und ähnliches kommt in den nächsten Wochen...) Inspiriert haben mich folgende Seiten: https://dpbl.wordpress.com/2017/02/12/a-tutorial-on-python-daemon/ https://github.com/aigo9/python-daemon-example Grüße Niels Göran #!/usr/bin/env python3 # -*- coding: utf-8 -*- # Author: NIELS GÖRAN BLUME # eMail: ngblume@gmail.com # DAEMON VERISON HOST = "localhost" PORT = 4223 UID = "Er2" # Outdoor Weather Bricklet ID DB_HOST = "****" DB_USER = "****" DB_PASSWD = "****" DB_DB = "****" from tinkerforge.ip_connection import IPConnection from tinkerforge.bricklet_outdoor_weather import BrickletOutdoorWeather import sys import os import mysql.connector as mc import datetime import time import logging import argparse import daemon import signal from pidlockfile import PIDLockFile # CONSTANTS # Stations are 10000 + identifier; Sensors are 20000 + identifier !!! 
info_data = {
    # Outdoor-001 station.  FIX: backdate last_update by 60 s (as the later
    # InfluxDB variant does) so the very first packet after startup is not
    # rejected by the 45 s duplicate filter.
    10086: {'last_update': datetime.datetime.now() - datetime.timedelta(seconds=60),
            'nodes_id': 1002,
            'locations_id': 2002,
            'sensors_id': [4003, 4004, 4005, 4006, 4007, 4008, 4009],
            'types_id': [3001, 3002, 3003, 3004, 3005, 3006, 3007],
            },
    # Indoor-001 station
    20038: {'last_update': datetime.datetime.now() - datetime.timedelta(seconds=60),
            'nodes_id': 1001,
            'locations_id': 2001,
            'sensors_id': [4001, 4002],
            'types_id': [3001, 3002],
            },
}

# Module-level handles shared between the daemon main loop and the callbacks.
logger, cursor, connection, add_meas, ipcon, ow = None, None, None, None, None, None


def cb_station_data(identifier, temperature, humidity, wind_speed, gust_speed,
                    rain, wind_direction, battery_low):
    """Callback for outdoor-station packets: log them and hand off to the DB writer."""
    global logger
    # Get current timestamp
    ts = datetime.datetime.now()
    timestamp = ts.strftime('%Y-%m-%d %H:%M:%S.%f')
    # Stations are mapped into the 10000+ identifier range (see CONSTANTS note)
    identifier = 10000 + identifier
    # The bricklet reports tenths of a unit for temperature/wind/rain
    values = [temperature / 10.0, humidity, wind_speed / 10.0, gust_speed / 10.0,
              rain / 10.0, wind_direction, battery_low]
    logger.info(str(identifier) + " - TS: " + str(timestamp) + " - Data: " +
                str(values[0]) + "°C, " + str(values[1]) + "%RH, " +
                str(values[2]) + "m/s, " + str(values[3]) + "m/s, " +
                str(values[4]) + "mm, " + str(values[5]) + ", " + str(values[6]))
    # Insert data via separate shared function
    insert_values_into_db(identifier, ts, values)


def cb_sensor_data(identifier, temperature, humidity):
    """Callback for indoor-sensor packets: log them and hand off to the DB writer."""
    global logger
    ts = datetime.datetime.now()
    timestamp = ts.strftime('%Y-%m-%d %H:%M:%S.%f')
    # Sensors are mapped into the 20000+ identifier range
    identifier = 20000 + identifier
    values = [temperature / 10.0, humidity]
    logger.info(str(identifier) + " - TS: " + str(timestamp) + " - Data: " +
                str(values[0]) + "°C, " + str(values[1]) + "%RH")
    insert_values_into_db(identifier, ts, values)


def insert_values_into_db(identifier, ts, values):
    """Insert one measurement set into the MySQL `meas` table.

    Unknown identifiers are ignored, and data arriving less than 45 s after
    the previously accepted packet is dropped (the radio link delivers the
    same packet more than once in quick succession).
    """
    global logger
    global cursor
    global connection
    global add_meas
    global info_data
    # Format suitable for a MySQL DATETIME column (microseconds stripped)
    timestamp = ts.strftime('%Y-%m-%d %H:%M:%S')
    # FIX: use a None sentinel instead of the 'NA' string
    sensor_info = info_data.get(identifier)
    if sensor_info is None:
        logger.info("Unknown sensor in use !!")
        return
    # Check if at least 45 seconds have elapsed since the last update
    # to avoid double postings.
    if ts < sensor_info['last_update'] + datetime.timedelta(seconds=45):
        logger.info("Data not new enough !!!")
        return
    logger.info("New data received")
    sensors = sensor_info['sensors_id']
    types = sensor_info['types_id']
    # One row per (sensor, type, value) triple
    for sensor_id, type_id, value in zip(sensors, types, values):
        data_meas = {
            'timestamp': timestamp,
            'nodes_id': sensor_info['nodes_id'],
            'locations_id': sensor_info['locations_id'],
            'sensors_id': sensor_id,
            'types_id': type_id,
            'value': value,
        }
        try:
            cursor.execute(add_meas, data_meas)
        except mc.Error as err:
            logger.info("Problem while inserting into DB")
            logger.info("Something went wrong: {}".format(err))
    # Make sure data is committed to the database
    connection.commit()
    # Remember when this station/sensor was last accepted.
    info_data[identifier]['last_update'] = ts
    logger.info("New entry sent to DB")


def do_some_work(args):
    """Daemon main loop: set up logging and the DB connection, connect to
    brickd, register the data callbacks, then heartbeat once a minute forever."""
    global logger
    global ipcon
    global cursor
    global connection
    global ow
    global add_meas
    logger = logging.getLogger(args.daemon_name)
    logger.setLevel(logging.INFO)
    fh = logging.FileHandler(args.log_file)
    fh.setLevel(logging.INFO)
    log_format = '%(asctime)s|%(levelname)s|%(message)s'
    fh.setFormatter(logging.Formatter(log_format))
    logger.addHandler(fh)
    # List known stations and sensors
    logger.info("Listing all known stations and sensors including their node_ids")
    for key, val in info_data.items():
        logger.info("{} = {}".format(key, val))
    try:
        # Connect to DB
        connection = mc.connect(host=DB_HOST, user=DB_USER,
                                passwd=DB_PASSWD, db=DB_DB)
        logger.info("Database connection successful")
    except mc.Error as e:
        logger.info("Error %d: %s" % (e.args[0], e.args[1]))
        # FIX: the original logged the error and fell through (sys.exit was
        # commented out), then crashed on connection.cursor() with
        # connection still None.  Abort the daemon instead.
        raise
    # Check server version (to test connection)
    cursor = connection.cursor()
    cursor.execute("SELECT VERSION()")
    row = cursor.fetchone()
    # FIX: log a formatted message instead of a tuple repr
    logger.info("server version: %s", row[0])
    # Prepared INSERT statement reused for every measurement row
    add_meas = ("INSERT INTO meas "
                "(timestamp, nodes_id, locations_id, sensors_id, types_id, value) "
                "VALUES (%(timestamp)s, %(nodes_id)s, %(locations_id)s, "
                "%(sensors_id)s, %(types_id)s, %(value)s)")
    # Open connection to Bricks and Bricklets
    ipcon = IPConnection()      # Create IP connection
    ipcon.connect(HOST, PORT)   # Don't use the device before ipcon is connected
    ow = BrickletOutdoorWeather(UID, ipcon)  # Create device object
    # Enable and register both data callbacks
    ow.set_station_callback_configuration(True)
    ow.set_sensor_callback_configuration(True)
    ow.register_callback(ow.CALLBACK_STATION_DATA, cb_station_data)
    ow.register_callback(ow.CALLBACK_SENSOR_DATA, cb_sensor_data)
    while True:
        logger.info("Heartbeat (1/min)")
        time.sleep(60)


def f_start(args):
    """'start' command: detach into a daemon context and run the main loop."""
    if args.verbose:
        print("{0}: starting...".format(args.daemon_name))
        print("{0}: pid_file = {1}".format(args.daemon_name, args.pid_file))
        print("{0}: log_file = {1}".format(args.daemon_name, args.log_file))
    with daemon.DaemonContext(
            working_directory=args.working_directory,
            umask=0o002,
            pidfile=PIDLockFile(args.pid_file, timeout=2.0),
            stdout=open(args.stdout_file, "a"),
            stderr=open(args.stderr_file, "a")):
        # Main loop of the daemon
        do_some_work(args)


def f_stop(args):
    """'stop' command: SIGTERM the process recorded in the pid lock file."""
    if args.verbose:
        print("{0}: stopping...".format(args.daemon_name))
    plf = PIDLockFile(args.pid_file)
    pid = plf.is_locked()
    if pid:
        os.kill(pid, signal.SIGTERM)
    else:
        print("{0}: NOT running".format(args.daemon_name))


def f_restart(args):
    """'restart' command: stop, then start again."""
    f_stop(args)
    f_start(args)


def f_status(args):
    """'status' command: report whether the pid lock file is held."""
    plf = PIDLockFile(args.pid_file)
    pid = plf.is_locked()
    if pid:
        print("{0}: running, PID = {1}".format(args.daemon_name, pid))
    else:
        print("{0}: NOT running".format(args.daemon_name))


if __name__ == "__main__":
    here = os.path.abspath(os.path.dirname(__file__))
    base_name = os.path.basename(__file__).split('.')[0]
    # To avoid dealing with permissions and to simplify this example
    # setting working directory, pid file and log file location etc.
    # to the directory where the script is located. Normally these files
    # go to various subdirectories of /var
    # working directory, normally /var/lib/<daemon_name>
    working_directory = here
    # log file, normally /var/log/<daemon_name>.log
    log_file = os.path.join(here, base_name + ".log")
    # pid lock file, normally /var/run/<daemon_name>.pid
    pid_file = os.path.join(here, base_name + ".pid")
    # stdout, normally /var/log/<daemon_name>.stdout
    stdout_file = os.path.join(here, base_name + ".stdout")
    # stderr, normally /var/log/<daemon_name>.stderr
    stderr_file = os.path.join(here, base_name + ".stderr")
    parser = argparse.ArgumentParser(
        description="Minimalistic example of using python-daemon with pidlockfile"
    )
    parser.add_argument(
        "-v", "--verbose", help="print additional messages to stdout",
        action="store_true"
    )
    parser.set_defaults(working_directory=working_directory)
    parser.set_defaults(log_file=log_file)
    parser.set_defaults(pid_file=pid_file)
    parser.set_defaults(stdout_file=stdout_file)
    parser.set_defaults(stderr_file=stderr_file)
    parser.set_defaults(daemon_name=base_name)
    subparsers = parser.add_subparsers(title="commands")
    sp_start = subparsers.add_parser("start", description="start daemon")
    sp_start.set_defaults(func=f_start)
    sp_stop = subparsers.add_parser("stop", description="stop daemon")
    sp_stop.set_defaults(func=f_stop)
    sp_restart = subparsers.add_parser("restart", description="restart daemon")
    sp_restart.set_defaults(func=f_restart)
    sp_status = subparsers.add_parser("status", description="check daemon status")
    sp_status.set_defaults(func=f_status)
    # Parse as entered and dispatch to the selected command
    args = parser.parse_args()
    args.func(args)
  3. Hallo Nic, danke für die Antwort! Die Seite sieht sehr gut aus (besonders zusammen mit systemd: https://www.loggly.com/blog/new-style-daemons-python/), leider scheitere ich immer noch an einer Stelle im Daemon: "While True:"... In dem Beispiel auf der verlinkten Seite wird dort etwas konkretes gemacht (Uhrzeit loggen) und dann mittels "time.sleep(2)" einfach zwei Sekunden gewartet. In meinem Fall brauche ich ja eigentlich genau diese While True Loop gar nicht, da alles über Callbacks nur passiert, wenn neue Daten empfangen wurden. Eine scheinbar mögliche, aber für mich komisch wirkende Lösung ist hier gezeigt: https://github.com/giometti/beaglebone_home_automation_blueprints/blob/master/chapter_09/smart_card/smart_card.py Dort wird in der While True Loop auch nichts gemacht, außer 277,7777 Stunden zu warten, oder wenn der Daemon anderweitig beendet wird oder Befehle erhält. Sicher eine Lösung, aber ich dachte, Python hätte da vielleicht etwas eleganteres im Angebot (vielleicht Richtung "time.sleep(-1)" für unendliches Schlafen). Ich werde es jetzt erstmal mit einem Daemon und einer While True Loop versuchen, in der ich einfach nur ein "time.sleep(120)" verwende. Sprich Aufwachen alle 2 Minuten und wieder schlafen (evtl. noch Zeitstempel loggen). Mal schauen, wie das funktioniert... Grüße Niels Göran
  4. Hallo hwsoft, vielen Dank für deine Antwort. Die Idee mittels eines Warten auf User Input den Main Prozess offen zu halten und die Abwicklung sonst über Callbacks zu handhaben, habe ich aus den Beispielen entnommen. Daher dachte ich, könnte ich auf Sleep und ähnliches verzichten. Und alles rein Callback basiert laufen lassen... Der Code ist schon in einem anderen Thread von mir zur Hardware des Outdoor Bricklets gepostet (geht um mehrfach auftretende Callbacks) (https://www.tinkerunity.org/forum/index.php/topic,4388.0.html). Zur Sicherheit nochmal hier: #!/usr/bin/env python # -*- coding: utf-8 -*- HOST = "localhost" PORT = 4223 UID = "Er2" # Outdoor Weather Bricklet ID DB_HOST = "****" DB_USER = "****" DB_PASSWD = "****" DB_DB = "****" from tinkerforge.ip_connection import IPConnection from tinkerforge.bricklet_outdoor_weather import BrickletOutdoorWeather import sys import mysql.connector as mc import datetime # Stations are 10000 + identifier; Sensors are 20000 + identifier !!! info_data = { 10086: {'last_update': datetime.datetime.now(), 'nodes_id': 1002, 'locations_id': 2002, 'sensors_id': [4003, 4004, 4005, 4006, 4007, 4008, 4009], 'types_id': [3001, 3002, 3003, 3004, 3005, 3006, 3007],}, # Outdoor-001 station 20038: {'last_update': datetime.datetime.now(), 'nodes_id': 1001, 'locations_id': 2001, 'sensors_id': [4001, 4002], 'types_id': [3001, 3002],}, # Indoor-001 station } # Callback function for station data callback def cb_station_data(identifier, temperature, humidity, wind_speed, gust_speed, rain, wind_direction, battery_low): # Get current timestamp ts = datetime.datetime.now() timestamp = ts.strftime('%Y-%m-%d %H:%M:%S.%f') # Correct identifier identifier = 10000 + identifier # Combine all values into a single list values = [temperature/10.0, humidity, wind_speed/10.0, gust_speed/10.0, rain/10.0, wind_direction, battery_low] # Printing for debugging print(str(identifier) + " - TS: " + str(timestamp) + " - Data: " + str(values[0]) + "°C, " + 
str(values[1]) + "%RH, " + str(values[2]) + "m/s, " + str(values[3]) + "m/s, " + str(values[4]) + "mm, " + str(values[5]) + ", " + str(values[6])) # Insert data via sep function insert_values_into_db(identifier, ts, values) # Callback function for sensor data callback def cb_sensor_data(identifier, temperature, humidity): # Get current timestamp ts = datetime.datetime.now() timestamp = ts.strftime('%Y-%m-%d %H:%M:%S.%f') # Correct identifier identifier = 20000 + identifier # Combine all values into a single list values = [temperature/10.0, humidity] # Printing for debugging print(str(identifier) + " - TS: " + str(timestamp) + " - Data: " + str(values[0]) + "°C, " + str(values[1]) + "%RH") # Insert data via sep function insert_values_into_db(identifier, ts, values) # Common shared fuction to insert station as well as sensor data in to DB def insert_values_into_db(identifier, ts, values): # Format suitable for MySQL timestamp = ts.strftime('%Y-%m-%d %H:%M:%S') # Obtain info for sensor for sensor dict sensor_info = info_data.get(identifier, 'NA') if (sensor_info == 'NA'): print("Unknown sensor in use !!") else: # Check if at least 45 seconds have elapsed since last update to avoid double postings if (ts >= sensor_info.get('last_update') + datetime.timedelta(seconds=45)): # Extract lists of sensors and types from sensor_info sensors = sensor_info.get('sensors_id') types = sensor_info.get('types_id') # Loop over all sensors for i in range(len(sensors)): data_meas = { 'timestamp': timestamp, 'nodes_id': sensor_info.get('nodes_id'), 'locations_id': sensor_info.get('locations_id'), 'sensors_id': sensors[i], 'types_id': types[i], 'value': values[i], } # Insert into DB try: cursor.execute(add_meas, data_meas) except mc.Error as err: print("Problem while inserting into DB") print("Something went wrong: {}".format(err)) # Make sure data is committed to the database connection.commit() # Set new update time as last_update in sensor_data info_data[identifier]['last_update'] = ts 
print("Updated!") if __name__ == "__main__": # List known stations and sensors print("Listing all known stations and sensors including their node_ids") print("") for key,val in info_data.items(): print("{} = {}".format(key, val)) print("") try: connection = mc.connect (host = DB_HOST, user = DB_USER, passwd = DB_PASSWD, db = DB_DB) # Connect to DB print("Database connection successful") except mc.Error as e: print("Error %d: %s" % (e.args[0], e.args[1])) sys.exit(1) # Check server version (to test connection) cursor = connection.cursor() cursor.execute ("SELECT VERSION()") row = cursor.fetchone() print("server version:", row[0]) # Define insert object add_meas = ("INSERT INTO meas (timestamp, nodes_id, locations_id, sensors_id, types_id, value) VALUES (%(timestamp)s, %(nodes_id)s, %(locations_id)s, %(sensors_id)s, %(types_id)s, %(value)s)") # Open connection to Bricks and Bricklets ipcon = IPConnection() # Create IP connection ow = BrickletOutdoorWeather(UID, ipcon) # Create device object ipcon.connect(HOST, PORT) # Connect to brickd # Don't use device before ipcon is connected # Enable station data callbacks ow.set_station_callback_configuration(True) # Enable sensor data callbacks ow.set_sensor_callback_configuration(True) # Register station data callback to function cb_station_data ow.register_callback(ow.CALLBACK_STATION_DATA, cb_station_data) # Register sensor data callback to function cb_sensor_data ow.register_callback(ow.CALLBACK_SENSOR_DATA, cb_sensor_data) input("Press key to exit\n") # Use input() in Python 3 ipcon.disconnect() cursor.close() connection.close() Einzige Änderungen seit dem Post: Ein Atexit Event wurde hinzugefügt und die letzten drei Befehle nach dort ausgelagert, sodass die main jetzt wirklich mit dem "input" endet.. Grüße Niels Göran
  5. Hallo zusammen, irgendwie scheine ich hier etwas auf dem Schlauch zu stehen... Ich habe das Python-Beispiel für das Outdoor Weather Bricklet erweitert und alles funktioniert wunderbar auf einem RPi mit zwei Callbacks und einem "input()" (Python 3) in der main, damit das Programm dauerhaft läuft (zumindest so mein Verständnis)... Allerdings würde ich das ganze jetzt gerne dauerhaft auf einem Raspberry Pi laufen lassen, direkt beim Start.. Ich finde Möglichkeiten, das Python Programm zu starten, aber scheinbar bedeutet die "input()"-Funktion direkt wieder das Ende des Skripts... Das Internet hat mir so ziemlich alles vorgeschlagen: Threads, Sleep, ... Callen mit "&" dahinter... mittels nohup... Was sind hier sinnvolle Ansätze, bzw. gut erprobte und funktionsfähige Vorgehensweisen? Mir scheint, als ob das nicht unüblich ist, aber irgendwie doch umfassende Arbeiten erfordert.. Vielen Dank! Grüße Niels Göran
  6. Hallo zusammen, ich bastele gerade aus der Outdoor Weather Station und dem zugehörigen Bricklet eine Wetter-Logging Anwendung. Dabei haben sich einige Fragen ergeben: 1. Beim Erfassen der gesendeten Infos von der Outdoor Wetter Station und dem Indoor Sensor scheint es gerade so zu sein, dass teilweise identische Infos mehrfach ankommen, bzw. mehrere Callbacks auslösen (ungefähr halbe Sekunde verzögert). Das Bild zeigt die Ergebnisse, nachdem ich eine Überprüfung eingebaut habe, ob seit dem letzten Update 45 Sekunden vergangen sind. Nur wenn dies der Fall ist, wird geupdated in der DB ("Updated!")... Liegt dies an mir oder ist das normal und muss User-seitig abgefangen werden? Source-Code in Python basiert auf dem Beispiel und hängt an. 2. Wie funktioniert der Regenmesser? (Tropfen Zähler, oder Gewicht?) Muss man den Zurücksetzen oder setzt der sich automatisch zurück? 3. Ist es normal, dass sich die beiden Stangen gegeneinander verdrehen lassen? Oder habt ihr da einfach eine der Montage-Klemmen direkt drüber gebaut um das Drehen zu verhindern? Danke!! Grüße Niels Göran ==================== #!/usr/bin/env python # -*- coding: utf-8 -*- HOST = "localhost" PORT = 4223 UID = "Er2" # Outdoor Weather Bricklet ID DB_HOST = "****" DB_USER = "****" DB_PASSWD = "****" DB_DB = "****" from tinkerforge.ip_connection import IPConnection from tinkerforge.bricklet_outdoor_weather import BrickletOutdoorWeather import sys import mysql.connector as mc import datetime # Stations are 10000 + identifier; Sensors are 20000 + identifier !!! 
info_data = {
    # Outdoor-001 station
    10086: {'last_update': datetime.datetime.now(),
            'nodes_id': 1002,
            'locations_id': 2002,
            'sensors_id': [4003, 4004, 4005, 4006, 4007, 4008, 4009],
            'types_id': [3001, 3002, 3003, 3004, 3005, 3006, 3007]},
    # Indoor-001 station
    20038: {'last_update': datetime.datetime.now(),
            'nodes_id': 1001,
            'locations_id': 2001,
            'sensors_id': [4001, 4002],
            'types_id': [3001, 3002]},
}


def cb_station_data(identifier, temperature, humidity, wind_speed, gust_speed,
                    rain, wind_direction, battery_low):
    """Handle one outdoor-station broadcast: print it and push it to the DB."""
    now = datetime.datetime.now()
    stamp = now.strftime('%Y-%m-%d %H:%M:%S.%f')
    # Stations are mapped into the 10000+ identifier range
    identifier = 10000 + identifier
    # Scale the tenth-of-a-unit raw readings into real units
    readings = [temperature / 10.0, humidity, wind_speed / 10.0,
                gust_speed / 10.0, rain / 10.0, wind_direction, battery_low]
    print(str(identifier) + " - TS: " + str(stamp) + " - Data: " +
          str(readings[0]) + "°C, " + str(readings[1]) + "%RH, " +
          str(readings[2]) + "m/s, " + str(readings[3]) + "m/s, " +
          str(readings[4]) + "mm, " + str(readings[5]) + ", " + str(readings[6]))
    insert_values_into_db(identifier, now, readings)


def cb_sensor_data(identifier, temperature, humidity):
    """Handle one indoor-sensor broadcast: print it and push it to the DB."""
    now = datetime.datetime.now()
    stamp = now.strftime('%Y-%m-%d %H:%M:%S.%f')
    # Sensors are mapped into the 20000+ identifier range
    identifier = 20000 + identifier
    readings = [temperature / 10.0, humidity]
    print(str(identifier) + " - TS: " + str(stamp) + " - Data: " +
          str(readings[0]) + "°C, " + str(readings[1]) + "%RH")
    insert_values_into_db(identifier, now, readings)


def insert_values_into_db(identifier, ts, values):
    """Insert one batch of readings into the `meas` table.

    Skips unknown identifiers and (silently) any packet that arrives within
    45 s of the previously accepted one for the same station/sensor.
    """
    # MySQL DATETIME format, microseconds stripped
    stamp = ts.strftime('%Y-%m-%d %H:%M:%S')
    sensor_info = info_data.get(identifier, 'NA')
    if sensor_info == 'NA':
        print("Unknown sensor in use !!")
        return
    # Duplicate filter: require at least 45 s since the last accepted update
    if ts < sensor_info.get('last_update') + datetime.timedelta(seconds=45):
        return
    for idx, (sensor_id, type_id) in enumerate(zip(sensor_info.get('sensors_id'),
                                                   sensor_info.get('types_id'))):
        row = {
            'timestamp': stamp,
            'nodes_id': sensor_info.get('nodes_id'),
            'locations_id': sensor_info.get('locations_id'),
            'sensors_id': sensor_id,
            'types_id': type_id,
            'value': values[idx],
        }
        try:
            cursor.execute(add_meas, row)
        except mc.Error as err:
            print("Problem while inserting into DB")
            print("Something went wrong: {}".format(err))
    # Make sure the batch is committed to the database
    connection.commit()
    info_data[identifier]['last_update'] = ts
    print("Updated!")


if __name__ == "__main__":
    # Show the configured stations/sensors so the operator can check them
    print("Listing all known stations and sensors including their node_ids")
    print("")
    for key, val in info_data.items():
        print("{} = {}".format(key, val))
    print("")
    try:
        # Connect to DB
        connection = mc.connect(host=DB_HOST, user=DB_USER,
                                passwd=DB_PASSWD, db=DB_DB)
        print("Database connection successful")
    except mc.Error as e:
        print("Error %d: %s" % (e.args[0], e.args[1]))
        sys.exit(1)
    # Check server version (to test connection)
    cursor = connection.cursor()
    cursor.execute("SELECT VERSION()")
    row = cursor.fetchone()
    print("server version:", row[0])
    # Prepared INSERT statement reused for every measurement row
    add_meas = ("INSERT INTO meas (timestamp, nodes_id, locations_id, "
                "sensors_id, types_id, value) VALUES (%(timestamp)s, "
                "%(nodes_id)s, %(locations_id)s, %(sensors_id)s, "
                "%(types_id)s, %(value)s)")
    # Open connection to Bricks and Bricklets
    ipcon = IPConnection()                   # Create IP connection
    ow = BrickletOutdoorWeather(UID, ipcon)  # Create device object
    ipcon.connect(HOST, PORT)                # Don't use the device before this
    # Enable and register both data callbacks
    ow.set_station_callback_configuration(True)
    ow.set_sensor_callback_configuration(True)
    ow.register_callback(ow.CALLBACK_STATION_DATA, cb_station_data)
    ow.register_callback(ow.CALLBACK_SENSOR_DATA, cb_sensor_data)
    input("Press key to exit\n")  # Use input() in Python 3
    ipcon.disconnect()
    cursor.close()
    connection.close()
×
×
  • Create New...