From d87ee56a34804c78d658d2e93b2e19a1a2870de5 Mon Sep 17 00:00:00 2001 From: JAlcocerT Date: Fri, 19 Jan 2024 19:03:07 +0100 Subject: [PATCH 1/5] adding proys ideas --- _posts/2024-12-31-DS18B20-python-timescale.md | 462 ++++++++++ _posts/2024-12-31-GY273.md | 43 + _posts/2024-12-31-MLX90614.md | 852 ++++++++++++++++++ _posts/2024-12-31-dht11-python-influxdb.md | 476 ++++++++++ _posts/2024-12-31-dht22-py-sql-mongodb.md | 589 ++++++++++++ _posts/2024-12-31-dht22-python-ES.md | 143 +++ _posts/2024-12-31-dht22-python-redis.md | 126 +++ _posts/2024-12-31-dht22.md | 2 +- _posts/2024-12-31-esp32.md | 563 ++++++++++++ _posts/2024-12-31-ky008.md | 39 + _posts/2024-12-31-rpi_wifi_ethernet_bridge.md | 408 +++++++++ 11 files changed, 3702 insertions(+), 1 deletion(-) create mode 100644 _posts/2024-12-31-DS18B20-python-timescale.md create mode 100644 _posts/2024-12-31-GY273.md create mode 100644 _posts/2024-12-31-MLX90614.md create mode 100644 _posts/2024-12-31-dht11-python-influxdb.md create mode 100644 _posts/2024-12-31-dht22-py-sql-mongodb.md create mode 100644 _posts/2024-12-31-dht22-python-ES.md create mode 100644 _posts/2024-12-31-dht22-python-redis.md create mode 100644 _posts/2024-12-31-esp32.md create mode 100644 _posts/2024-12-31-ky008.md create mode 100644 _posts/2024-12-31-rpi_wifi_ethernet_bridge.md diff --git a/_posts/2024-12-31-DS18B20-python-timescale.md b/_posts/2024-12-31-DS18B20-python-timescale.md new file mode 100644 index 0000000..9d9d6e0 --- /dev/null +++ b/_posts/2024-12-31-DS18B20-python-timescale.md @@ -0,0 +1,462 @@ +--- +title: "Raspberry Pi: Sensors with TimescaleDB & Grafana" +date: 2024-12-30T23:20:21+01:00 +draft: true +tags: ["Self-Hosting"] +--- + + +# Raspberry Pi - IoT Project with TimeScaleDB + + +The DS18B20 can detect: -55 to 125 Celsius + +* Connection: + * Black cable - gnd + * Red - 3.3 to 5v + * Yellow - data --> to pin 7 + * It needs a resistor. 
A 4.7K Ohm Resistor (Colour Code: Yellow Purple Red Gold) + * or 4.7k/10k resistor between data and 3.3v + + +* These videos were of great help to me: + + * + * + + +## DS18B20 + +**RPi 1-wire must be enabled!!!** + +connect the wiring and go to /sys/bus/w1/devices and find the folder with the serial number, then select the w1_slave file + +the file should contain a YES in the first line. + +The video from ReefSpy helped me a lot with the initial setup : + +And also to get the general idea of the Python code that can be used. + +### Reading DS18B20 with python + +```py +import os +import glob +import time + +os.system('modprobe w1-gpio') +os.system('modprobe w1-therm') + +base_dir = '/sys/bus/w1/devices/' +device_folder = glob.glob(base_dir + '28*')[0] +device_file = device_folder + '/w1_slave' + +def read_temp_raw(): + f = open(device_file, 'r') + lines = f.readlines() + f.close() + return lines + + +def read_temp(scale): + lines = read_temp_raw() + while lines[0].strip()[-3:] != 'YES': + time.sleep(0.2) + lines = read_temp_raw() + equals_pos = lines[1].find('t=') + if equals_pos != -1: + temp_string = lines[1][equals_pos+2:] + temp_c = float(temp_string) / 1000.0 + temp_f = temp_c * 9.0 / 5.0 + 32.0 + if scale == "F": + return "{:.1f}".format(temp_f) + if scale =="C": + return "{:.1f}".format(temp_c) + else: + return temp_c, temp_f + +while True: + print(read_temp("C")) + time.sleep(1) +``` + +Execute it with: python3 dsb.py + +### Pushing DS18B20 Data to Timescale + +```py +import os +import glob +import time +import psycopg2 + +# Configure TimescaleDB connection +db_params = { + 'host': 'timescaledb', # Use the service name defined in your docker-compose.yml + 'port': 5432, # Default PostgreSQL port for TimescaleDB + 'user': 'myuser', # Replace with your PostgreSQL username + 'password': 'mypassword', # Replace with your PostgreSQL password + 'database': 'mydb' # Replace with the name of your PostgreSQL database +} + +# Create a connection +conn = 
psycopg2.connect(**db_params) +cur = conn.cursor() + +# Create the necessary table if it doesn't exist +create_table_query = ''' + CREATE TABLE IF NOT EXISTS ds18b20_sensor ( + time TIMESTAMPTZ NOT NULL, + temperature FLOAT + ); +''' +cur.execute(create_table_query) +conn.commit() + +os.system('modprobe w1-gpio') +os.system('modprobe w1-therm') + +base_dir = '/sys/bus/w1/devices/' +device_folder = glob.glob(base_dir + '28*')[0] +device_file = device_folder + '/w1_slave' + +def read_temp_raw(): + with open(device_file, 'r') as f: + lines = f.readlines() + return lines + +def read_temp(scale): + lines = read_temp_raw() + while lines[0].strip()[-3:] != 'YES': + time.sleep(0.2) + lines = read_temp_raw() + + equals_pos = lines[1].find('t=') + if equals_pos != -1: + temp_c = float(lines[1][equals_pos+2:]) / 1000.0 + return convert_temp(temp_c, scale) + +def convert_temp(temp_c, scale): + if scale == TEMP_SCALE_F: + return temp_c * 9.0 / 5.0 + 32.0 + elif scale == TEMP_SCALE_C: + return temp_c + else: + raise ValueError("Invalid temperature scale") + +# Constants for Temperature Conversion +TEMP_SCALE_F = "F" +TEMP_SCALE_C = "C" + +while True: + ds18b20_temp = read_temp(TEMP_SCALE_C) + + if ds18b20_temp is not None: + insert_data_query = f''' + INSERT INTO ds18b20_sensor (time, temperature) + VALUES (NOW(), {ds18b20_temp}); + ''' + cur.execute(insert_data_query) + + conn.commit() + print("Data sent to TimescaleDB") + time.sleep(3) + +# Close the connection when done +cur.close() +conn.close() + +``` + + + + +```dockerfile +# Use an official Python runtime as the base image +FROM python:3.8-slim + +# Set the working directory in the container +WORKDIR /app + +# Install system-level dependencies +RUN apt-get update && \ + apt-get install -y python3-dev python3-pip libpq-dev gcc && \ + python3 -m pip install --upgrade pip setuptools wheel + +# Copy the local code to the container +COPY dsb2.py /app/ + +# Install additional dependencies +RUN pip install psycopg2-binary + +# 
Run the Python script +CMD ["python", "dsb2.py"] + +``` + +**docker build -t dsb_to_timescale .** + + +The Stack to run the Python script and push the data to timescale - all in Docker: + +```yml +version: '3' +services: + timescaledb: + image: timescale/timescaledb:latest-pg13 # Adjust the image tag as needed + container_name: timescaledb_dsb_container + environment: + POSTGRES_DB: mydb + POSTGRES_USER: myuser + POSTGRES_PASSWORD: mypassword + volumes: + - timescaledb_data_dsb:/var/lib/postgresql/data + ports: + - "5432:5432" + networks: + - dsb_network + + dsb_sensor_timescale: + image: dsb_to_timescale # Use your pre-built image name + container_name: dsb_to_timescale_container + privileged: true # Run the container in privileged mode (GPIO access) + depends_on: + - timescaledb + devices: + - /dev/gpiomem + networks: + - dsb_network + +networks: + dsb_network: + +volumes: + timescaledb_data_dsb: +``` + +I have tagged and uploaded it to my DockerHub so that it works with timescaleDB: + +docker tag dsb_to_timescale docker.io/fossengineer/iot:dsb_sensor_to_timescale + +docker push docker.io/fossengineer/iot:dsb_sensor_to_timescale + +Check it at + + + +docker run -it --rm --network=dsbtimescale_dsb_network postgres psql -h timescaledb_dsb_container -U myuser -d mydb --username=myuser + +\l + +psql -U myuser -d mydb + +\d + +```sql +SELECT * FROM ds18b20_sensor; +SELECT MAX(temperature) FROM ds18b20_sensor; +SELECT * FROM ds18b20_sensor ORDER BY time DESC LIMIT 1; +``` + + + + + to pin 7 + +It needs a resistor. A 4.7K Ohm Resistor (Colour Code: Yellow Purple Red Gold) + + + + + + +or 4.7k 10k resistor between data and 3.3v + + + + + + +## DHT11 + +Previously I was using the DHT11 with InfluxDB, was curious about adapting that project to accept the TimescaleBD as well. 
+ + +**Data to Pin7 - GPIO4** +5v +gnd + +```py +import Adafruit_DHT +import time +import psycopg2 + +DHT_SENSOR = Adafruit_DHT.DHT11 +DHT_PIN = 4 + +# Configure TimescaleDB connection +db_params = { + 'host': 'timescaledb', # Use the service name defined in your docker-compose.yml + 'port': 5432, # Default PostgreSQL port for TimescaleDB + 'user': 'myuser', # Replace with your PostgreSQL username + 'password': 'mypassword', # Replace with your PostgreSQL password + 'database': 'mydb' # Replace with the name of your PostgreSQL database +} + +# Create a connection +conn = psycopg2.connect(**db_params) +cur = conn.cursor() + +# Create the necessary table if it doesn't exist +create_table_query = ''' + CREATE TABLE IF NOT EXISTS dht_sensor ( + time TIMESTAMPTZ NOT NULL, + temperature FLOAT, + humidity FLOAT + ); +''' +cur.execute(create_table_query) +conn.commit() + +while True: + humidity, temperature = Adafruit_DHT.read(DHT_SENSOR, DHT_PIN) + if humidity is not None and temperature is not None: + insert_data_query = f''' + INSERT INTO dht_sensor (time, temperature, humidity) + VALUES (NOW(), {temperature}, {humidity}); + ''' + cur.execute(insert_data_query) + conn.commit() + print("Data sent to TimescaleDB") + else: + print("Sensor failure. Check wiring.") + time.sleep(3) + +# Close the connection when done +cur.close() +conn.close() +``` + + +```dockerfile +# Use an official Python runtime as the base image +FROM python:3.8-slim + +# Set the working directory in the container +WORKDIR /app + +# Install system-level dependencies +RUN apt-get update && \ + apt-get install -y python3-dev python3-pip libpq-dev gcc && \ + python3 -m pip install --upgrade pip setuptools wheel + +# Copy the local code to the container +COPY your_modified_python_script.py /app/ + +# Install additional dependencies +RUN pip install Adafruit_DHT psycopg2-binary + +# Run the Python script +CMD ["python", "dht11_python_timescale.py"] + +``` + +docker build -t dht_sensor_timescale . 
+ + +```yml +version: '3' +services: + timescaledb: + image: timescale/timescaledb:latest-pg13 # Adjust the image tag as needed + container_name: timescaledb_container + environment: + POSTGRES_DB: mydb + POSTGRES_USER: myuser + POSTGRES_PASSWORD: mypassword + volumes: + - timescaledb_data:/var/lib/postgresql/data + ports: + - "5432:5432" + networks: + - app_network + + dht_sensor_timescale: + image: dht_sensor_timescale # Use your pre-built image name + container_name: dht_sensor_timescale_container + privileged: true # Run the container in privileged mode (GPIO access) + depends_on: + - timescaledb + networks: + - app_network + +networks: + app_network: + +volumes: + timescaledb_data: +``` + +```yml +version: '3' +services: + timescaledb: + image: timescale/timescaledb:latest-pg13 # Adjust the image tag as needed + container_name: timescaledb_container + environment: + POSTGRES_DB: mydb + POSTGRES_USER: myuser + POSTGRES_PASSWORD: mypassword + volumes: + - timescaledb_data:/var/lib/postgresql/data + ports: + - "5432:5432" + networks: + - app_network + + dht_sensor_timescale: + image: dht_sensor_timescale # Use your pre-built image name + container_name: dht_sensor_timescale_container + depends_on: + - timescaledb + networks: + - app_network + +networks: + app_network: + +volumes: + timescaledb_data: + +``` + + +Checking the data ingestion: + + +docker run -it --rm --network=dht_timescaledb_app_network postgres psql -h timescaledb_container -U myuser -d mydb --username=myuser + + +```sql +SELECT * FROM dht_sensor; +SELECT MAX(temperature) FROM dht_sensor; +``` + + +list the databases available + +\l + +If you want to list all tables and their associated schemas, you can use: + + + +\dt + + +See the schema of the table: + +\d+ dht_sensor diff --git a/_posts/2024-12-31-GY273.md b/_posts/2024-12-31-GY273.md new file mode 100644 index 0000000..3c685f1 --- /dev/null +++ b/_posts/2024-12-31-GY273.md @@ -0,0 +1,43 @@ +--- +title: "Raspberry Pi Pico W:" +date: 
2024-12-30T23:20:21+01:00 +draft: true +tags: ["Self-Hosting"] +--- + + + +Yes, you can use the GY-273 sensor with the Raspberry Pi Pico. **The GY-273 is a magnetometer sensor** that measures the strength and direction of magnetic fields. It can be used to create a digital compass, or to detect the presence of magnetic objects. + +The GY-273 is compatible with the Raspberry Pi Pico's I2C bus. To connect the GY-273 to the Pico, you will need to use a four-wire cable. The following table shows the connections: + +GY-273 Raspberry Pi Pico +VCC 3.3V +GND GND +SCL SCL +SDA SDA + + + +The GY-273 is a digital compass module based on the QMC5883L chip. It is a triple-axis magnetometer that can measure the Earth's magnetic field in three dimensions. The GY-273 has a measuring range of ±1.3 to 8 gauss and a resolution of 0.01 gauss. It is powered by a 3 to 5V supply and communicates using the I2C protocol. + +The GY-273 is commonly used in robotics, drones, and other applications that require accurate orientation sensing. It can also be used in navigation systems, such as GPS receivers. + +Here are some of the features of the GY-273 sensor: + +Triple-axis magnetometer: Measures the Earth's magnetic field in three dimensions. +Measuring range of ±1.3 to 8 gauss: Can measure weak to strong magnetic fields. +Resolution of 0.01 gauss: Accurately measures small changes in the magnetic field. +Powered by a 3 to 5V supply: Widely compatible with different power sources. +Communicates using the I2C protocol: Easy to interface with microcontrollers. +If you are looking for a precise and versatile magnetometer sensor, the GY-273 is a good option to consider. + +Here are some of the applications of the GY-273 sensor: + +Robotics: Used to determine the orientation of robots, such as self-driving cars and drones. +Navigation: Used in GPS receivers and other navigation systems to determine the position of an object. +Avionics: Used in aircraft to determine the attitude of the aircraft. 
+Marine: Used in ships and boats to determine the heading of the vessel. +Surveying: Used to measure the magnetic field of the Earth. +Geology: Used to study the magnetic properties of rocks and minerals. +I hope this helps! \ No newline at end of file diff --git a/_posts/2024-12-31-MLX90614.md b/_posts/2024-12-31-MLX90614.md new file mode 100644 index 0000000..3eb6f62 --- /dev/null +++ b/_posts/2024-12-31-MLX90614.md @@ -0,0 +1,852 @@ +--- +title: "Raspberry Pi: Temp (non contact) & Grafana" +date: 2024-12-30T23:20:21+01:00 +draft: true +tags: ["Self-Hosting"] +--- + + +The MLX90614 GY-906 is an infrared (IR) temperature sensor module commonly used for non-contact temperature measurements. It's also known as a pyrometer or non-contact thermometer. This sensor is produced by Melexis and can measure the temperature of an object without making physical contact with it, which makes it useful in various applications. + +Here are some key features and information about the MLX90614 GY-906 sensor: + +Working Principle: The sensor measures the infrared radiation emitted by an object to determine its temperature. All objects emit thermal radiation based on their temperature, and this radiation falls within the infrared spectrum. The sensor detects this radiation and converts it into an electrical signal that can be used to calculate the object's temperature. + +Accuracy and Range: The MLX90614 GY-906 sensor can offer a relatively high level of accuracy for non-contact temperature measurements. It has a wide temperature measurement range, typically spanning from **-70°C to 380°C** (-94°F to 716°F), depending on the specific model and calibration. + +Two Sensors in One: The sensor actually contains two separate sensors within a single package: one to measure the temperature of the object being measured (object temperature) and another to measure the temperature of the sensor itself (ambient temperature). 
This dual-sensor setup helps improve accuracy, as it compensates for changes in the sensor's ambient temperature. + +Communication Interface: The MLX90614 GY-906 sensor can communicate with microcontrollers or other devices using the **I2C (Inter-Integrated Circuit) communication protocol**. This makes it relatively easy to integrate the sensor into various projects and systems. + + +It already has a built-in 3.3V voltage stabilizer and pull-up resistors of the I2c bus to be powered by 4.7 kΩ resistors. We also know that the ranges of measured temperatures are: -40°C…85°C for the ambient temperature and -40°C…380°C for the temperature of the tested object. The module has a measurement accuracy of 0.5°C for the measurement range To=0°C…60°C ( object temperature ), Ta=0°C…50°C ( surrounding temperature ) with a resolution of 0.2°C. The largest measurement error we will encounter is ±4°C, but for very high temperatures. + + +## MLX90614 IR Temp Sensor + + + + + + + +Vin to 3.3V - +GND to gnd +SCL to GPIO3 (SCL) +SDA to GPIO2 (SDA) + + +sudo raspi-config +interfacing options +enable I2C + +reboot + +i2cdetect -y 1 + +you should see something different than -- in at least one of the buckets. 
+ +Install Adafruit-blinka +and also: adafruit-circuitpython-mlx90614 + + + +```py + +# This is the code to run the MLX90614 Infrared Thermal Sensor +# You'll need to import the package "Adafruit Blinka" +# You'll need to import the package "adafruit-circuitpython-mlx90614/" +# You'll need to enable i2c on the pi https://pimylifeup.com/raspberry-pi-i2c/ +# Reboot after enabling i2C +# Sensor is connected to 3.3V, GND and the i2C pins 3(SDA) and 5(SCL) + +import board +import busio as io +import adafruit_mlx90614 + +from time import sleep + +i2c = io.I2C(board.SCL, board.SDA, frequency=100000) +mlx = adafruit_mlx90614.MLX90614(i2c) + +ambientTemp = "{:.2f}".format(mlx.ambient_temperature) +targetTemp = "{:.2f}".format(mlx.object_temperature) + +sleep(1) + +print("Ambient Temperature:", ambientTemp, "°C") +print("Target Temperature:", targetTemp,"°C") +``` + + +**to run inside the container** + +```yml + +version: '3.8' + +services: + mlx_rpi: + image: mlx_sensor_app_influxdb + privileged: true + command: tail -f /dev/null #keep it running +``` + + + + +```dockerfile +# Use an official Python runtime as the base image +FROM python:3.8-slim + +# Set the working directory in the container +WORKDIR /app + +# Install system-level dependencies +RUN apt-get update && \ + apt-get install -y python3-dev python3-pip && \ + python3 -m pip install --upgrade pip setuptools wheel + +# Copy the local code to the container +COPY mlx.py /app/ + +# Install additional dependencies +RUN pip install board adafruit-circuitpython-mlx90614 Adafruit-Blinka RPi.GPIO influxdb + +# Run the Python script +CMD ["python", "mlx.py"] +``` + + +When saved, just run: docker build -t mlx_sensor_app_influxdb . 
+ +```yml + +version: '3.8' + +services: + my_python_dev_container: + image: mlx_sensor_app_influxdb #python:3.10 + privileged: true + command: tail -f /dev/null #keep it running +``` + +I have tagged and uploaded it to my DockerHub so that it works with InfluxDB: + +docker tag mlx_sensor_to_influxdb docker.io/fossengineer/iot:mlx_sensor_to_influxdb + +docker push docker.io/fossengineer/iot:mlx_sensor_to_influxdb + +Check it at + + +Try that it works with: + + +```py +import board +import busio as io +import adafruit_mlx90614 +from influxdb import InfluxDBClient +from time import sleep, strftime + +i2c = io.I2C(board.SCL, board.SDA, frequency=100000) +mlx = adafruit_mlx90614.MLX90614(i2c) + +# Configure InfluxDB connection +influx_client = InfluxDBClient(host='influxdb', port=8086) + +# Try to create the database, or use it if it already exists +database_name = 'sensor_data' +existing_databases = influx_client.get_list_database() + +if {'name': database_name} not in existing_databases: + influx_client.create_database(database_name) + print(f"Database '{database_name}' created.") + +influx_client.switch_database(database_name) + +while True: + ambientTemp = mlx.ambient_temperature + targetTemp = mlx.object_temperature + + if ambientTemp is not None and targetTemp is not None: + data = [ + { + "measurement": "mlx_sensor", + "tags": {}, + "time": strftime('%Y-%m-%dT%H:%M:%SZ'), + "fields": { + "ambient_temperature": ambientTemp, + "target_temperature": targetTemp + } + } + ] + influx_client.write_points(data) + print("Ambient Temperature:", ambientTemp, "°C") + print("Target Temperature:", targetTemp,"°C") + print("Data sent to InfluxDB") + else: + print("Sensor failure. 
Check wiring.") + + sleep(1) +``` + +```yml +version: '3' +services: + mlx_sensor: + image: mlx_sensor_to_influxdb + container_name: mlx_sensor_app + privileged: true + #command: tail -f /dev/null #keep it running for testing + dns: + - 8.8.8.8 + - 8.8.4.4 + depends_on: + - influxdb + + influxdb: + image: influxdb:1.8 #for arm32 + container_name: influxdb + ports: + - "8086:8086" + volumes: + - influxdb_data:/var/lib/influxdb + environment: + - INFLUXDB_DB=sensor_data + - INFLUXDB_ADMIN_USER=admin + - INFLUXDB_ADMIN_PASSWORD=mysecretpassword + + + grafana: + image: grafana/grafana:9.5.7 #was using this one instead of latest for stability + container_name: grafana3 + ports: + - "3000:3000" + depends_on: + - influxdb + volumes: + - grafana_data:/var/lib/grafana # Add this line to specify the volume + dns: + - 8.8.8.8 + - 8.8.4.4 + +volumes: + influxdb_data: + grafana_data: # Define the volume for Grafana + + +``` + + +```py +import board +import busio as io +import adafruit_mlx90614 +from influxdb import InfluxDBClient +from time import sleep, strftime +import os # Import the os module + +i2c = io.I2C(board.SCL, board.SDA, frequency=100000) +mlx = adafruit_mlx90614.MLX90614(i2c) + +# Get values from environment variables (with default values if not set) +INFLUX_HOST = os.environ.get('INFLUX_HOST', 'influxdb') +INFLUX_PORT = int(os.environ.get('INFLUX_PORT', 8086)) +DATABASE_NAME = os.environ.get('DATABASE_NAME', 'sensor_data') +MEASUREMENT = os.environ.get('MEASUREMENT', 'mlx_sensor') +SLEEP_TIME = int(os.environ.get('SLEEP_TIME', 1)) + +# Configure InfluxDB connection +influx_client = InfluxDBClient(host=INFLUX_HOST, port=INFLUX_PORT) + +# Try to create the database, or use it if it already exists +existing_databases = influx_client.get_list_database() + +if {'name': DATABASE_NAME} not in existing_databases: + influx_client.create_database(DATABASE_NAME) + print(f"Database '{DATABASE_NAME}' created.") + +influx_client.switch_database(DATABASE_NAME) + +while True: + 
ambientTemp = mlx.ambient_temperature + targetTemp = mlx.object_temperature + + if ambientTemp is not None and targetTemp is not None: + data = [ + { + "measurement": MEASUREMENT, + "tags": {}, + "time": strftime('%Y-%m-%dT%H:%M:%SZ'), + "fields": { + "ambient_temperature": ambientTemp, + "target_temperature": targetTemp + } + } + ] + influx_client.write_points(data) + print("Ambient Temperature:", ambientTemp, "°C") + print("Target Temperature:", targetTemp,"°C") + print("Data sent to InfluxDB") + else: + print("Sensor failure. Check wiring.") + + sleep(SLEEP_TIME) + +``` + + +**Locally you will do:** + +```yml +version: '3' +services: + mlx_sensor: + image: mlx_sensor_to_influxdb + container_name: mlx_sensor_app + privileged: true + #command: tail -f /dev/null #keep it running for testing + dns: + - 8.8.8.8 + - 8.8.4.4 + depends_on: + - influxdb + environment: + - INFLUX_HOST=reisikei.duckdns.org + - INFLUX_PORT=8086 + - DATABASE_NAME=sensor_data + - MEASUREMENT=mlx_sensor_sec + - SLEEP_TIME=1 + + influxdb: + image: influxdb:1.8 #for arm32 + container_name: influxdb + ports: + - "8086:8086" + volumes: + - influxdb_data:/var/lib/influxdb + environment: + - INFLUXDB_DB=sensor_data + - INFLUXDB_ADMIN_USER=admin + - INFLUXDB_ADMIN_PASSWORD=mysecretpassword + + + grafana: + image: grafana/grafana:9.5.7 #was using this one instead of latest for stability + container_name: grafana3 + ports: + - "3000:3000" + depends_on: + - influxdb + volumes: + - grafana_data:/var/lib/grafana # Add this line to specify the volume + dns: + - 8.8.8.8 + - 8.8.4.4 + +volumes: + influxdb_data: + grafana_data: # Define the volume for Grafana + +``` + + + + + +Grafana can connect to: http://influxdb:8086 or to http://yoursubdomain.duckdns.org:8086 (if you included the DNS in the stack) + +Go to the Influx container and then: + +Influx +USE sensor_data +show measurements +SELECT * FROM mlx_sensor #this is the measurement where we are pushing this data + + + + +http://reisipi.duckdns.org 
+http://192.168.3.101:8086 + + + +**and using influxdb in the cloud** + + + + +USD 0.002 +/mebibyte + +versus GCP E2 small That's about $0.02 hourly + + + + +```yml +version: '3' +services: + mlx_sensor: + image: mlx_sensor_app_influxdb + container_name: mlx_sensor_app + privileged: true + dns: + - 8.8.8.8 + - 8.8.4.4 + # depends_on: #make sure influxdb its ready + # - influxdb + environment: + - INFLUX_HOST=reisikei.duckdns.org + - INFLUX_PORT=8086 + - DATABASE_NAME=sensor_data + - MEASUREMENT=mlx_sensor_sec + - SLEEP_TIME=1 + #command: tail -f /dev/null #keep it running for testing + restart: unless-stopped + + + grafana: + image: grafana/grafana:9.5.7 #was using this one instead of latest for stability + container_name: grafana3 + ports: + - "3000:3000" + depends_on: + - influxdb + volumes: + - grafana_data:/var/lib/grafana # Add this line to specify the volume + dns: + - 8.8.8.8 + - 8.8.4.4 + restart: unless-stopped + + +volumes: + grafana_data: # Define the volume for Grafana +``` + + +**in GCP** i was running portainer, influxdb and duckdns +remember that you will need a firewall rule for the TCP port 8086 (influxDB) + + + + + +```yml +version: '3' +services: + + influxdb: + image: influxdb:1.8 + container_name: influxdb + ports: + - "8086:8086" + volumes: + - influxdb_data:/var/lib/influxdb + environment: + - INFLUXDB_DB=sensor_data + - INFLUXDB_ADMIN_USER=admin + - INFLUXDB_ADMIN_PASSWORD=mysecretpassword + + duckdns: + image: lscr.io/linuxserver/duckdns:latest + container_name: duckdns + environment: + - SUBDOMAINS=subdomain1,subdomain2 + - TOKEN=yourtoken + - LOG_FILE=false #optional + volumes: + - duckdns_data:/config #optional + restart: unless-stopped + +volumes: + influxdb_data: + duckdns_data: +``` + +Adding [nginx](https://fossengineer.com/selfhosting-nginx-proxy-manager-docker/) and [duckdns](https://fossengineer.com/selfhosting-nginx-proxy-manager-docker/#https-locally-nginx--duckdns) + +```yml + nginx: + image: 'jc21/nginx-proxy-manager:latest' + 
restart: unless-stopped + container_name: nginx + ports: + - '80:80' # Public HTTP Port + - '443:443' # Public HTTPS Port + - '81:81' # Admin Web Port + volumes: + - nginx_data:/data # - ~/Docker/Nginx/data:/data + - nginx_letsencrypt:/etc/letsencrypt # - ~/Docker/Nginx/letsencrypt:/etc/letsencrypt + networks: + nginx_network: + aliases: + - default + + +volumes: + nginx_data: + nginx_letsencrypt: + +networks: + nginx_network: + internal: true +``` + + +## Grafana + +the grafana model json + + +```json +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 1, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "influxdb", + "uid": "ab3e7768-f67f-4716-86de-36914fe1a3f8" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": 
"bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "9.5.7", + "targets": [ + { + "alias": "ambient", + "datasource": { + "type": "influxdb", + "uid": "ab3e7768-f67f-4716-86de-36914fe1a3f8" + }, + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "measurement": "mlx_sensor", + "orderByTime": "ASC", + "policy": "autogen", + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "ambient_temperature" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + }, + { + "alias": "target", + "datasource": { + "type": "influxdb", + "uid": "ab3e7768-f67f-4716-86de-36914fe1a3f8" + }, + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "hide": false, + "measurement": "mlx_sensor", + "orderByTime": "ASC", + "policy": "autogen", + "refId": "B", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "target_temperature" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + } + ], + "title": "Temps MLX", + "type": "timeseries" + }, + { + "datasource": { + "type": "influxdb", + "uid": "ab3e7768-f67f-4716-86de-36914fe1a3f8" + }, + "description": "", + "fieldConfig": { + "defaults": { + "mappings": [], + "thresholds": { + "mode": "percentage", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "orange", + "value": 70 + }, + { + "color": "red", + "value": 85 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 10 + }, + "id": 2, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true + }, + "pluginVersion": "9.5.7", + "targets": [ + { + 
"alias": "ambient", + "datasource": { + "type": "influxdb", + "uid": "ab3e7768-f67f-4716-86de-36914fe1a3f8" + }, + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "measurement": "mlx_sensor", + "orderByTime": "ASC", + "policy": "autogen", + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "ambient_temperature" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + }, + { + "alias": "target", + "datasource": { + "type": "influxdb", + "uid": "ab3e7768-f67f-4716-86de-36914fe1a3f8" + }, + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "hide": false, + "measurement": "mlx_sensor", + "orderByTime": "ASC", + "policy": "autogen", + "refId": "B", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "target_temperature" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + } + ], + "title": "Temps MLX", + "type": "gauge" + } + ], + "refresh": "", + "schemaVersion": 38, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-30m", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "MLX", + "uid": "e77dd736-6406-4888-9007-37a87b1db155", + "version": 1, + "weekStart": "" +} +``` \ No newline at end of file diff --git a/_posts/2024-12-31-dht11-python-influxdb.md b/_posts/2024-12-31-dht11-python-influxdb.md new file mode 100644 index 0000000..9907f00 --- /dev/null +++ b/_posts/2024-12-31-dht11-python-influxdb.md @@ -0,0 +1,476 @@ +--- +title: "Raspberry Pi: IoT - Temp and Humidity with DHT11" +date: 2024-12-30T23:20:21+01:00 +draft: true +tags: ["Self-Hosting"] +--- + +# Raspberry Pi together with: Dht11, Python InfluxDB and Docker + +If you already have a RPi at home and a DHT11 sensor, you can perfectly get started with 
this project.
+
+We are going to read Temperature and Humidity data from the sensor, save it into an InfluxDB (*say Hi to time-series DBs*) and display the output in Grafana (*Because terminal is great, but we want to make a cool end to end project*).
+
+And docker? yes, let's put everything together and create a reliable Stack that we can share across any other RPi and forget about dependencies. Let's get started.
+
+**We can use Raspberry Pi 32/64 bits for this project.**
+
+## Python
+
+Credits to for the initial skeleton of the code.
+
+I have adapted it so that instead of printing the values, it will push them to an InfluxDB that we are going to self-host as well.
+
+```py
+import Adafruit_DHT
+import time
+from influxdb import InfluxDBClient
+
+DHT_SENSOR = Adafruit_DHT.DHT11
+DHT_PIN = 4
+
+# Configure InfluxDB connection
+influx_client = InfluxDBClient(host='influxdb', port=8086)
+
+# Try to create the database, or use it if it already exists
+database_name = 'sensor_data'
+existing_databases = influx_client.get_list_database()
+
+if {'name': database_name} not in existing_databases:
+    influx_client.create_database(database_name)
+    print(f"Database '{database_name}' created.")
+
+influx_client.switch_database(database_name)
+
+while True:
+    humidity, temperature = Adafruit_DHT.read(DHT_SENSOR, DHT_PIN)
+    if humidity is not None and temperature is not None:
+        data = [
+            {
+                "measurement": "dht_sensor",
+                "tags": {},
+                "time": time.strftime('%Y-%m-%dT%H:%M:%SZ'),
+                "fields": {
+                    "temperature": temperature,
+                    "humidity": humidity
+                }
+            }
+        ]
+        influx_client.write_points(data)
+        print("Data sent to InfluxDB")
+    else:
+        print("Sensor failure. Check wiring.")
+    time.sleep(3)
+```
+
+You can first try the initial version (that just prints) to check that everything works for you, or just go to the next step.
+ +Remember to save that consistently, for example: your_python_script.py + + +## Docker + +Im a big fan of Docker and the first thing I thought when this worked was to put it in a container. + +For the [Docker image building process](https://fossengineer.com/docker-first-steps-guide-for-data-analytics/#how-to-use-docker-to-containerize-your-application) you will need this dockerfile and of course to [have Docker installed!](https://jalcocert.github.io/RPi/projects/selfhosting_101/) + +### The Dockerfile + +```dockerfile +# Use an official Python runtime as the base image +FROM python:3.8-slim + +# Set the working directory in the container +WORKDIR /app + +# Install system-level dependencies +RUN apt-get update && \ + apt-get install -y python3-dev python3-pip && \ + python3 -m pip install --upgrade pip setuptools wheel + +# Copy the local code to the container +COPY your_python_script.py /app/ + +# Install additional dependencies +RUN pip install Adafruit_DHT influxdb + +# Run the Python script +CMD ["python", "your_python_script.py"] +``` + +When saved, just run: docker build -t dht_sensor_app_influxdb . + +This will create the Docker image that incorporates the Python script above. 
+ +### The Stack + +Deploy this Portainer Stack or Docker-compose to run the Python container with the script, InfluxDB and Grafana for visualization + +```yml +version: '3' +services: + dht_sensor_app: + image: dht_sensor_app_influxdb + container_name: dht_sensor_app + privileged: true + depends_on: + - influxdb + + influxdb: + image: influxdb #:1.8 for arm32 + container_name: influxdb + ports: + - "8086:8086" + volumes: + - influxdb_data:/var/lib/influxdb + environment: + - INFLUXDB_DB=sensor_data + - INFLUXDB_ADMIN_USER=admin + - INFLUXDB_ADMIN_PASSWORD=mysecretpassword + + grafana: + image: grafana/grafana #:9.5.7 was using this one instead of latest for stability + container_name: grafana + ports: + - "3000:3000" + depends_on: + - influxdb + volumes: + - grafana_data:/var/lib/grafana # Add this line to specify the volume + +volumes: + influxdb_data: + grafana_data: # Define the volume for Grafana +``` + + +## InfluxDB + + + +If you go inside the InfluxDB container, you can execute the following to check that everything is working as it should: + +influx +show databases +use sensor_data +show measurements + +```sql +SELECT * FROM dht_sensor +SELECT * FROM dht_sensor ORDER BY time DESC LIMIT 10 +``` + +### Running InfluxDB *in the Cloud* + +And we will expose it with [Cloudflare Tunnels](https://fossengineer.com/selfhosting-cloudflared-tunnel-docker/). 
+ +```yml +version: '3' +services: + + influxdb: + image: influxdb + container_name: influxdb + ports: + - "8086:8086" + volumes: + - influxdb_data:/var/lib/influxdb + environment: + - INFLUXDB_DB=sensor_data + - INFLUXDB_ADMIN_USER=admin + - INFLUXDB_ADMIN_PASSWORD=mysecretpassword + +volumes: + influxdb_data: + +networks: + cloudflare_tunnel: + external: true +``` + + +I have tagged and uploaded it to my DockerHub so that it works with InfluxDB: + +docker tag dht_sensor_appv2 docker.io/fossengineer/iot:dht11_sensor_to_influxdb + +docker push docker.io/fossengineer/iot:dht11_sensor_to_influxdb + +Check it at + +### Connecting the Python Script to InfluxDB *in the Cloud* + + +```py + +import Adafruit_DHT +import time +from influxdb import InfluxDBClient +import logging + +# Configure logging +logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') + +DHT_SENSOR = Adafruit_DHT.DHT11 +DHT_PIN = 4 + +# Configure InfluxDB connection +logging.debug("Configuring InfluxDB connection...") +influx_client = InfluxDBClient(host='influxdb.fossengineer.com', port=8086, #host='192.168.1.50', port=8086 + ssl=True, + verify_ssl=True, + username='TecoT$eko1', + password='CWw7%*!5Mgdf^T' +) + +logging.info("Connected to InfluxDB") + +try: + # Try to create the database, or use it if it already exists + database_name = 'sensor_data' + existing_databases = influx_client.get_list_database() + + if {'name': database_name} not in existing_databases: + influx_client.create_database(database_name) + logging.info(f"Database '{database_name}' created.") + + influx_client.switch_database(database_name) + + while True: + humidity, temperature = Adafruit_DHT.read(DHT_SENSOR, DHT_PIN) + if humidity is not None and temperature is not None: + data = [ + { + "measurement": "dht_sensor", + "tags": {}, + "time": time.strftime('%Y-%m-%dT%H:%M:%SZ'), + "fields": { + "temperature": temperature, + "humidity": humidity + } + } + ] + influx_client.write_points(data) + 
logging.debug("Data sent to InfluxDB") + else: + logging.warning("Sensor failure. Check wiring.") + time.sleep(3) + +except Exception as e: + logging.error(f"An error occurred: {e}") + + +``` + + +```py +import Adafruit_DHT +import time +from influxdb import InfluxDBClient + +DHT_SENSOR = Adafruit_DHT.DHT11 +DHT_PIN = 4 + +# Configure InfluxDB connection +influx_client = InfluxDBClient(host='influxdb.fossengineer.com', port=8086, #host='192.168.1.50', port=8086 + ssl=True, # Enable SSL/TLS encryption + verify_ssl=True, # Verify the SSL certificate (set to False if not required) + username='TecoT$eko1', + password='CWw7%*!5Mgdf^T' +) + +print("Connected to InfluxDB") + +# Try to create the database, or use it if it already exists +database_name = 'sensor_data' +existing_databases = influx_client.get_list_database() + +print("Checking InfluxDB Database list...") + +if {'name': database_name} not in existing_databases: + influx_client.create_database(database_name) + print(f"Database '{database_name}' created.") + +influx_client.switch_database(database_name) + +print("Start sending DHT data...") + +while True: + humidity, temperature = Adafruit_DHT.read(DHT_SENSOR, DHT_PIN) + if humidity is not None and temperature is not None: + data = [ + { + "measurement": "dht_sensor", + "tags": {}, + "time": time.strftime('%Y-%m-%dT%H:%M:%SZ'), + "fields": { + "temperature": temperature, + "humidity": humidity + } + } + ] + influx_client.write_points(data) + print("Data sent to InfluxDB") + else: + print("Sensor failure. Check wiring.") + time.sleep(3) + +``` + + +### Tweaking Python for better Sec-Ops + +This is pretty good, but how about not hard coding passwords in the Python Script? 
+ +Lets use environment variables by changing slightly the Python code: + +```py +import Adafruit_DHT +import time +from influxdb import InfluxDBClient +import os + +DHT_SENSOR = Adafruit_DHT.DHT11 +DHT_PIN = 4 + +# Get InfluxDB credentials from environment variables +influx_host = os.getenv("INFLUXDB_HOST") +influx_port = int(os.getenv("INFLUXDB_PORT")) +influx_dbname = os.getenv("INFLUXDB_DBNAME") +influx_user = os.getenv("INFLUXDB_USER") +influx_password = os.getenv("INFLUXDB_PASSWORD") + +# Configure InfluxDB connection +influx_client = InfluxDBClient(host=influx_host, port=influx_port, + username=influx_user, password=influx_password) + +# Try to create the database, or use it if it already exists +existing_databases = influx_client.get_list_database() + +if {'name': influx_dbname} not in existing_databases: + influx_client.create_database(influx_dbname) + print(f"Database '{influx_dbname}' created.") + +influx_client.switch_database(influx_dbname) + +while True: + humidity, temperature = Adafruit_DHT.read(DHT_SENSOR, DHT_PIN) + if humidity is not None and temperature is not None: + data = [ + { + "measurement": "dht_sensor", + "tags": {}, + "time": time.strftime('%Y-%m-%dT%H:%M:%SZ'), + "fields": { + "temperature": temperature, + "humidity": humidity + } + } + ] + influx_client.write_points(data) + print("Data sent to InfluxDB") + else: + print("Sensor failure. 
Check wiring.") + time.sleep(3) + +``` + +```dockerfile +# Use an official Python runtime as the base image +FROM python:3.8-slim + +# Set the working directory in the container +WORKDIR /app + +# Install system-level dependencies +RUN apt-get update && \ + apt-get install -y python3-dev python3-pip && \ + python3 -m pip install --upgrade pip setuptools wheel + +# Copy the local code to the container +COPY your_python_script.py /app/ + +# Install additional dependencies +RUN pip install Adafruit_DHT influxdb + +# Run the Python script +#CMD ["python", "your_python_script.py"] +``` + +The dockerfile will be the same presented before, just run again the build command: **docker build -t dht11_python_to_influxdb .** + +Or alternatively use: + +```yml +version: "3" +services: + python_app: + build: + context: . + dockerfile: Dockerfile + environment: + - INFLUXDB_HOST= influxdb.yourdomain.com #influxdb to use the local one like before + - INFLUXDB_PORT=8086 + - INFLUXDB_DBNAME=sensor_data + - INFLUXDB_USER=admin + - INFLUXDB_PASSWORD=mysecretpassword + command: ["python", "your_python_script.py"] + command: tail -f /dev/null #keep it running + +``` + +```yml +version: "3" +services: + + python_dht: + container_name: python_dht + image: dht11_python_to_influxdb # Use the name of your pre-built Python image + privileged: true + environment: + - INFLUXDB_HOST=influxdb + - INFLUXDB_PORT=8086 + - INFLUXDB_DBNAME=sensor_data + - INFLUXDB_USER=admin + - INFLUXDB_PASSWORD=mysecretpassword + command: ["python", "your_python_script.py"] + + # depends_on: + # - influxdb + + # influxdb: #this is running in other device, so make sure that the container is running before executing the python one + # image: influxdb:latest + # environment: + # - INFLUXDB_DB=sensor_data + # - INFLUXDB_ADMIN_USER=admin + # - INFLUXDB_ADMIN_PASSWORD=adminpass + # - INFLUXDB_USER=user + # - INFLUXDB_USER_PASSWORD=userpass + +``` + + +## FAQ + +### How to add the InfluxDB Source to Grafana? 
+
+Make sure to use: http://192.device.local.ip:8086/, for me http://localhost:8086 did not work.
+
+### Why privilege flag?
+
+The container needs access to the GPIO port, otherwise, you will observe this error in the container:
+
+Traceback (most recent call last):
+
+  File "dht11_python_timescale.py", line 34, in
+
+    humidity, temperature = Adafruit_DHT.read(DHT_SENSOR, DHT_PIN)
+
+  File "/usr/local/lib/python3.8/site-packages/Adafruit_DHT/common.py", line 81, in read
+
+    return platform.read(sensor, pin)
+
+  File "/usr/local/lib/python3.8/site-packages/Adafruit_DHT/Raspberry_Pi_2.py", line 34, in read
+
+    raise RuntimeError('Error accessing GPIO.')
+
+RuntimeError: Error accessing GPIO.
\ No newline at end of file
diff --git a/_posts/2024-12-31-dht22-py-sql-mongodb.md b/_posts/2024-12-31-dht22-py-sql-mongodb.md
new file mode 100644
index 0000000..1367860
--- /dev/null
+++ b/_posts/2024-12-31-dht22-py-sql-mongodb.md
@@ -0,0 +1,589 @@
+---
+title: "Raspberry Pi: Temp and Humidity with DHT11 and MongoDB"
+date: 2024-12-29T23:20:21+01:00
+draft: true
+tags: ["Self-Hosting"]
+---
+
+# Raspberry Pi together with: Dht11, Python and Docker
+
+With MongoDB -> requires ARM64/x86 version
+
+https://www.dcddcc.com/blog/2018-06-09-building-mongodb-for-32-bit-ARM-on-debian-ubuntu.html
+
+
+
+
+https://hub.docker.com/r/apcheamitru/arm32v7-mongo
+
+docker build -t mongo_db .
+ +GND +5v (or 3v3) +GPIO4 + +## Python + +### Simple print + +```py +import Adafruit_DHT +import time + +# Set the DHT sensor type (DHT22 or DHT11) +sensor_type = Adafruit_DHT.DHT22 + +# Set the GPIO pin where the sensor is connected +gpio_pin = 4 # Change this to the actual GPIO pin number + +try: + while True: + # Read temperature and humidity from the sensor + humidity, temperature = Adafruit_DHT.read_retry(sensor_type, gpio_pin) + + if humidity is not None and temperature is not None: + # Print the values + print(f'Temperature: {temperature:.2f}°C') + print(f'Humidity: {humidity:.2f}%') + else: + print('Failed to retrieve data from the sensor') + + # Delay for a while (in seconds) before the next reading + time.sleep(2) + +except KeyboardInterrupt: + print('Program terminated by user') + +``` + +### With MariaDB + + +```yml + +version: '3' +services: + mariadb: + image: linuxserver/mariadb:arm32v7-10.6.13 #arm32v7-latest but wrong arch + container_name: my-mariadb + environment: + MYSQL_ROOT_PASSWORD: your_root_password_here + MYSQL_DATABASE: mydatabase + MYSQL_USER: your_username_here + MYSQL_PASSWORD: your_password_here + ports: + - "3306:3306" + volumes: + - mariadb_data:/var/lib/mysql + restart: always + +volumes: + mariadb_data: + + + +# CREATE TABLE sensor_data ( +# id INT AUTO_INCREMENT PRIMARY KEY, +# timestamp DATETIME NOT NULL, +# temperature DECIMAL(5, 2) NOT NULL, +# humidity DECIMAL(5, 2) NOT NULL +# ); + + +#mysql -u root -p +#use mydatabase +#SHOW TABLES; +#DESCRIBE sensor_data; +#SELECT * from sensor_data; +#SELECT * FROM sensor_data ORDER BY timestamp DESC; + +``` + +pip install mysql-connector-python + + +**THIS BELOW WORKS AND PUSHES FROM RPI TO DOCKER CONTAINER MARIADB WITHOUT ISSUES< BUT NOT WORKING INSIDE THE CONTAINER** + +```py +import Adafruit_DHT +import time +import mysql.connector + +# Set the DHT sensor type (DHT22 or DHT11) +sensor_type = Adafruit_DHT.DHT22 + +# Set the GPIO pin where the sensor is connected +gpio_pin = 4 # Change 
this to the actual GPIO pin number + +# Database connection configuration +db_config = { + "user": "your_username_here", + "password": "your_password_here", + "host": "localhost", # Change if your MariaDB is on a different host + "database": "mydatabase", # Change to your database name +} + +try: + # Connect to the MariaDB database + connection = mysql.connector.connect(**db_config) + cursor = connection.cursor() + + while True: + # Read temperature and humidity from the sensor + humidity, temperature = Adafruit_DHT.read_retry(sensor_type, gpio_pin) + + if humidity is not None and temperature is not None: + # Print the values + print(f'Temperature: {temperature:.2f}°C') + print(f'Humidity: {humidity:.2f}%') + + # Get the current timestamp + current_time = time.strftime('%Y-%m-%d %H:%M:%S') + + # Insert sensor data into the database with timestamp + insert_query = "INSERT INTO sensor_data (timestamp, temperature, humidity) VALUES (%s, %s, %s)" + data = (current_time, temperature, humidity) + cursor.execute(insert_query, data) + connection.commit() + else: + print('Failed to retrieve data from the sensor') + + # Delay for a while (in seconds) before the next reading + time.sleep(2) + +except KeyboardInterrupt: + print('Program terminated by user') +finally: + # Close the database connection when done + if connection.is_connected(): + cursor.close() + connection.close() + +``` + + +```py + +import os +import Adafruit_DHT +import time +import mysql.connector + +# Set the DHT sensor type (DHT22 or DHT11) +sensor_type = Adafruit_DHT.DHT22 + +# Set the GPIO pin where the sensor is connected +gpio_pin = 4 # Change this to the actual GPIO pin number + +# Read database connection details and table name from environment variables +db_config = { + "user": os.getenv("DB_USER", "default_username"), + "password": os.getenv("DB_PASSWORD", "default_password"), + "host": os.getenv("DB_HOST", "localhost"), + "database": os.getenv("DB_NAME", "mydatabase"), +} +table_name = 
os.getenv("TABLE_NAME", "sensor_data") + +try: + # Connect to the MariaDB database + connection = mysql.connector.connect(**db_config) + cursor = connection.cursor() + + # Create the table if it doesn't exist + create_table_query = f""" + CREATE TABLE IF NOT EXISTS {table_name} ( + id INT AUTO_INCREMENT PRIMARY KEY, + timestamp DATETIME NOT NULL, + temperature DECIMAL(5, 2) NOT NULL, + humidity DECIMAL(5, 2) NOT NULL + ) + """ + cursor.execute(create_table_query) + + while True: + # Read temperature and humidity from the sensor + humidity, temperature = Adafruit_DHT.read_retry(sensor_type, gpio_pin) + + if humidity is not None and temperature is not None: + # Print the values + print(f'Temperature: {temperature:.2f}°C') + print(f'Humidity: {humidity:.2f}%') + + # Get the current timestamp + current_time = time.strftime('%Y-%m-%d %H:%M:%S') + + # Insert sensor data into the database with timestamp + insert_query = f"INSERT INTO {table_name} (timestamp, temperature, humidity) VALUES (%s, %s, %s)" + data = (current_time, temperature, humidity) + cursor.execute(insert_query, data) + connection.commit() + else: + print('Failed to retrieve data from the sensor') + + # Delay for a while (in seconds) before the next reading + time.sleep(2) + +except KeyboardInterrupt: + print('Program terminated by user') +finally: + # Close the database connection when done + if connection.is_connected(): + cursor.close() + connection.close() +``` + + +```py +import Adafruit_DHT +import time +import mysql.connector +import os # Import the os module + +# Set the DHT sensor type (DHT22 or DHT11) +sensor_type = Adafruit_DHT.DHT22 + +# Set the GPIO pin where the sensor is connected +gpio_pin = 4 # Change this to the actual GPIO pin number + +# Read database connection details and table name from environment variables +db_config = { + "user": os.getenv("DB_USER", "default_username"), + "password": os.getenv("DB_PASSWORD", "default_password"), + "host": os.getenv("DB_HOST", "localhost"), + 
"database": os.getenv("DB_NAME", "mydatabase"), +} +table_name = os.getenv("TABLE_NAME", "sensor_data") + +try: + # Connect to the MariaDB database + connection = mysql.connector.connect(**db_config) + cursor = connection.cursor() + + # Create the table if it doesn't exist + create_table_query = f""" + CREATE TABLE IF NOT EXISTS {table_name} ( + id INT AUTO_INCREMENT PRIMARY KEY, + timestamp DATETIME NOT NULL, + temperature DECIMAL(5, 2) NOT NULL, + humidity DECIMAL(5, 2) NOT NULL + ) + """ + cursor.execute(create_table_query) + + while True: + # Read temperature and humidity from the sensor + humidity, temperature = Adafruit_DHT.read_retry(sensor_type, gpio_pin) + + if humidity is not None and temperature is not None: + # Print the values + print(f'Temperature: {temperature:.2f}°C') + print(f'Humidity: {humidity:.2f}%') + + # Get the current timestamp + current_time = time.strftime('%Y-%m-%d %H:%M:%S') + + # Insert sensor data into the database with timestamp + insert_query = f"INSERT INTO {table_name} (timestamp, temperature, humidity) VALUES (%s, %s, %s)" + data = (current_time, temperature, humidity) + cursor.execute(insert_query, data) + connection.commit() + else: + print('Failed to retrieve data from the sensor') + + # Delay for a while (in seconds) before the next reading + time.sleep(2) + +except KeyboardInterrupt: + print('Program terminated by user') +except mysql.connector.Error as e: + print(f"Database error: {e}") +finally: + # Close the database connection when done (if it's defined) + if 'connection' in locals() and connection.is_connected(): + cursor.close() + connection.close() +``` + + +**connects to orange** + +```py + +import os +import Adafruit_DHT +import time +import mysql.connector + +# Set the DHT sensor type (DHT22 or DHT11) +sensor_type = Adafruit_DHT.DHT22 + +# Set the GPIO pin where the sensor is connected +gpio_pin = 4 # Change this to the actual GPIO pin number + +# Read database connection details and table name from environment 
variables +# Database connection configuration +db_config = { + "user": "your_username_here", + "password": "your_password_here", + "host": "192.168.3.200", # Updated host IP + "port": 3306, # Specified port number + "database": "mydatabase", # Change to your database name +} + +table_name = os.getenv("TABLE_NAME", "sensor_data") + +try: + # Connect to the MariaDB database + connection = mysql.connector.connect(**db_config) + cursor = connection.cursor() + + # Create the table if it doesn't exist + create_table_query = f""" + CREATE TABLE IF NOT EXISTS {table_name} ( + id INT AUTO_INCREMENT PRIMARY KEY, + timestamp DATETIME NOT NULL, + temperature DECIMAL(5, 2) NOT NULL, + humidity DECIMAL(5, 2) NOT NULL + ) + """ + cursor.execute(create_table_query) + + while True: + # Read temperature and humidity from the sensor + humidity, temperature = Adafruit_DHT.read_retry(sensor_type, gpio_pin) + + if humidity is not None and temperature is not None: + # Print the values + print(f'Temperature: {temperature:.2f}°C') + print(f'Humidity: {humidity:.2f}%') + + # Get the current timestamp + current_time = time.strftime('%Y-%m-%d %H:%M:%S') + + # Insert sensor data into the database with timestamp + insert_query = f"INSERT INTO {table_name} (timestamp, temperature, humidity) VALUES (%s, %s, %s)" + data = (current_time, temperature, humidity) + cursor.execute(insert_query, data) + connection.commit() + else: + print('Failed to retrieve data from the sensor') + + # Delay for a while (in seconds) before the next reading + time.sleep(2) + +except KeyboardInterrupt: + print('Program terminated by user') +finally: + # Close the database connection when done + if connection.is_connected(): + cursor.close() + connection.close() + +``` + + +```dockerfile + +# Use an official Python runtime as the base image +FROM python:3.8-slim + +# Set the working directory in the container +WORKDIR /app + +# Install system-level dependencies +RUN apt-get update && \ + apt-get install -y python3-dev 
python3-pip && \ + python3 -m pip install --upgrade pip setuptools wheel + +# Copy the local code to the container +COPY sensor_data.py /app/ + +# Install additional dependencies +RUN pip install Adafruit_DHT mysql-connector-python + +# Run the Python script +CMD ["python", "sensor_data.py"] + + +``` + + +docker build -t dht22_sensor_to_mysql . + + +```yml + +version: '3' +services: + mariadb: + image: linuxserver/mariadb:arm32v7-10.6.13 + container_name: my-mariadb + environment: + MYSQL_ROOT_PASSWORD: your_root_password_here + ports: + - "3306:3306" + volumes: + - mariadb_data:/var/lib/mysql + restart: always + + python-app: + build: + context: ./ # Specify the path to your Dockerfile and Python script + container_name: my-python-app + environment: + - DB_USER=mydbuser + - DB_PASSWORD=mydbpassword + - DB_HOST=mariadb # Use the service name defined above + - DB_NAME=mydatabase + - TABLE_NAME=sensor_data + depends_on: + - mariadb + restart: always + command: tail -f /dev/null #keep it running + + +volumes: + mariadb_data: + + +``` + + +```yml +version: '3' +services: + mariadb: + image: mariadb:latest + container_name: my-mariadb + environment: + MYSQL_ROOT_PASSWORD: your_root_password_here + ports: + - "3306:3306" + volumes: + - mariadb_data:/var/lib/mysql + restart: always + + python-app: + image: dht22_sensor_to_mysql # Use the name of your existing Docker image + container_name: my-python-app + environment: + - DB_USER=mydbuser + - DB_PASSWORD=mydbpassword + - DB_HOST=mariadb # Use the service name defined above + - DB_NAME=mydatabase + - TABLE_NAME=sensor_data + depends_on: + - mariadb + restart: always + command: tail -f /dev/null #keep it running + + +volumes: + mariadb_data: + +``` + + + +only th py script + + +```yml + +version: '3' +services: + + + python-app: + image: dht22_sensor_to_mysql # Use the name of your existing Docker image + container_name: py-dht22-mariadb + privileged: true + environment: + - DB_USER=mydbuser + - DB_PASSWORD=mydbpassword + - 
DB_HOST=mariadb # Use the service name defined above + - DB_NAME=mydatabase + - TABLE_NAME=sensor_data + restart: always + command: tail -f /dev/null #keep it running + +``` + + + +### With Mongo + +```py +import Adafruit_DHT +import time +from pymongo import MongoClient + +DHT_SENSOR = Adafruit_DHT.DHT11 +DHT_PIN = 4 + +# Configure MongoDB connection +mongo_client = MongoClient('mongodb://localhost:27017/') +db = mongo_client['sensor_data'] +collection = db['dht_sensor'] + +while True: + humidity, temperature = Adafruit_DHT.read(DHT_SENSOR, DHT_PIN) + if humidity is not None and temperature is not None: + data = { + "timestamp": time.strftime('%Y-%m-%dT%H:%M:%SZ'), + "temperature": temperature, + "humidity": humidity + } + collection.insert_one(data) + print("Data sent to MongoDB") + else: + print("Sensor failure. Check wiring.") + time.sleep(3) +``` + + + + + + + +### Split ARM32 and ARM64/x86 with Cloudflare Tunnels + + +I am a big fan of [Cloudflare Tunnels](https://fossengineer.com/selfhosting-cloudflared-tunnel-docker/) to expose services securely and thought of a solution for the users of RPi 32bits (ARMv7) - Run the python script with the RPi 32 bits and run Mongo DB with a ARM86/x86 device and expose the DB with Cloudflare and a Domain with SSL. 
+
+```yml
+version: '3'
+services:
+
+  dht_sensor_mongo:
+    image: dht_sensor_mongo:latest # Use the name of your custom image
+    container_name: dht_sensor_mongo
+    privileged: true
+
+```
+
+
+```yml
+version: '3'
+services:
+  mongodb:
+    image: mongo:7
+    container_name: mongodb
+    environment:
+      MONGO_INITDB_ROOT_USERNAME: yourusername
+      MONGO_INITDB_ROOT_PASSWORD: yourpassword
+    volumes:
+      - mongodb_data:/data/db
+    ports:
+      - "27017:27017"
+    restart: always
+    command: ["mongod"]
+
+networks:
+  cloudflare_tunnel:
+    external: true
+volumes:
+  mongodb_data:
+
+```
\ No newline at end of file
diff --git a/_posts/2024-12-31-dht22-python-ES.md b/_posts/2024-12-31-dht22-python-ES.md
new file mode 100644
index 0000000..7751d07
--- /dev/null
+++ b/_posts/2024-12-31-dht22-python-ES.md
@@ -0,0 +1,143 @@
+---
+title: "Raspberry Pi: Temp and Humidity with DHT22 and Elasticsearch"
+date: 2024-12-29T23:20:21+01:00
+draft: true
+tags: ["Self-Hosting"]
+---
+
+
+
+
+Python can push data to Elasticsearch. Elasticsearch is a popular open-source search and analytics engine that is often used for storing, searching, and analyzing large volumes of data in real-time. It is designed to handle various types of structured and unstructured data, making it useful for a wide range of applications, including log and event data analysis, full-text search, and more.
+
+To push data to Elasticsearch from Python, you can use the official Elasticsearch Python client library, which provides a convenient way to interact with Elasticsearch from your Python code.
Here's a basic example of how you can push data to Elasticsearch using the Elasticsearch Python client: + + + + + + + +```py +import os +import Adafruit_DHT +import time +from elasticsearch import Elasticsearch + +# Set the DHT sensor type (DHT22 or DHT11) +sensor_type = Adafruit_DHT.DHT22 + +# Set the GPIO pin where the sensor is connected +gpio_pin = 4 # Change this to the actual GPIO pin number + +# Elasticsearch connection configuration +es_host = "192.168.3.200" +es_port = 9200 # Default Elasticsearch HTTP port + +# Create an Elasticsearch client +es = Elasticsearch([{'host': es_host, 'port': es_port, 'scheme': 'http'}]) + +# Define the Elasticsearch index where you want to store the sensor data +es_index = "sensor_data" # Change this to your desired index name + +try: + while True: + # Read temperature and humidity from the sensor + humidity, temperature = Adafruit_DHT.read_retry(sensor_type, gpio_pin) + + if humidity is not None and temperature is not None: + # Print the values + print(f'Temperature: {temperature:.2f}°C') + print(f'Humidity: {humidity:.2f}%') + + # Get the current timestamp + current_time = time.strftime('%Y-%m-%d %H:%M:%S') + + # Prepare the document to be indexed in Elasticsearch + doc = { + "temperature": temperature, + "humidity": humidity + } + + # Index the document into Elasticsearch with "_id" containing the timestamp + es.index(index=es_index, id=current_time, body=doc) + + # Index the document into Elasticsearch - Not supported after ES 7.x + #es.index(index=es_index, doc_type='_doc', body=doc) + + # Wait for a while (in seconds) before the next reading + time.sleep(2) + else: + print('Failed to retrieve data from the sensor') + +except KeyboardInterrupt: + print('Program terminated by user') + + +``` + + +```yml + +version: '3.7' + +services: + + # Elasticsearch Docker Images: https://www.docker.elastic.co/ + elasticsearch: + image: docker.elastic.co/elasticsearch/elasticsearch:7.15.0 + container_name: elasticsearch + environment: 
-      xpack.security.enabled=false
+      - discovery.type=single-node
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+      nofile:
+        soft: 65536
+        hard: 65536
+    cap_add:
+      - IPC_LOCK
+    volumes:
+      - elasticsearch-data:/usr/share/elasticsearch/data
+    ports:
+      - 9200:9200
+      - 9300:9300
+
+  kibana:
+    container_name: kibana
+    image: docker.elastic.co/kibana/kibana:7.15.0
+    environment:
+      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
+    ports:
+      - 5601:5601
+    depends_on:
+      - elasticsearch
+
+volumes:
+  elasticsearch-data:
+    driver: local
+
+
+# curl -X GET "http://localhost:9200/"
+# curl -X GET "http://192.168.3.200:9200/"
+# curl -X GET "http://192.168.3.200:9200/_cat/indices?v"
+
+# #curl -X GET "http://192.168.3.200:9200/your_index_name/_doc/"
+# curl -X GET "http://192.168.3.200:9200/sensor_data/_doc/"
+
+
+# curl -X GET "http://localhost:9200/sensor_data/_mapping?pretty"
+# curl -X GET "http://192.168.3.200:9200/sensor_data/_mapping?pretty"
+
+
+# curl -X GET "http://192.168.3.200:9200/sensor_data/_search?pretty" -H "Content-Type: application/json" -d '{
+#   "size": 1,
+#   "sort": [
+#     {
+#       "_doc": "desc"
+#     }
+#   ]
+# }'
+```
\ No newline at end of file
diff --git a/_posts/2024-12-31-dht22-python-redis.md b/_posts/2024-12-31-dht22-python-redis.md
new file mode 100644
index 0000000..27a0af8
--- /dev/null
+++ b/_posts/2024-12-31-dht22-python-redis.md
@@ -0,0 +1,126 @@
+---
+title: "Raspberry Pi: Temp and Humidity with DHT22 and Redis"
+date: 2024-12-29T23:20:21+01:00
+draft: true
+tags: ["Self-Hosting"]
+---
+
+
+
+Redis is an open-source, in-memory data structure store used as a database, cache, message broker, and streaming engine. It is known for its high performance, scalability, and flexibility. Redis provides data structures such as strings, hashes, lists, sets, sorted sets with range queries, bitmaps, hyperloglogs, geospatial indexes, and streams.
+
+Redis is open source software released under the BSD license. It is available for Linux, macOS, Windows, and FreeBSD.
+ +Here are some of the key features of Redis: + +In-memory data store: Redis stores data in memory, which makes it very fast. +Data structures: Redis supports a wide variety of data structures, including strings, hashes, lists, sets, sorted sets, bitmaps, hyperloglogs, geospatial indexes, and streams. +Scalability: Redis is highly scalable and can be used to support a large number of concurrent connections. +Flexibility: Redis is a very flexible tool that can be used for a variety of purposes, including caching, data streaming, and real-time applications. +Redis is a popular choice for a variety of applications, including: + +Caching: Redis can be used to cache frequently accessed data, such as user profiles or product information. This can improve the performance of applications by reducing the number of times the database needs to be accessed. +Data streaming: Redis can be used to stream data in real time. This can be used for applications such as real-time analytics or live chat. +Real-time applications: Redis can be used to build real-time applications that require high performance and scalability. This includes applications such as social media platforms, gaming applications, and financial trading applications. + + +```py + +import redis + +# Create a connection to the Redis database +r = redis.Redis() + +# Push the data to Redis +r.set("key", "value") + +# Push a list of data to Redis +r.lpush("list", "item1", "item2") +``` + +You can push data from Python to Redis, an in-memory data store, using the `redis-py` library. `redis-py` is a popular Python client for Redis that allows you to interact with Redis from your Python applications. + +Here's how you can use `redis-py` to push data to Redis: + +1. Install the `redis-py` library using pip: + + ```bash + pip install redis + ``` + +2. 
Import the `redis` module and create a connection to your Redis server: + + ```python + import redis + + # Connect to your Redis server + redis_host = 'localhost' # Replace with your Redis server's host or IP address + redis_port = 6379 # Default Redis port + redis_db = 0 # Default Redis database + r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db) + ``` + + Replace `'localhost'` with the address of your Redis server if it's running on a different host. + +3. Push data (key-value pairs) to Redis: + + ```python + # Define your key-value data + key = 'your_key' + value = 'your_value' + + # Push data to Redis + r.set(key, value) + ``` + + You can also specify additional parameters like expiration time, if needed. + +4. Retrieve data from Redis: + + ```python + # Retrieve data from Redis + retrieved_value = r.get(key) + + if retrieved_value is not None: + print(f"Value for key '{key}': {retrieved_value.decode('utf-8')}") + else: + print(f"Key '{key}' not found in Redis.") + ``` + +5. Close the Redis connection when you're done: + + ```python + r.close() + ``` + +These are the basic steps to push data from Python to Redis using `redis-py`. You can use various Redis data structures and commands depending on your use case, such as lists, sets, hashes, and more. + +Make sure that you have a running Redis server with the appropriate configuration and access permissions before running the code. + +### In memory data store + +An in-memory data store, often referred to as an "in-memory database" or "in-memory data store," is a type of database system that primarily stores and manages data in the system's main memory (RAM) rather than on traditional disk storage devices. This means that data is held and processed in memory, which offers several advantages: + +1. **Speed**: Data access and retrieval are extremely fast since there's no need to read from or write to slow disk drives. 
In-memory data stores can achieve low-latency and high-throughput operations, making them ideal for applications requiring rapid data access. + +2. **Low Latency**: Because data is stored in RAM, there is minimal seek time or latency associated with accessing the data. This is particularly important for real-time or high-performance applications. + +3. **Predictable Performance**: In-memory data stores provide consistent and predictable performance characteristics, making them suitable for applications where response times must be tightly controlled. + +4. **Caching**: In-memory data stores are commonly used for caching frequently accessed data. This reduces the load on traditional databases and accelerates data retrieval for read-heavy workloads. + +5. **No Disk I/O Overhead**: Since data isn't written to disk, there is no disk I/O overhead, which can be a significant bottleneck in traditional database systems. + +6. **Data Integrity**: In-memory databases typically have mechanisms to ensure data consistency and durability, such as periodic snapshots to disk or replication to other nodes. + +7. **Real-Time Analytics**: In-memory databases are often used for real-time analytics and data processing, where quick insights are required from large volumes of data. + +However, there are also some limitations to in-memory data stores: + +1. **Limited Storage**: In-memory data stores are constrained by the amount of available RAM, which may limit the volume of data that can be stored. This makes them less suitable for very large datasets. + +2. **Data Durability**: In-memory data is volatile and can be lost if the system crashes or is restarted. Some in-memory databases address this by periodically writing data to disk. + +3. **Cost**: RAM can be more expensive than traditional disk storage, so scaling up an in-memory database can be cost-prohibitive for large datasets. 
+
+In-memory data stores are commonly used for various applications, including real-time analytics, caching, session management, and high-frequency trading, where fast data access and low-latency responses are critical. Popular examples of in-memory data stores include Redis, Memcached, and various in-memory database systems.
\ No newline at end of file
diff --git a/_posts/2024-12-31-dht22.md b/_posts/2024-12-31-dht22.md
index ea7d2a0..1367860 100644
--- a/_posts/2024-12-31-dht22.md
+++ b/_posts/2024-12-31-dht22.md
@@ -1,6 +1,6 @@
 ---
 title: "Raspberry Pi: Temp and Humidity with DHT11 and MongoDB"
-date: 2023-08-29T23:20:21+01:00
+date: 2024-12-29T23:20:21+01:00
 draft: true
 tags: ["Self-Hosting"]
 ---
diff --git a/_posts/2024-12-31-esp32.md b/_posts/2024-12-31-esp32.md
new file mode 100644
index 0000000..ceb45a6
--- /dev/null
+++ b/_posts/2024-12-31-esp32.md
@@ -0,0 +1,563 @@
+---
+title: "ESP32:"
+date: 2024-12-30T23:20:21+01:00
+draft: true
+tags: ["Self-Hosting"]
+---
+
+# The ESP32 and the RPi - Killer Combination
+
+I know this is supposed to be an RPi-centered repo and web, but I couldn't resist adding a small project that uses the ESP32.
+
+An interesting tool I found to simulate these projects in the browser is:
+
+## Testing ESP32
+
+
+I have to say thanks to Tomasz and his great content that helped me get started with this:
+
+*
+
+
+To 'upload' the code to the ESP32, please make sure that you have the proper cable (*I was spending too many hours because of this*).
+ +```cpp +#include + +void setup() { + pinMode(LED_BUILTIN, OUTPUT); + Serial.begin(921600); + Serial.println("Hello from the setup"); +} + +void loop() { + delay(1000); + digitalWrite(LED_BUILTIN, HIGH); + Serial.println("Hello from the loop"); + delay(1000); + digitalWrite(LED_BUILTIN, LOW); +} +``` + +### Connecting ESP32 to Wifi + + + + + + + + + +```cpp + +#define WIFI_SSID "wifi_network_name" +#define WIFI_PASSWORD "wifi_password" + +void setup() { + Serial.begin(921600); + pinMode(LED_BUILTIN, OUTPUT); + + WiFi.begin(WIFI_SSID, WIFI_PASSWORD); + + Serial.println("starting"); +} + +bool isConnected = false; + +void loop() { + if (WiFi.status() == WL_CONNECTED && !isConnected) { + Serial.println("Connected"); + digitalWrite(LED_BUILTIN, HIGH); + isConnected = true; + } + + if (WiFi.status() != WL_CONNECTED) { + Serial.println("."); + digitalWrite(LED_BUILTIN, !digitalRead(LED_BUILTIN)); + delay(1000); + isConnected = false; + } +} +``` + +## ESP32 and DHT11 + +A note on breadboards first: + +### With Arduino IDE + +```cpp +//Incluimos las librerias +#include "DHTesp.h" +//Decaramos el variable que almacena el pin a conectar el DHT11 +int pinDHT = 15; +//Instanciamos el DHT +DHTesp dht; +void setup() { + Serial.begin(115200); + //Inicializamos el dht + dht.setup(pinDHT, DHTesp::DHT11); +} +void loop() { + //Obtenemos el arreglo de datos (humedad y temperatura) + TempAndHumidity data = dht.getTempAndHumidity(); + //Mostramos los datos de la temperatura y humedad + Serial.println("Temperatura: " + String(data.temperature, 2) + "°C"); + Serial.println("Humedad: " + String(data.humidity, 1) + "%"); + Serial.println("---"); + delay(1000); +} +``` + +Dont forget to include the libraries: Tools -> Manage Libraries -> DHT sensor library for ESPx + +### With VScode and PlatformIO + +I recommend you also the **Serial Monitor** extension + + + + + + +### Sending DHT11 Data to Arduino Cloud + + + + + + +```cpp +/* + Sketch generated by the Arduino IoT Cloud Thing 
"Untitled" + https://create.arduino.cc/cloud/things/b8c0b8a9-a659-48d5-aa57-0c6028a5d734 + + Arduino IoT Cloud Variables description + + The following variables are automatically generated and updated when changes are made to the Thing + + CloudLight led; + CloudRelativeHumidity humidity; + CloudTemperature temperature; + + Variables which are marked as READ/WRITE in the Cloud Thing will also have functions + which are called when their values are changed from the Dashboard. + These functions are generated with the Thing and added at the end of this sketch. +*/ + +#include "thingProperties.h" +#include "DHT.h" + +#define DHTPIN 15 + +#define DHTTYPE DHT11 + +DHT dht(DHTPIN, DHTTYPE); + + +void setup() { + // Initialize serial and wait for port to open: + Serial.begin(9600); + + pinMode(2,OUTPUT); + // This delay gives the chance to wait for a Serial Monitor without blocking if none is found + delay(1500); + + dht.begin(); + + // Defined in thingProperties.h + initProperties(); + + // Connect to Arduino IoT Cloud + ArduinoCloud.begin(ArduinoIoTPreferredConnection); + + /* + The following function allows you to obtain more information + related to the state of network and IoT Cloud connection and errors + the higher number the more granular information you’ll get. + The default is 0 (only errors). + Maximum is 4 + */ + setDebugMessageLevel(2); + ArduinoCloud.printDebugInfo(); +} + +void loop() { + ArduinoCloud.update(); + // Your code here + DHT_SENSOR_READ(); + + +} + + +/* + Since Led is READ_WRITE variable, onLedChange() is + executed every time a new value is received from IoT Cloud. 
+*/ +void onLedChange() { + // Add your code here to act upon Led change + if(led == 1) + { + digitalWrite(2,HIGH); + } + else + { + digitalWrite(2,LOW); + } +} + + +void DHT_SENSOR_READ() +{ + + float h = dht.readHumidity(); + float t = dht.readTemperature(); + + temperature = t; + humidity = h; + + Serial.print("Temperature - "); Serial.println(t); + Serial.print("Humidity - "); Serial.println(h); + delay(2000); + +} + +``` + + +### DHT11 - Blink IoT Platform + + +## ESP32 with AWS + + ESP32 GPIO pins in real-time with AWS API Gateway WebSockets + + + +- PlatformIO project with ESP32 WebSocket Client (C/C++) +- Serverless Framework project utilizing AWS API Gateway WebSockets, AWS Lambda, and AWS DynamoDB to handle the communication between clients (TypeScript) +- Web Application to control ESP32 from the browser (ReactJS) + + +## Industry 4.0 and the MQTT Protocol + +```yml +version: '3' +services: + mosquitto: + image: eclipse-mosquitto + container_name: mosquitto + ports: + - "1883:1883" + - "9001:9001" + restart: always + volumes: + - /path/to/mosquitto/config:/mosquitto/config + + nodered: + image: nodered/node-red + container_name: nodered + ports: + - "1880:1880" + restart: always + volumes: + - /path/to/nodered/data:/data + environment: + - TZ=your_time_zone + +``` + + + + +Open a web browser and navigate to http://raspberry_pi_ip:1880 (replace raspberry_pi_ip with your Raspberry Pi's IP address). You should see the Node-RED user interface. 
+ +### MQTT with the RPi and ESP32 + +#### MosquiTTO + + + + +sudo apt install -y mosquitto +sudo apt install -y mosquitto-clients + +#sudo apt install python3-pip +sudo pip3 install paho-mqtt + +sudo systemctl status mosquitto.service + + +Publish sample data (from the RPi to the Rpi): https://github.com/jiteshsaini/mqtt-demo/blob/main/rpi_mqtt_clients/client_pub.py + +```py +import time +import paho.mqtt.client as mqtt + + +def on_publish(client, userdata, mid): + print("message published") + + +client = mqtt.Client("rpi_client2") #this name should be unique +client.on_publish = on_publish +client.connect('127.0.0.1',1883) +# start a new thread +client.loop_start() + +k=0 +while True: + k=k+1 + if(k>20): + k=1 + + try: + msg =str(k) + pubMsg = client.publish( + topic='rpi/broadcast', + payload=msg.encode('utf-8'), + qos=0, + ) + pubMsg.wait_for_publish() + print(pubMsg.is_published()) + + except Exception as e: + print(e) + + time.sleep(2) +``` + +This will receive the sample data when both scripts are running- (in the RPi): + +```py +import paho.mqtt.client as mqtt +import time + +def on_connect(client, userdata, flags, rc): + global flag_connected + flag_connected = 1 + client_subscriptions(client) + print("Connected to MQTT server") + +def on_disconnect(client, userdata, rc): + global flag_connected + flag_connected = 0 + print("Disconnected from MQTT server") + +# a callback functions +def callback_esp32_sensor1(client, userdata, msg): + print('ESP sensor1 data: ', msg.payload.decode('utf-8')) + + +def callback_esp32_sensor2(client, userdata, msg): + print('ESP sensor2 data: ', str(msg.payload.decode('utf-8'))) + +def callback_rpi_broadcast(client, userdata, msg): + print('RPi Broadcast message: ', str(msg.payload.decode('utf-8'))) + +def client_subscriptions(client): + client.subscribe("esp32/#") + client.subscribe("rpi/broadcast") + +client = mqtt.Client("rpi_client1") #this should be a unique name +flag_connected = 0 + +client.on_connect = on_connect 
+client.on_disconnect = on_disconnect +client.message_callback_add('esp32/sensor1', callback_esp32_sensor1) +client.message_callback_add('esp32/sensor2', callback_esp32_sensor2) +client.message_callback_add('rpi/broadcast', callback_rpi_broadcast) +client.connect('127.0.0.1',1883) +# start a new thread +client.loop_start() +client_subscriptions(client) +print("......client setup complete............") + + +while True: + time.sleep(4) + if (flag_connected != 1): + print("trying to connect MQTT server..") +``` + + +Now, to publish data from the ESP32: + +You will need in the platformio.ini as + +lib_deps = knolleary/PubSubClient@^2.8 + +```cpp +/********* + Author: Jitesh Saini + This code is built upon the example code in pubsubclient library + Complete project details at https://helloworld.co.in +*********/ + +#include +#include + +// Replace the SSID/Password details as per your wifi router +const char* ssid = "yourSSID"; +const char* password = "yourPassword"; + +// Replace your MQTT Broker IP address here: +const char* mqtt_server = "192.168.1.45"; + +WiFiClient espClient; +PubSubClient client(espClient); + +long lastMsg = 0; + +#define ledPin 2 + +void blink_led(unsigned int times, unsigned int duration){ + for (int i = 0; i < times; i++) { + digitalWrite(ledPin, HIGH); + delay(duration); + digitalWrite(ledPin, LOW); + delay(200); + } +} + +void setup_wifi() { + delay(50); + Serial.println(); + Serial.print("Connecting to "); + Serial.println(ssid); + + WiFi.begin(ssid, password); + + int c=0; + while (WiFi.status() != WL_CONNECTED) { + blink_led(2,200); //blink LED twice (for 200ms ON time) to indicate that wifi not connected + delay(1000); // + Serial.print("."); + c=c+1; + if(c>10){ + ESP.restart(); //restart ESP after 10 seconds + } + } + + Serial.println(""); + Serial.println("WiFi connected"); + Serial.println("IP address: "); + Serial.println(WiFi.localIP()); + +} + +void connect_mqttServer() { + // Loop until we're reconnected + while 
(!client.connected()) { + + //first check if connected to wifi + if(WiFi.status() != WL_CONNECTED){ + //if not connected, then first connect to wifi + setup_wifi(); + } + + //now attemt to connect to MQTT server + Serial.print("Attempting MQTT connection..."); + // Attempt to connect + if (client.connect("ESP32_client1")) { // Change the name of client here if multiple ESP32 are connected + //attempt successful + Serial.println("connected"); + // Subscribe to topics here + client.subscribe("rpi/broadcast"); + //client.subscribe("rpi/xyz"); //subscribe more topics here + + } + else { + //attempt not successful + Serial.print("failed, rc="); + Serial.print(client.state()); + Serial.println(" trying again in 2 seconds"); + + blink_led(3,200); //blink LED three times (200ms on duration) to show that MQTT server connection attempt failed + // Wait 2 seconds before retrying + delay(2000); + } + } + +} + +//this function will be executed whenever there is data available on subscribed topics +void callback(char* topic, byte* message, unsigned int length) { + Serial.print("Message arrived on topic: "); + Serial.print(topic); + Serial.print(". 
Message: "); + String messageTemp; + + for (int i = 0; i < length; i++) { + Serial.print((char)message[i]); + messageTemp += (char)message[i]; + } + Serial.println(); + + // Check if a message is received on the topic "rpi/broadcast" + if (String(topic) == "rpi/broadcast") { + if(messageTemp == "10"){ + Serial.println("Action: blink LED"); + blink_led(1,1250); //blink LED once (for 1250ms ON time) + } + } + + //Similarly add more if statements to check for other subscribed topics +} + +void setup() { + pinMode(ledPin, OUTPUT); + Serial.begin(115200); + + setup_wifi(); + client.setServer(mqtt_server,1883);//1883 is the default port for MQTT server + client.setCallback(callback); +} + +void loop() { + + if (!client.connected()) { + connect_mqttServer(); + } + + client.loop(); + + long now = millis(); + if (now - lastMsg > 4000) { + lastMsg = now; + + client.publish("esp32/sensor1", "88"); //topic name (to which this ESP32 publishes its data). 88 is the dummy value. + + } + +} +``` + + + + +### Another try mqtt ESP32 + + + + + + + + + + + + + + +## ESP32 + MLX90614 + + + + +## car battery + + \ No newline at end of file diff --git a/_posts/2024-12-31-ky008.md b/_posts/2024-12-31-ky008.md new file mode 100644 index 0000000..ba1bb9a --- /dev/null +++ b/_posts/2024-12-31-ky008.md @@ -0,0 +1,39 @@ +--- +title: "Raspberry Pi Pico W:" +date: 2024-12-30T23:20:21+01:00 +draft: true +tags: ["Self-Hosting"] +--- + + + + + +The KY-008 sensor is a laser transmitter module that emits a red laser beam. It can be used for a variety of projects, including: + +Remote signaling: The laser beam can be used to send signals to a receiver module. This can be used to create a simple remote control, or to create a sensor that detects the presence of an object. +Line following: The laser beam can be used to create a line following robot. The robot can be programmed to follow the laser beam, even if it is curved or broken. 
+Distance measurement: The laser beam can be used to measure the distance to an object. This can be done by measuring the time it takes for the laser beam to reflect back to the receiver module. +Obstacle avoidance: The laser beam can be used to create an obstacle avoidance sensor. The sensor can be programmed to detect obstacles and avoid them. +Security system: The laser beam can be used to create a security system. The system can be programmed to sound an alarm if the laser beam is interrupted. +The KY-008 sensor is a versatile device that can be used for a variety of projects. It is important to note that the laser beam emitted by the KY-008 sensor can be harmful to eyes, so it should be used with caution. + +Here are some additional things to keep in mind when using the KY-008 sensor: + +The laser beam is emitted in a straight line, so it is important to make sure that there are no obstacles in the way. +The laser beam can be affected by sunlight and other bright light sources, so it is important to use the sensor in a dark environment. +The laser beam can be dimmed by adjusting the resistor on the module. + + + +Yes, you can use the KY-008 sensor with the Raspberry Pi Pico. The Pico has a 5V output pin that can be used to power the laser module. The laser module has two pins: VCC and GND. VCC should be connected to the 5V pin on the Pico and GND should be connected to the ground pin on the Pico. + +Here is a simple circuit that you can use to connect the KY-008 sensor to the Raspberry Pi Pico: + +VCC (laser) ---> 5V (Pico) +GND (laser) ---> GND (Pico) +Once the laser module is connected to the Pico, you can control it using software. There are many tutorials available online that show you how to do this. + +Please note that the laser beam emitted by the KY-008 sensor can be harmful to eyes, so it should be used with caution. It is important to make sure that the laser beam is not pointed directly at anyone. + +I hope this helps! 
\ No newline at end of file
diff --git a/_posts/2024-12-31-rpi_wifi_ethernet_bridge.md b/_posts/2024-12-31-rpi_wifi_ethernet_bridge.md
new file mode 100644
index 0000000..4a69377
--- /dev/null
+++ b/_posts/2024-12-31-rpi_wifi_ethernet_bridge.md
@@ -0,0 +1,408 @@
+---
+title: "Raspberry Pi: Wifi to Ethernet Bridge (through Wireguard VPN)"
+date: 2024-12-30T23:20:21+01:00
+draft: true
+tags: ["Self-Hosting"]
+---
+
+
+
+
+
+
+## Raspberry Pi: Wifi Bridge
+
+
+I was inspired by the awesome work of **[William Halley in his blog](https://www.willhaley.com/blog/raspberry-pi-wifi-ethernet-bridge/)**, where I was able to successfully follow option 2 as it is proposed: *to share Wifi through Ethernet on a separate subnet*.
+
+### Initial Setup: Option 2 - Separate Subnet
+
+The script that is provided is this one (again, credits to William):
+
+```
+#!/usr/bin/env bash
+
+set -e
+
+[ $EUID -ne 0 ] && echo "run as root" >&2 && exit 1
+
+apt update && \
+  DEBIAN_FRONTEND=noninteractive apt install -y \
+  dnsmasq netfilter-persistent iptables-persistent
+
+# Create and persist iptables rule.
+iptables -t nat -A POSTROUTING -o wlan0 -j MASQUERADE
+netfilter-persistent save
+
+# Enable ipv4 forwarding.
+sed -i'' s/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/ /etc/sysctl.conf
+
+# The Ethernet adapter will use a static IP of 10.1.1.1 on this new subnet.
+cat <<'EOF' >/etc/network/interfaces.d/eth0
+auto eth0
+allow-hotplug eth0
+iface eth0 inet static
+  address 10.1.1.1
+  netmask 255.255.255.0
+  gateway 10.1.1.1
+EOF
+
+# Create a dnsmasq DHCP config at /etc/dnsmasq.d/bridge.conf. The Raspberry Pi
+# will act as a DHCP server to the client connected over ethernet.
+cat <<'EOF' >/etc/dnsmasq.d/bridge.conf +interface=eth0 +bind-interfaces +server=8.8.8.8 +domain-needed +bogus-priv +dhcp-range=10.1.1.2,10.1.1.254,12h +EOF + +systemctl mask networking.service +``` + +* If like me you are new to networking, I think going line by line and taking time to understand what we are doing is important: + * #!/usr/bin/env bash: This is the shebang line that determines the script's interpreter. In this case, the script will be run using bash shell. + * set -e: This command causes the shell to exit if any invoked command fails. + * [ $EUID -ne 0 ] && echo "run as root" >&2 && exit 1: This line checks if the script is run as root. If not, it prints an error message and exits. Root privileges are required to modify system configurations. + * apt update && \: This command updates the list of available packages from the repositories. + * DEBIAN_FRONTEND=noninteractive apt install -y \: This installs the necessary packages non-interactively, meaning it won't prompt for user input during installation. + * dnsmasq netfilter-persistent iptables-persistent: These are the packages being installed. Dnsmasq is a lightweight DHCP and caching DNS server. Netfilter-persistent and iptables-persistent are used for managing and saving iptables rules. + * iptables -t nat -A POSTROUTING -o wlan0 -j MASQUERADE: This line adds a rule to iptables that will masquerade all outgoing traffic from the Raspberry Pi as coming from itself, essentially making the Pi act as a gateway for the connected device. + * We are using wlan0 as is it the default for the Raspberry Pi + * netfilter-persistent save: This saves the iptables rules so they persist across reboots. + * sed -i'' s/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/ /etc/sysctl.conf: This line enables IP forwarding, which is necessary for routing traffic. + * The cat <<'EOF' >/etc/network/interfaces.d/eth0 block: This block creates a network configuration file for the eth0 interface. 
It sets the interface to use a static IP address (10.1.1.1) and acts as a gateway on the 10.1.1.0/24 subnet.
+    * The cat <<'EOF' >/etc/dnsmasq.d/bridge.conf block: This block creates a dnsmasq configuration file that sets the Raspberry Pi to act as a DHCP server on the eth0 interface. This will assign IP addresses to devices connected to the Pi over Ethernet.
+    * systemctl mask networking.service: This command prevents the networking service from being started on boot. This is necessary because the script manually configures the network interfaces, and the networking service could interfere with this.
+
+* **Remember**, the interface names wlan0 and eth0 used here can be different on other devices; check yours with:
+
+{{< cmd >}}
+ifconfig
+{{< /cmd >}}
+
+The end result is that **the Raspberry Pi will act as a bridge between the WiFi connection and the Ethernet connection**, providing Internet access to devices connected via Ethernet to the RPi.
+
+
+## Raspberry Pi Bridge: Wifi to Ethernet (With wireguard)
+
+That was really great and I was really impressed and happy that it worked perfectly the first time I tried.
+
+Then, I wondered...*if the Raspberry Pi had a VPN connection, could we provide that same connection to the Ethernet-connected device?*
+
+I decided to try with **Wireguard** (you will need a working VPN server that generates Wireguard config) and surprisingly **it worked with some modification**:
+
+
+1) First, we need to have wireguard installed:
+
+{{< cmd >}}
+sudo apt install wireguard
+cp /home/Downloads/your_vpn_wireguard_configuration.conf /etc/wireguard #download the wireguard config: account-wireguard configuration
+sudo wg-quick up your_vpn_wireguard_configuration #the name of the .conf file that you have downloaded
+{{< /cmd >}}
+
+This will connect your WireGuard client to the server. Do you want to check your public IP?
+ +{{< cmd >}} +curl -sS https://ipinfo.io/json #the command to use +{{< /cmd >}} + +And if you need, to disconnect from Wireguard, just: + +{{< cmd >}} +wg-quick down +sudo wg-quick down your_vpn_wireguard_configuration +#sudo nano /etc/resolv.conf #to check/adapt DNS name (optional) +#sudo reboot (optional) +{{< /cmd >}} + +2) Use this command to check which network interface your wireguard VPN has: + +{{< cmd >}} +ifconfig +{{< /cmd >}} + +3) This will be our new **bridge_wireguard.sh** script to route the WIFI to ethernet and provide VPN connection at the same time: + + + +{{< cmd >}} +sudo nano bridge_wireguard.sh +{{< /cmd >}} + +``` +#!/usr/bin/env bash + +set -e + +[ $EUID -ne 0 ] && echo "run as root" >&2 && exit 1 + +apt update && \ + DEBIAN_FRONTEND=noninteractive apt install -y \ + dnsmasq netfilter-persistent iptables-persistent + +# Create and persist iptables rule. +# Here's the change: we're using the WireGuard interface (your_vpn_wireguard_netw_interface) instead of the WiFi interface (wlan0). +iptables -t nat -A POSTROUTING -o your_vpn_wireguard_netw_interface -j MASQUERADE +netfilter-persistent save + +# Enable ipv4 forwarding. +sed -i'' s/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/ /etc/sysctl.conf + +# The Ethernet adapter will use a static IP of 10.1.1.1 on this new subnet. +cat <<'EOF' >/etc/network/interfaces.d/eth0 +auto eth0 +allow-hotplug eth0 +iface eth0 inet static + address 10.1.1.1 + netmask 255.255.255.0 + gateway 10.1.1.1 +EOF + +# Create a dnsmasq DHCP config at /etc/dnsmasq.d/bridge.conf. The Raspberry Pi +# will act as a DHCP server to the client connected over ethernet. 
+cat <<'EOF' >/etc/dnsmasq.d/bridge.conf +interface=eth0 +bind-interfaces +server=8.8.8.8 +domain-needed +bogus-priv +dhcp-range=10.1.1.2,10.1.1.254,12h +EOF + +systemctl mask networking.service +``` + +{{< cmd >}} +sudo bash bridge_wireguard.sh +sudo reboot +{{< /cmd >}} + + + + + + + + + + + + + + + +## FAQ + +### Checking WIFI Networks the RPi Connects + +```sh +nano /etc/wpa_supplicant/wpa_supplicant.conf +``` + +### Installing ping + + +```sh +apt-get install -y iputils-ping +``` \ No newline at end of file From 3c728f9d7ae7da658990f200f3961e7b827d68a6 Mon Sep 17 00:00:00 2001 From: Jesus Alcocer Tagua Date: Fri, 16 Feb 2024 23:30:58 +0100 Subject: [PATCH 2/5] adding mqtt --- _posts/2022-08-10-youtube-video-download.md | 16 ++++- _posts/2023-10-20-rpi-iot-dht1122-mongo.md | 2 +- _posts/2024-01-04-rpi-ansible.md | 7 ++- _posts/2024-02-15-rpi-mqtt.md | 63 +++++++++++++++++++ _posts/2024-12-31-HA.md | 2 +- _posts/2024-12-31-rpi_wifi_ethernet_bridge.md | 5 +- _posts/2024-13-15-rustberry-pi.md | 8 +++ 7 files changed, 94 insertions(+), 9 deletions(-) create mode 100644 _posts/2024-02-15-rpi-mqtt.md create mode 100644 _posts/2024-13-15-rustberry-pi.md diff --git a/_posts/2022-08-10-youtube-video-download.md b/_posts/2022-08-10-youtube-video-download.md index b174dd0..cc3fbe1 100644 --- a/_posts/2022-08-10-youtube-video-download.md +++ b/_posts/2022-08-10-youtube-video-download.md @@ -50,7 +50,7 @@ services: - ./db/:/data/db ``` -After deployment - just visit: http://localhost:8998. +After deployment - just visit: `http://localhost:8998` if you want to get to know more about the project: @@ -68,7 +68,13 @@ docker compose up -d ``` -The project is public at Github: +The project is public at [Github](https://github.com/TeamPiped/Piped). + +## My Favourite - MeTube + +ANd you can have [MeTube setup on your Raspberry](https://jalcocert.github.io/Linux/docs/linux__cloud.md/ansible/#ansible-like-a-pro) really quick. 
+ +I have done it with [Ansible *and Docker*](https://jalcocert.github.io/Linux/docs/linux__cloud.md/ansible/) --- @@ -76,4 +82,8 @@ The project is public at Github: ### How to use RSS to subscribe to YT channels? - \ No newline at end of file + + +### Other Alternative Youtube Front Ends + +You can check other [alternatives to Self-Host your Youtube UI](https://fossengineer.com/youtube-alternative-front-ends), Invidious, YT Downloader... \ No newline at end of file diff --git a/_posts/2023-10-20-rpi-iot-dht1122-mongo.md b/_posts/2023-10-20-rpi-iot-dht1122-mongo.md index 90431eb..fa35ad5 100644 --- a/_posts/2023-10-20-rpi-iot-dht1122-mongo.md +++ b/_posts/2023-10-20-rpi-iot-dht1122-mongo.md @@ -306,7 +306,7 @@ volumes: metabase_data: ``` -Acces it at: http://localhost:3000 +Acces it at: `http://localhost:3000` ![Desktop View](/img/metabase-mongoDB.JPG){: width="972" height="589" } _Metabase Ready to Roll_ diff --git a/_posts/2024-01-04-rpi-ansible.md b/_posts/2024-01-04-rpi-ansible.md index 5246e56..b84ec78 100644 --- a/_posts/2024-01-04-rpi-ansible.md +++ b/_posts/2024-01-04-rpi-ansible.md @@ -79,6 +79,7 @@ ansible-playbook ./RPi/Z_ansible/Ansible_py_dht_influx_grafana.yml -i inventory. > This is the one: [Raspberry Pi - DHT to InfluxDB](https://jalcocert.github.io/RPi/posts/rpi-iot-dht11-influxdb/) {: .prompt-info } +--- ## FAQ @@ -88,7 +89,7 @@ Container are a way to encapsule all Software Project dependencies. For example to encapsule: MongoDB, Influx or the Python Script with all the libraries installed at a specified version. -To run containers, Ansible is actually using Docker. +To run containers, Ansible is actually using [Docker](https://jalcocert.github.io/RPi/posts/selfhosting-with-docker/). You can check the installed versions with: @@ -105,4 +106,6 @@ Why shouldnt we do it with our Pi's? ### Why Docker for SelfHosting? 
- \ No newline at end of file + + +You can also try [containers with Podman](https://fossengineer.com/docker-alternatives-for-data-analytics/) \ No newline at end of file diff --git a/_posts/2024-02-15-rpi-mqtt.md b/_posts/2024-02-15-rpi-mqtt.md new file mode 100644 index 0000000..ec4c865 --- /dev/null +++ b/_posts/2024-02-15-rpi-mqtt.md @@ -0,0 +1,63 @@ +--- +title: MQTT with Raspberry Pi +author: JAlcocerT +date: 2024-02-15 00:34:00 +0800 +categories: [Make your Raspberry Useful] +tags: [IoT, Sensors] +--- + +## Learning MQTT with a RPi + +### Why MQTT for IoT? + +### To Do list + +- [ ] Send DHT Data to MongoDB + + [x] Install a MQTT Client + + [ ] Install a MQTT Broker + + [ ] Testing the MQTT Broker + + [ ] Test the Connection + +## Install MQTT Client + +There are many options - I like [MQTTX](https://mqttx.app/) which is free. + +Which you can install in any platform (and even SelfHost MQTTX with Docker). + +Here is how to install MQTTx with [Flatpak](https://jalcocert.github.io/Linux/docs/debian/linux_installing_apps/#flatpak): + +```sh +flatpak install flathub com.emqx.MQTTX +``` + +## Install MQTT Broker + +```sh +docker pull emqx/emqx:5.5.0 +docker run -d --name emqx -p 1883:1883 -p 8083:8083 -p 8084:8084 -p 8883:8883 -p 18083:18083 emqx/emqx:5.5.0 +``` + +You have other MQTT Broker options like [Eclipse Mosquitto](https://github.com/eclipse/mosquitto) + +### Testing EMQx + +```py +import paho.mqtt.publish as publish + +# MQTT Broker (EMQX) details +broker_address = "broker.emqx.io" +port = 1883 +topic = "python/mqtt" + +# Message to publish +message = "Hello from Python!" + +# Publish the message +publish.single(topic, message, hostname=broker_address, port=port) +print(f"Message Published to {topic}") +``` + +--- + +## FAQ + diff --git a/_posts/2024-12-31-HA.md b/_posts/2024-12-31-HA.md index e28db40..e7cd8cb 100644 --- a/_posts/2024-12-31-HA.md +++ b/_posts/2024-12-31-HA.md @@ -71,5 +71,5 @@ You will need HACS Installed. 
Connect to the server where [Ollama is running](https://fossengineer.com/selfhosting-llms-ollama/). -You will need a local ip for example to the PC where you run Ollama: http://homeassistant.local:11434 #192.168.3.103 +You will need a local ip for example to the PC where you run Ollama: `http://homeassistant.local:11434` diff --git a/_posts/2024-12-31-rpi_wifi_ethernet_bridge.md b/_posts/2024-12-31-rpi_wifi_ethernet_bridge.md index 4a69377..c02c7db 100644 --- a/_posts/2024-12-31-rpi_wifi_ethernet_bridge.md +++ b/_posts/2024-12-31-rpi_wifi_ethernet_bridge.md @@ -21,7 +21,7 @@ I was inspired by the awsome work of **[William Halley in his blog](https://www. The script that is provided is this one (again, credits to William): -``` +```sh #!/usr/bin/env bash set -e @@ -133,7 +133,7 @@ ifconfig sudo nano bridge_wireguard.sh {{< /cmd >}} -``` +```sh #!/usr/bin/env bash set -e @@ -391,6 +391,7 @@ sudo bash bridge_docker_mullvad.sh sudo reboot ``` --> +--- ## FAQ diff --git a/_posts/2024-13-15-rustberry-pi.md b/_posts/2024-13-15-rustberry-pi.md new file mode 100644 index 0000000..c1e0b07 --- /dev/null +++ b/_posts/2024-13-15-rustberry-pi.md @@ -0,0 +1,8 @@ +--- +title: RustBerry Pi - Learning Rust with a RPi +author: JAlcocerT +date: 2024-12-15 00:34:00 +0800 +categories: [Make your Raspberry Useful] +tags: [IoT] +--- + From 5e3b242723aa50bfc0d0ac4dcf4cd6cf6fb27ba3 Mon Sep 17 00:00:00 2001 From: Jesus Alcocer Tagua Date: Sat, 17 Feb 2024 13:18:23 +0100 Subject: [PATCH 3/5] adding mqtt info + nodered docker --- Z_MQTT/C/mqtt_publish_server.c | 50 ++++++ Z_MQTT/C/readme.md | 14 ++ Z_MQTT/Node-Red/Docker-compose.yml | 55 +++++++ Z_MQTT/Python/Python_push_distribution.py | 30 ++++ Z_MQTT/Readme.md | 0 _posts/2021-07-21-getting-started.md | 6 +- _posts/2024-02-15-rpi-mqtt.md | 184 +++++++++++++++++++++- _posts/2024-12-11-rpi-gps-superset.md | 1 + 8 files changed, 332 insertions(+), 8 deletions(-) create mode 100644 Z_MQTT/C/mqtt_publish_server.c create mode 100644 
Z_MQTT/C/readme.md create mode 100644 Z_MQTT/Node-Red/Docker-compose.yml create mode 100644 Z_MQTT/Python/Python_push_distribution.py create mode 100644 Z_MQTT/Readme.md diff --git a/Z_MQTT/C/mqtt_publish_server.c b/Z_MQTT/C/mqtt_publish_server.c new file mode 100644 index 0000000..d67f390 --- /dev/null +++ b/Z_MQTT/C/mqtt_publish_server.c @@ -0,0 +1,50 @@ +#include +#include +#include "MQTTClient.h" + +#define ADDRESS "tcp://192.168.3.200:1883" +#define CLIENTID "ExampleClientPub" +#define TOPIC "c/mqtt" +#define PAYLOAD "Hello from C!" +#define QOS 1 +#define TIMEOUT 10000L + +// int main(int argc, char* argv[]) { +// // MQTTClient declaration and other code for connecting, publishing, and disconnecting +// } + +int main(int argc, char* argv[]) { + MQTTClient client; + MQTTClient_connectOptions conn_opts = MQTTClient_connectOptions_initializer; + MQTTClient_message pubmsg = MQTTClient_message_initializer; + MQTTClient_deliveryToken token; + int rc; + + // Initialize the client + MQTTClient_create(&client, ADDRESS, CLIENTID, MQTTCLIENT_PERSISTENCE_NONE, NULL); + conn_opts.keepAliveInterval = 20; + conn_opts.cleansession = 1; + + // Connect to the MQTT broker + if ((rc = MQTTClient_connect(client, &conn_opts)) != MQTTCLIENT_SUCCESS) { + printf("Failed to connect, return code %d\n", rc); + exit(-1); + } + + // Prepare and publish the message + pubmsg.payload = PAYLOAD; + pubmsg.payloadlen = strlen(PAYLOAD); + pubmsg.qos = QOS; + pubmsg.retained = 0; + MQTTClient_publishMessage(client, TOPIC, &pubmsg, &token); + printf("Waiting for up to %ld seconds for publication of %s\n" + "on topic %s for client with ClientID: %s\n", + TIMEOUT / 1000, PAYLOAD, TOPIC, CLIENTID); + rc = MQTTClient_waitForCompletion(client, token, TIMEOUT); + printf("Message with delivery token %d delivered\n", token); + + // Disconnect + MQTTClient_disconnect(client, 10000); + MQTTClient_destroy(&client); + return rc; +} diff --git a/Z_MQTT/C/readme.md b/Z_MQTT/C/readme.md new file mode 100644 
index 0000000..c5965d3 --- /dev/null +++ b/Z_MQTT/C/readme.md @@ -0,0 +1,14 @@ + +```sh +sudo apt-get update +#sudo apt-get install libpaho-mqtt-dev +sudo apt-get install build-essential git cmake + + + +gcc -o mqtt_publish mqtt_publish.c -lpaho-mqtt3c +./mqtt_publish +#gcc -o mqtt_publish_server mqtt_publish_server.c -lpaho-mqtt3c +#./mqtt_publish_server + +``` \ No newline at end of file diff --git a/Z_MQTT/Node-Red/Docker-compose.yml b/Z_MQTT/Node-Red/Docker-compose.yml new file mode 100644 index 0000000..e84d311 --- /dev/null +++ b/Z_MQTT/Node-Red/Docker-compose.yml @@ -0,0 +1,55 @@ +version: '3' +services: + node-red: + image: nodered/node-red + ports: + - "1880:1880" + volumes: + - myNodeREDdata:/data + depends_on: + - mqtt-broker + + mqtt-broker: + image: emqx/emqx:5.0.0 + ports: + - "1883:1883" + - "8083:8083" + - "8084:8084" + - "8883:8883" + - "18083:18083" + +volumes: + myNodeREDdata: + +#sudo docker-compose up -d + +#docker run -it -p 1880:1880 -v myNodeREDdata:/data --name mynodered nodered/node-red + +################################# +# version: '3' +# services: +# node-red: +# image: nodered/node-red +# ports: +# - "1880:1880" +# environment: +# - TZ=UTC # Optional: Set timezone if needed +# volumes: +# - ./data:/data # Optional: Mount a volume to persist data + +# version: '3' +# services: +# node-red: +# image: nodered/node-red +# ports: +# - "1880:1880" +# environment: +# - TZ=UTC # Optional: Set timezone if needed +# volumes: +# - ./data:/data # Optional: Mount a volume to persist data + + # mqtt-broker: + # image: emqx/emqx + # ports: + # - "1883:1883" + diff --git a/Z_MQTT/Python/Python_push_distribution.py b/Z_MQTT/Python/Python_push_distribution.py new file mode 100644 index 0000000..ae7e650 --- /dev/null +++ b/Z_MQTT/Python/Python_push_distribution.py @@ -0,0 +1,30 @@ +import time +import random +import math +import paho.mqtt.publish as publish + +# MQTT Broker (EMQX) details +#broker_address = "broker.emqx.io" #local broker +broker_address 
= "192.168.3.200" #local network broker +port = 1883 +topic = "python/mqtt" + +while True: + # Generate a random value based on normal distribution + mean = 25 # Mean of the distribution + std_dev = 10 # Standard deviation of the distribution + value = random.normalvariate(mean, std_dev) + value = max(0, min(50, value)) # Ensure value is within [0, 50] range + + # Message to publish + message = str(value) + + # Publish the message + publish.single(topic, message, hostname=broker_address, port=port) + + print(f"Message Published: {message}") + + # Wait for 1 second + time.sleep(0.1) + +#python3 Python_push_distribution.py \ No newline at end of file diff --git a/Z_MQTT/Readme.md b/Z_MQTT/Readme.md new file mode 100644 index 0000000..e69de29 diff --git a/_posts/2021-07-21-getting-started.md b/_posts/2021-07-21-getting-started.md index 3d9b2ba..7669d08 100644 --- a/_posts/2021-07-21-getting-started.md +++ b/_posts/2021-07-21-getting-started.md @@ -101,12 +101,10 @@ You can also use it with the Cloud: | **Apache Superset** | Yes | Open-source data visualization and data exploration platform. Supports SQL querying. Customizable and extensible | Requires technical knowledge for setup and customization. May have performance issues with very large datasets | | **Kibana** | Yes | Part of the Elastic Stack, excellent for visualizing Elasticsearch data. Great for log and time-series analytics. Real-time data visualization | Primarily tailored to Elasticsearch data. Can be complex to configure and optimize. Less versatile for non-Elasticsearch data | | **KNIME** | Yes | User-friendly, visual data pipeline design. Extensive plugin ecosystem. Good for non-programmers. Strong in data preprocessing and analysis | Can be less intuitive for complex, custom data analysis. Performance issues with very large datasets | -| **Tableau** | No | Exceptional data visualization capabilities. Intuitive and user-friendly. Strong in business intelligence | Expensive. Not open source. 
More focused on visualization than data modeling | | **Python Libraries** (e.g., pandas, scikit-learn) | Yes | Highly flexible and powerful. Huge ecosystem and community. Ideal for custom, complex analysis | Requires programming knowledge. Steeper learning curve for non-programmers | | **R Libraries** (e.g., ggplot2, dplyr) | Yes | Excellent for statistical analysis and data visualization. Large number of packages for various analyses. Strong academic and research community support | Requires programming knowledge. Less intuitive for those unfamiliar with R | -| **Alteryx** | No | Strong in data blending and preparation. Advanced analytics capabilities. Good integration with other tools | Expensive. Not open source. Steeper learning curve | -| **RapidMiner** | No | Comprehensive data science platform. Good for machine learning and predictive modeling. User-friendly with a visual approach | Free version is limited. Can be expensive for the full version. Steep learning curve for advanced features | -| **QlikView/Qlik Sense** | No | Powerful for interactive data discovery and BI. Flexible and customizable. Good data integration | Can be expensive. Steeper learning curve compared to some competitors. Not open source | + +* **Others**: Grafana, Redash, Node-Red