Skip to content

Commit

Permalink
Merge branch 'main' of https://github.com/JAlcocerT/RPi
Browse files Browse the repository at this point in the history
  • Loading branch information
JAlcocerT committed Feb 26, 2024
2 parents a6d2266 + 16899a9 commit bce7d1e
Show file tree
Hide file tree
Showing 32 changed files with 754 additions and 16 deletions.
50 changes: 50 additions & 0 deletions Z_MQTT/C/mqtt_publish_server.c
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>       /* strlen() — was missing; implicit declaration is an error in modern C */
#include "MQTTClient.h"

#define ADDRESS  "tcp://192.168.3.200:1883"
#define CLIENTID "ExampleClientPub"
#define TOPIC    "c/mqtt"
#define PAYLOAD  "Hello from C!"
#define QOS      1
#define TIMEOUT  10000L

/*
 * Publish a single MQTT message (PAYLOAD) to TOPIC on the broker at ADDRESS,
 * wait for delivery confirmation, then disconnect.
 *
 * Returns the last Paho return code (MQTTCLIENT_SUCCESS == 0 on success),
 * or EXIT_FAILURE if the client could not be created or connected.
 */
int main(int argc, char* argv[]) {
    MQTTClient client;
    MQTTClient_connectOptions conn_opts = MQTTClient_connectOptions_initializer;
    MQTTClient_message pubmsg = MQTTClient_message_initializer;
    MQTTClient_deliveryToken token;
    int rc;

    /* Create the client handle; the original ignored this return code. */
    if ((rc = MQTTClient_create(&client, ADDRESS, CLIENTID,
                                MQTTCLIENT_PERSISTENCE_NONE, NULL)) != MQTTCLIENT_SUCCESS) {
        printf("Failed to create client, return code %d\n", rc);
        return EXIT_FAILURE;
    }
    conn_opts.keepAliveInterval = 20;  /* seconds between keep-alive pings */
    conn_opts.cleansession = 1;        /* discard any previous session state */

    /* Connect to the MQTT broker */
    if ((rc = MQTTClient_connect(client, &conn_opts)) != MQTTCLIENT_SUCCESS) {
        printf("Failed to connect, return code %d\n", rc);
        MQTTClient_destroy(&client);   /* was leaked on this failure path */
        return EXIT_FAILURE;           /* exit(-1) is not a portable exit status */
    }

    /* Prepare and publish the message */
    pubmsg.payload = PAYLOAD;
    pubmsg.payloadlen = strlen(PAYLOAD);
    pubmsg.qos = QOS;
    pubmsg.retained = 0;
    MQTTClient_publishMessage(client, TOPIC, &pubmsg, &token);
    printf("Waiting for up to %ld seconds for publication of %s\n"
           "on topic %s for client with ClientID: %s\n",
           TIMEOUT / 1000, PAYLOAD, TOPIC, CLIENTID);
    /* Block until the broker acknowledges delivery (QOS 1) or TIMEOUT ms pass. */
    rc = MQTTClient_waitForCompletion(client, token, TIMEOUT);
    printf("Message with delivery token %d delivered\n", token);

    /* Disconnect (allow up to 10 s for in-flight messages), then free the handle. */
    MQTTClient_disconnect(client, 10000);
    MQTTClient_destroy(&client);
    return rc;
}
14 changes: 14 additions & 0 deletions Z_MQTT/C/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@

```sh
sudo apt-get update
#sudo apt-get install libpaho-mqtt-dev
sudo apt-get install build-essential git cmake



gcc -o mqtt_publish mqtt_publish.c -lpaho-mqtt3c
./mqtt_publish
#gcc -o mqtt_publish_server mqtt_publish_server.c -lpaho-mqtt3c
#./mqtt_publish_server

```
45 changes: 45 additions & 0 deletions Z_MQTT/Python/DHT_to_MQTT.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""Read a DHT11/DHT22 sensor on a Raspberry Pi and publish the readings to MQTT.

All configuration comes from environment variables (with defaults):
DHT_SENSOR_TYPE, DHT_PIN, MQTT_BROKER, MQTT_PORT, MQTT_TOPIC_TEMP,
MQTT_TOPIC_HUM, MQTT_USERNAME, MQTT_PASSWORD.
"""
# BUG FIX: the file imported `adafruit_dht` but every reference below uses the
# legacy `Adafruit_DHT` API (DHT11/DHT22 constants, read_retry), which raised
# NameError at runtime. Import the library actually used.
import Adafruit_DHT
import time
import os
import paho.mqtt.client as mqtt

# Set default sensor type to DHT22
dht_sensor_type = os.environ.get('DHT_SENSOR_TYPE', 'DHT22')

if dht_sensor_type == 'DHT11':
    DHT_SENSOR = Adafruit_DHT.DHT11
elif dht_sensor_type == 'DHT22':
    DHT_SENSOR = Adafruit_DHT.DHT22
else:
    print(f"Invalid DHT sensor type '{dht_sensor_type}'. Defaulting to DHT22.")
    DHT_SENSOR = Adafruit_DHT.DHT22

# BCM GPIO pin the sensor's data line is wired to
DHT_PIN = int(os.environ.get('DHT_PIN', '4'))

# Configure MQTT connection parameters
mqtt_broker = os.environ.get('MQTT_BROKER', '192.168.3.200')
mqtt_port = int(os.environ.get('MQTT_PORT', '1883'))
mqtt_topic_temp = os.environ.get('MQTT_TOPIC_TEMP', 'sensor/temperature')
mqtt_topic_hum = os.environ.get('MQTT_TOPIC_HUM', 'sensor/humidity')
mqtt_username = os.environ.get('MQTT_USERNAME', '')
mqtt_password = os.environ.get('MQTT_PASSWORD', '')

# Initialize MQTT client and connect to the broker
client = mqtt.Client()
if mqtt_username and mqtt_password:
    client.username_pw_set(mqtt_username, mqtt_password)
client.connect(mqtt_broker, mqtt_port, 60)
# BUG FIX: without a network loop paho never sends keep-alive pings, so the
# broker drops the connection after the keepalive window. Run the loop in a
# background thread.
client.loop_start()

while True:
    # read_retry retries internally until it gets a reading (or gives up with None)
    humidity, temperature = Adafruit_DHT.read_retry(DHT_SENSOR, DHT_PIN)
    if humidity is not None and temperature is not None:
        # Publish temperature and humidity to their respective topics
        client.publish(mqtt_topic_temp, '{:.2f}'.format(temperature))
        client.publish(mqtt_topic_hum, '{:.2f}'.format(humidity))
        print("Temperature sent to MQTT topic: {}".format(mqtt_topic_temp))
        print("Humidity sent to MQTT topic: {}".format(mqtt_topic_hum))
    else:
        print("Sensor failure. Check wiring.")
    time.sleep(5)

#python3 DHT_to_MQTT.py
55 changes: 55 additions & 0 deletions Z_MQTT/Python/MQTT_to_Streamlit.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
"""Streamlit page that shows the most recent message from an MQTT topic.

A paho client subscribes to MQTT_TOPIC in a background thread and stores each
incoming payload in st.session_state['last_message']; the page displays it.
"""
import streamlit as st
import paho.mqtt.client as mqtt
import time

# Define the MQTT client
client = mqtt.Client()

# MQTT Settings
MQTT_BROKER = "192.168.3.200"  # Example broker, replace with your broker's address
MQTT_PORT = 1883
MQTT_TOPIC = "python/mqtt"

# Callback when connecting to the MQTT broker
def on_connect(client, userdata, flags, rc):
    if rc == 0:
        print("Connected to MQTT Broker!")
    else:
        # BUG FIX: the original passed rc as a second print() argument, which
        # printed a tuple instead of formatting the message.
        print("Failed to connect, return code %d\n" % rc)

# Callback when receiving a message from the MQTT broker
def on_message(client, userdata, msg):
    message = msg.payload.decode()
    # NOTE(review): this writes to st.session_state from the paho network
    # thread; Streamlit only guarantees session_state access from the script
    # thread — confirm this works reliably in your Streamlit version.
    st.session_state['last_message'] = message
    print(f"Received `{message}` from `{msg.topic}` topic")

client.on_connect = on_connect
client.on_message = on_message

client.connect(MQTT_BROKER, MQTT_PORT, 60)

# Subscribe to the topic
client.subscribe(MQTT_TOPIC)

# Start the loop in another thread
client.loop_start()

# Streamlit app
st.title('MQTT Streamlit Real-time Data Viewer')

# Initialize session state
if 'last_message' not in st.session_state:
    st.session_state['last_message'] = "Waiting for data..."

# Display the last message
st.write(f"Last message: {st.session_state['last_message']}")

# Use a button to update messages manually (for demonstration)
if st.button('Update'):
    st.write(f"Last message: {st.session_state['last_message']}")

# BUG FIX: the original ended with st.stop() followed by client.loop_stop().
# st.stop() halts script execution, so loop_stop() was unreachable dead code.
# Neither call is needed here: the script ends naturally, and the paho network
# loop runs in a daemon thread that terminates with the process.

#python3 MQTT_to_Streamlit.py
30 changes: 30 additions & 0 deletions Z_MQTT/Python/Python_push_distribution.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
"""Publish pseudo-sensor values (clamped normal distribution) to MQTT forever.

Each iteration draws a value from N(25, 10), clamps it to [0, 50], and
publishes it as a string to `python/mqtt`, twice per second.
"""
import time
import random
import math

# MQTT Broker (EMQX) details
#broker_address = "broker.emqx.io" #local broker
broker_address = "192.168.3.200"   #local network broker
port = 1883
topic = "python/mqtt"

# Distribution parameters (hoisted out of the loop — they never change)
MEAN = 25      # Mean of the distribution
STD_DEV = 10   # Standard deviation of the distribution


def clamped_normal(mean=MEAN, std_dev=STD_DEV, lo=0, hi=50):
    """Return a random draw from N(mean, std_dev) clamped to [lo, hi]."""
    return max(lo, min(hi, random.normalvariate(mean, std_dev)))


def main():
    # Imported here so the module can be imported (e.g. for testing) on
    # machines without paho-mqtt installed; only running the script needs it.
    import paho.mqtt.publish as publish

    while True:
        # Message to publish
        message = str(clamped_normal())

        # Publish the message (one connect/publish/disconnect per call)
        publish.single(topic, message, hostname=broker_address, port=port)

        print(f"Message Published: {message}")

        # Wait half a second (the original comment said 1 s but slept 0.5 s)
        time.sleep(0.5)


if __name__ == "__main__":
    main()

#python3 Python_push_distribution.py
6 changes: 6 additions & 0 deletions Z_MQTT/Python/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@

* <https://pypi.org/project/paho-mqtt/>

```sh
pip install paho-mqtt
```
Empty file added Z_MQTT/Readme.md
Empty file.
Empty file added Z_Micros/readme.md
Empty file.
19 changes: 19 additions & 0 deletions Z_SelfHosting/Emqx/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Minimal image for running an Erlang application on top of the official
# erlang base image. Builds with the current directory as the app source.
FROM erlang:latest

# Set working directory
WORKDIR /app

# Copy your Erlang application code into the container
COPY . /app

# Install any additional dependencies or packages if needed
#RUN apt-get update && apt-get install -y <package-name>

# Run your Erlang application (starts an interactive Erlang shell by default)
CMD ["erl"]


# Build the image from this directory:
#docker build -t my-erlang-app .

# Run it interactively; inside the shell you can check the runtime with:
#docker run -it my-erlang-app
#erl -version
22 changes: 22 additions & 0 deletions Z_SelfHosting/Emqx/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@



```Dockerfile
FROM erlang:latest

# Set working directory
WORKDIR /app

# Copy your Erlang application code into the container
COPY . /app

# Install any additional dependencies or packages if needed
RUN apt-get update && apt-get install -y <package-name>

# Run your Erlang application
CMD ["erl"]
```

```sh
docker build -t my-erlang-app .
```
Empty file added Z_SelfHosting/Grafana/readme.md
Empty file.
17 changes: 17 additions & 0 deletions Z_SelfHosting/Home_Assistant/Docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# docker-compose service for Home Assistant (linuxserver.io image).
# Config is persisted under ~/Docker/HomeAssistant on the host.
version: "2.1"
services:
  homeassistant:
    image: lscr.io/linuxserver/homeassistant:latest
    container_name: homeassistant
    #network_mode: host
    environment:
      - PUID=1000        # host user id the container runs as
      - PGID=1000        # host group id the container runs as
      - TZ=Europe/Rome   # container timezone
    volumes:
      - ~/Docker/HomeAssistant:/config   # persistent HA configuration
    ports:
      - 8123:8123 #optional  # Home Assistant web UI
    #devices:
    #  - /path/to/device:/path/to/device #optional
    restart: unless-stopped
3 changes: 3 additions & 0 deletions Z_SelfHosting/Home_Assistant/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@

* <https://jalcocert.github.io/RPi/posts/rpi-iot-dht11-influxdb/#integrating-home-assistant-with-influxdb>
* <https://jalcocert.github.io/RPi/posts/rpi-mqtt/>
55 changes: 55 additions & 0 deletions Z_SelfHosting/Node-Red/Docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# docker-compose stack: Node-RED (flow editor on :1880) plus an EMQX MQTT
# broker. Node-RED data is persisted in the named volume myNodeREDdata.
version: '3'
services:
  node-red:
    image: nodered/node-red
    ports:
      - "1880:1880"          # Node-RED editor / dashboard
    volumes:
      - myNodeREDdata:/data  # persists flows and installed nodes
    depends_on:
      - mqtt-broker          # start the broker before Node-RED

  mqtt-broker:
    image: emqx/emqx:5.0.0
    ports:
      - "1883:1883"    # MQTT
      - "8083:8083"    # MQTT over WebSocket
      - "8084:8084"    # MQTT over secure WebSocket
      - "8883:8883"    # MQTT over TLS
      - "18083:18083"  # EMQX dashboard

volumes:
  myNodeREDdata:

#sudo docker-compose up -d

#docker run -it -p 1880:1880 -v myNodeREDdata:/data --name mynodered nodered/node-red

################################# alternative, kept for reference
# version: '3'
# services:
#   node-red:
#     image: nodered/node-red
#     ports:
#       - "1880:1880"
#     environment:
#       - TZ=UTC # Optional: Set timezone if needed
#     volumes:
#       - ./data:/data # Optional: Mount a volume to persist data

# version: '3'
# services:
#   node-red:
#     image: nodered/node-red
#     ports:
#       - "1880:1880"
#     environment:
#       - TZ=UTC # Optional: Set timezone if needed
#     volumes:
#       - ./data:/data # Optional: Mount a volume to persist data

#   mqtt-broker:
#     image: emqx/emqx
#     ports:
#       - "1883:1883"

Empty file added Z_SelfHosting/Readme.md
Empty file.
27 changes: 27 additions & 0 deletions Z_SelfHosting/Redash/Readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@

* <https://github.com/getredash/redash>
* <https://redash.io/help/data-sources/querying/supported-data-sources>

* <https://hub.docker.com/r/redash/redash/tags?page=1>


https://github.com/getredash/redash


```sh
# git clone https://github.com/getredash/redash
# cd redash
# cd setup

git clone https://github.com/getredash/setup
cd setup
chmod +x setup.sh
./setup.sh
```

* <https://raw.githubusercontent.com/getredash/setup/master/setup.sh>
* <https://github.com/getredash/setup/blob/master/data/docker-compose.yml>

`localhost:5000`

https://redash.io/help/user-guide/visualizations
2 changes: 2 additions & 0 deletions Z_SelfHosting/Superset/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
## Presto

6 changes: 2 additions & 4 deletions _posts/2021-07-21-getting-started.md
Original file line number Diff line number Diff line change
Expand Up @@ -101,12 +101,10 @@ You can also use it with the Cloud:
| **Apache Superset** | Yes | Open-source data visualization and data exploration platform. Supports SQL querying. Customizable and extensible | Requires technical knowledge for setup and customization. May have performance issues with very large datasets |
| **Kibana** | Yes | Part of the Elastic Stack, excellent for visualizing Elasticsearch data. Great for log and time-series analytics. Real-time data visualization | Primarily tailored to Elasticsearch data. Can be complex to configure and optimize. Less versatile for non-Elasticsearch data |
| **KNIME** | Yes | User-friendly, visual data pipeline design. Extensive plugin ecosystem. Good for non-programmers. Strong in data preprocessing and analysis | Can be less intuitive for complex, custom data analysis. Performance issues with very large datasets |
| **Tableau** | No | Exceptional data visualization capabilities. Intuitive and user-friendly. Strong in business intelligence | Expensive. Not open source. More focused on visualization than data modeling |
| **Python Libraries** (e.g., pandas, scikit-learn) | Yes | Highly flexible and powerful. Huge ecosystem and community. Ideal for custom, complex analysis | Requires programming knowledge. Steeper learning curve for non-programmers |
| **R Libraries** (e.g., ggplot2, dplyr) | Yes | Excellent for statistical analysis and data visualization. Large number of packages for various analyses. Strong academic and research community support | Requires programming knowledge. Less intuitive for those unfamiliar with R |
| **Alteryx** | No | Strong in data blending and preparation. Advanced analytics capabilities. Good integration with other tools | Expensive. Not open source. Steeper learning curve |
| **RapidMiner** | No | Comprehensive data science platform. Good for machine learning and predictive modeling. User-friendly with a visual approach | Free version is limited. Can be expensive for the full version. Steep learning curve for advanced features |
| **QlikView/Qlik Sense** | No | Powerful for interactive data discovery and BI. Flexible and customizable. Good data integration | Can be expensive. Steeper learning curve compared to some competitors. Not open source |

* **Others**: Grafana, Redash, Node-Red, JS ([Epoch](https://epochjs.github.io/epoch/real-time/), [Plotly](https://plotly.com/javascript/streaming/), [chartjs](https://nagix.github.io/chartjs-plugin-streaming/1.9.0/))

<!--
![img-description](https://pbs.twimg.com/media/FJAFshwXoAEf9HV?format=jpg&name=large)
Expand Down
Loading

0 comments on commit bce7d1e

Please sign in to comment.