Skip to content

Commit

Permalink
Merge pull request #37 from lexara-prime-ai/dev
Browse files Browse the repository at this point in the history
Dev
  • Loading branch information
lexara-prime-ai authored Jun 4, 2024
2 parents af8502c + 9d3fa0e commit a63a4b5
Show file tree
Hide file tree
Showing 5 changed files with 182 additions and 87 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
.terraform
*.tfstate
.env
service_account.json
# Added by cargo

/target
Expand Down
9 changes: 6 additions & 3 deletions hyper/hyper/constants.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
class CONSTANTS:
    """Shared configuration values for the hyper pipeline."""

    # Location of the CSV export consumed by Tableau / uploaded to Drive.
    FILE_PATH = "./tableau/data"
    FILE_NAME = "wspr_spot_data.csv"
    FULL_PATH = FILE_PATH + "/" + FILE_NAME

    # Google Drive API configuration.
    SCOPES = ["https://www.googleapis.com/auth/drive"]
    SERVICE_ACCOUNT_FILE = "service_account.json"
    # Drive folder that receives the uploaded CSV exports.
    PARENT_FOLDER_ID = "1uVnscmoxu91XT1LMnZoOOn3iCTirlEII"
33 changes: 33 additions & 0 deletions hyper/hyper/drive.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import os

from google.oauth2 import service_account
from googleapiclient.discovery import build

import constants


def authenticate():
    """Build service-account credentials for the Google Drive API.

    Returns:
        google.oauth2.service_account.Credentials scoped to
        CONSTANTS.SCOPES.

    Raises:
        Exception: re-raised after logging when the service-account file
        cannot be loaded (callers such as upload_to_drive catch it).
    """
    try:
        print("\n[Authenticating] <wspr_cdk> service user...")

        # constants.py declares these values on the CONSTANTS class, so
        # they must be read via constants.CONSTANTS, not the module itself.
        credentials = service_account.Credentials.from_service_account_file(
            constants.CONSTANTS.SERVICE_ACCOUNT_FILE,
            scopes=constants.CONSTANTS.SCOPES,
        )

        return credentials
    except Exception as e:
        print("\n[ERROR] -> Failed to [Authenticate] <wspr_cdk> service user: \n", e)
        # Re-raise instead of silently falling through and returning None,
        # which would otherwise surface later as a confusing failure inside
        # the Drive client build.
        raise


def upload_to_drive(file_path):
    """Upload a local file into the configured Google Drive folder.

    Args:
        file_path: Path of the local file to upload.

    Errors are logged rather than raised (best-effort upload).
    """
    try:
        credentials = authenticate()
        service = build("drive", "v3", credentials=credentials)

        file_metadata = {
            # Name the Drive file after the actual local file instead of a
            # hard-coded constant, so any export path stays in sync.
            "name": os.path.basename(file_path),
            # constants.py exposes this on the CONSTANTS class, not the module.
            "parents": [constants.CONSTANTS.PARENT_FOLDER_ID],
        }

        print("[Uploading] file to Google Drive...\n")

        service.files().create(body=file_metadata, media_body=file_path).execute()
    except Exception as e:
        print("\n[ERROR] -> Failed to upload to Google Drive: \n", e)
158 changes: 101 additions & 57 deletions hyper/hyper/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,77 +2,121 @@
import csv
import os

import python_wrapper.python_wrapper
import constants
import drive

import python_wrapper.python_wrapper


class Server:
def __init__(self):
    # Destination for the CSV export. constants.py declares its values on
    # the CONSTANTS class, so access them via constants.CONSTANTS
    # (constants.FILE_PATH alone would raise AttributeError).
    self.write_path = os.path.join(
        constants.CONSTANTS.FILE_PATH, constants.CONSTANTS.FILE_NAME
    )

async def write_to_csv(self):
    """
    Fetch WSPR spot records, write them to a CSV file, and upload the
    result to Google Drive.

    Args: self.
    return type: ()
    """
    # (header label, record key) pairs, in CSV column order. A single
    # table drives both the header row and per-record extraction so the
    # two can never drift apart.
    columns = [
        ("ID", "id"),
        ("Time", "time"),
        ("Band", "band"),
        ("RX Sign", "rx_sign"),
        ("RX Lat", "rx_lat"),
        ("RX Lon", "rx_lon"),
        ("RX Loc", "rx_loc"),
        ("TX Sign", "tx_sign"),
        ("TX Lat", "tx_lat"),
        ("TX Lon", "tx_lon"),
        ("TX Loc", "tx_loc"),
        ("Distance", "distance"),
        ("Azimuth", "azimuth"),
        ("RX Azimuth", "rx_azimuth"),
        ("Frequency", "frequency"),
        ("Power", "power"),
        ("SNR", "snr"),
        ("Drift", "drift"),
        ("Version", "version"),
        ("Code", "code"),
    ]

    try:
        output = await python_wrapper.python_wrapper.get_wspr_spots("1000", "JSON")
        data = output.get_data()

        # Display data that's being fetched for [DEBUG] purposes.
        await self.display_data(data)

        write_path = self.write_path
        print("\nWrite path: \n", write_path)

        # constants.py declares its values on the CONSTANTS class, so read
        # them via constants.CONSTANTS (constants.FILE_PATH would raise
        # AttributeError). exist_ok avoids a check-then-create race.
        os.makedirs(constants.CONSTANTS.FILE_PATH, exist_ok=True)

        with open(write_path, mode="w", newline="") as file:
            writer = csv.writer(file)
            writer.writerow([header for header, _ in columns])
            for record in data:
                writer.writerow([record[key] for _, key in columns])

        # Upload [output] file to Google Drive.
        drive.upload_to_drive(constants.CONSTANTS.FULL_PATH)
    except Exception as e:
        print("An [ERROR] occurred: ", e)

async def display_data(self, data):
"""
Args: self, data -> WsprSpot dict.
return type: ()
Args: self, data -> WsprSpot dict.
return type: ()
"""
for record in data:
id_field = record['id']
time_field = record['time']
band_field = record['band']
rx_sign_field = record['rx_sign']
rx_lat_field = record['rx_lat']
rx_lon_field = record['rx_lon']
rx_loc_field = record['rx_loc']
tx_sign_field = record['tx_sign']
tx_lat_field = record['tx_lat']
tx_lon_field = record['tx_lon']
tx_loc_field = record['tx_loc']
distance_field = record['distance']
azimuth_field = record['azimuth']
rx_azimuth_field = record['rx_azimuth']
frequency_field = record['frequency']
power_field = record['power']
snr_field = record['snr']
drift_field = record['drift']
version_field = record['version']
code_field = record['code']
id_field = record["id"]
time_field = record["time"]
band_field = record["band"]
rx_sign_field = record["rx_sign"]
rx_lat_field = record["rx_lat"]
rx_lon_field = record["rx_lon"]
rx_loc_field = record["rx_loc"]
tx_sign_field = record["tx_sign"]
tx_lat_field = record["tx_lat"]
tx_lon_field = record["tx_lon"]
tx_loc_field = record["tx_loc"]
distance_field = record["distance"]
azimuth_field = record["azimuth"]
rx_azimuth_field = record["rx_azimuth"]
frequency_field = record["frequency"]
power_field = record["power"]
snr_field = record["snr"]
drift_field = record["drift"]
version_field = record["version"]
code_field = record["code"]

# Verify content.
print("\nFetching [ROW] >\n")
Expand Down
68 changes: 41 additions & 27 deletions scripts/bash/python_deps.sh
Original file line number Diff line number Diff line change
@@ -1,34 +1,48 @@
#!/bin/bash
# Install and verify the Python dependencies required by the project.

# Update the package list.
echo "Updating package list..."
sudo apt-get update

# Install pip if it is not already installed.
echo "Checking for pip..."
if ! command -v pip &>/dev/null; then
    echo "pip not found. Installing pip..."
    sudo apt-get install -y python3-pip
else
    echo "pip is already installed."
fi

# Modules that will be installed/upgraded.
modules=("mkdocs" "tableauhyperapi" "google-api-python-client" "google-auth-httplib2" "google-auth-oauthlib")

echo "Installing dependencies: ${modules[*]}..."
pip install "${modules[@]}" --upgrade

# Verify that a pip package can be imported under its Python import name.
verify_installation() {
    local module=$1
    local import_name=$2
    echo "Verifying ${module} installation..."
    if python3 -c "import ${import_name}" &>/dev/null; then
        echo "${module} successfully installed."
    else
        echo "Failed to install ${module}."
    fi
}

# Maps pip package names to their (sometimes different) import names.
declare -A module_import_map=(
    ["mkdocs"]="mkdocs"
    ["tableauhyperapi"]="tableauhyperapi"
    ["google-api-python-client"]="googleapiclient"
    ["google-auth-httplib2"]="google_auth_httplib2"
    ["google-auth-oauthlib"]="google_auth_oauthlib"
)

# Verify installation of each module.
for module in "${!module_import_map[@]}"; do
    verify_installation "${module}" "${module_import_map[${module}]}"
done

0 comments on commit a63a4b5

Please sign in to comment.