Merge pull request #1 from CMCC-Foundation/test_marketplace
Test marketplace
atakeigor authored Oct 15, 2024
2 parents f8d0f2c + a219d13 commit aa1c99b
Showing 101 changed files with 21,886 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .cdsapirc
@@ -0,0 +1,2 @@
url: https://cds-beta.climate.copernicus.eu/api
key: None
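The `.cdsapirc` added above is the credentials file read by the `cdsapi` Python client (the key is left as a placeholder). A minimal retrieval sketch, assuming the standard `cdsapi` package; the dataset name and request fields below are illustrative and not taken from this repository:

```python
# Minimal sketch, not repository code: how a .cdsapirc credentials file is
# typically consumed by the cdsapi client. Dataset name and request fields
# are illustrative placeholders.
import cdsapi

client = cdsapi.Client()  # reads url/key from ~/.cdsapirc by default
client.retrieve(
    "reanalysis-era5-single-levels",  # example dataset
    {
        "variable": ["10m_u_component_of_wind", "10m_v_component_of_wind"],
        "year": "2021",
        "month": "08",
        "day": "22",
        "time": ["00:00", "06:00", "12:00", "18:00"],
        "format": "netcdf",
    },
    "era5_wind.nc",  # output file
)
```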
29 changes: 29 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,29 @@
name: ci
on:
  push:
    branches:
      - main
permissions:
  contents: write
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Configure Git Credentials
        run: |
          git config user.name atakeigor
          git config user.email [email protected]
      - uses: actions/setup-python@v5
        with:
          python-version: 3.x
      - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
      - uses: actions/cache@v4
        with:
          key: mkdocs-material-${{ env.cache_id }}
          path: .cache
          restore-keys: |
            mkdocs-material-
      - run: pip install mkdocs-material
      - working-directory: ./documentation
        run: mkdocs gh-deploy --force
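The `cache_id` step in the workflow above rotates the mkdocs-material cache key weekly: `date --utc '+%V'` prints the UTC ISO week number. A small Python sketch of the equivalent logic, for illustration only:

```python
# Illustration only: equivalent of `date --utc '+%V'`, the zero-padded UTC ISO
# week number used to rotate the mkdocs-material cache key once per week.
from datetime import datetime, timezone

cache_id = f"{datetime.now(timezone.utc).isocalendar().week:02d}"
print(f"mkdocs-material-{cache_id}")  # e.g. mkdocs-material-42
```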
69 changes: 69 additions & 0 deletions .gitignore
@@ -0,0 +1,69 @@
# system files
.DS_Store
src/preprocessing/__pycache__/
src/utils/__pycache__/
src/postprocessing/__pycache__/
src/preprocessing/extract/bin/
src/__pycache__/
src/plot/__pycache__/
__pycache__/main.cpython-311.pyc
src/download/__pycache__/__init__.cpython-311.pyc
src/download/__pycache__/download_copernicus_parser.cpython-311.pyc
src/download/__pycache__/download_era5_parser.cpython-311.pyc
# log files
compile.log
medslik_run.log
# folders and files used in other branches
src/model/bin/
src/model/mod/
src/model/test/*.o
src/model/test/*.exe
src/model/test/*.mod
src/model/obj/
src/utils/jday/bin/
src/model/test/data/test_envdata_20170423.nc
src/model/test/data/test_envdata_20170424.nc
src/model/test/data/test_envdata_20170425.nc
fort.91
output
# data not uploaded
data/gebco/GEBCO_2023.nc
data/gshhs/GSHHS_shp/c/*
data/gshhs/GSHHS_shp/f/*
data/gshhs/GSHHS_shp/h/*
data/gshhs/GSHHS_shp/i/*
data/gshhs/GSHHS_shp/l/*
data/gshhs/f/GSHHS_f_L1.shp
data/gshhs/f/GSHHS_f_L1.shx
# algeria test case
cases/algeria/out_files/
cases/algeria/bnc_files/dtmcst1.d
cases/algeria/xp_files/
cases/algeria/bnc_files/
cases/algeria/oce_files/
cases/algeria/met_files/
cases/algeria/*.log
cases/algeria/detections
# paria test case
cases/paria/out_files/oil_concentration.nc
cases/parameters/*
cases/paria/out_files
cases/paria/xp_files
cases/paria/*.log
cases/paria/bnc_files/dtmcst1.d
cases/paria/detections
# lebanon test case
cases/lebanon/out_files/
cases/lebanon/bnc_files/dtmcst1.d
cases/lebanon/xp_files/
cases/lebanon/*.log
cases/lebanon/detections
# Syria test case
cases/syria/out_files/
cases/syria/bnc_files/dtmcst1.d
cases/syria/xp_files/
cases/syria/bnc_files/
cases/syria/oce_files/
cases/syria/met_files/
cases/syria/*.log
cases/syria/detections
146 changes: 146 additions & 0 deletions Dockerfile
@@ -0,0 +1,146 @@
# THIS FILE CONTAINS THE NECESSARY ELEMENTS TO RUN MEDSLIK-II in a container
FROM continuumio/miniconda3

# Set the working directory
WORKDIR /app

COPY requirements_conda.txt .

COPY requirements.txt .

# Create a Conda environment with a specific Python version
RUN conda create -n medslik --file ./requirements_conda.txt

# Make RUN commands use the new environment
SHELL ["conda", "run", "-n", "medslik", "/bin/bash", "-c"]

RUN conda install -n medslik pip

# Install pip packages from the requirements.txt file
RUN pip install -r ./requirements.txt

# Set up the home directory as the Medslik directory
WORKDIR /Medslik-II

# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/Medslik-II" \
    --uid "${UID}" \
    appuser

# Allow appuser to navigate and read scripts
RUN chown -R appuser:appuser /Medslik-II

# Download dependencies as a separate step to take advantage of Docker's caching.
# Leverage a cache mount to /root/.cache/pip to speed up subsequent builds.
# Leverage a bind mount to requirements.txt to avoid having to copy it into
# this layer.
# RUN --mount=type=cache,target=/root/.cache/pip \
# --mount=type=bind,source=requirements.txt,target=requirements.txt \
# python -m pip install -r requirements.txt

RUN apt-get update \
    && apt-get install -yq --no-install-recommends \
        build-essential \
        curl \
        fuse \
        gfortran \
        g++ \
        git \
        gnupg \
        gnupg2 \
        keychain \
        libcurl4-openssl-dev \
        libfuse-dev \
        liblapack-dev \
        libssl-dev \
        locate \
        lsb-release \
        make \
        m4 \
        nano \
        rsync \
        tzdata \
        tini \
        unzip \
        vim \
        wget \
        zip

# build netcdf with gcc and g-fortran
ENV CC=gcc
ENV FC=gfortran

# set library location
ENV PREFIXDIR=/usr/local

WORKDIR /

## get zlib
ENV ZLIB_VERSION=zlib-1.3.1
RUN wget https://zlib.net/${ZLIB_VERSION}.tar.gz && tar -xvzf ${ZLIB_VERSION}.tar.gz
RUN cd ${ZLIB_VERSION} \
&& ./configure --prefix=${PREFIXDIR} \
&& make install
WORKDIR /
RUN rm -rf ${ZLIB_VERSION}.tar.gz ${ZLIB_VERSION}

## get hdf5-1.8
ENV HDF518_VERSION=hdf5-1.8.21
RUN wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8/${HDF518_VERSION}/src/${HDF518_VERSION}.tar.gz && tar -xvzf ${HDF518_VERSION}.tar.gz
RUN cd ${HDF518_VERSION} \
&& ./configure --with-zlib=${PREFIXDIR} --prefix=${PREFIXDIR} --enable-hl --enable-shared \
&& make \
&& make install
WORKDIR /
RUN rm -rf ${HDF518_VERSION}.tar.gz ${HDF518_VERSION}

## get netcdf-c
ENV NETCDFC_VERSION=4.8.0
RUN wget https://github.com/Unidata/netcdf-c/archive/v${NETCDFC_VERSION}.tar.gz && tar -xvzf v${NETCDFC_VERSION}.tar.gz
RUN cd netcdf-c-${NETCDFC_VERSION} \
&& CPPFLAGS=-I${PREFIXDIR}/include LDFLAGS=-L${PREFIXDIR}/lib ./configure --prefix=${PREFIXDIR} --enable-netcdf-4 --enable-shared --enable-dap \
&& make install
WORKDIR /
RUN rm -rf v${NETCDFC_VERSION}.tar.gz netcdf-c-${NETCDFC_VERSION}

# Set these flags because some NETCDF libraries raise numerical warnings during the build. As these are not errors, the flags do not interfere with the results.
ENV FCFLAGS="-w -fallow-argument-mismatch -O2"
ENV FFLAGS="-w -fallow-argument-mismatch -O2"

## get netcdf-fortran
ENV NETCDFFORTRAN_VERSION=4.6.0
RUN wget https://github.com/Unidata/netcdf-fortran/archive/v${NETCDFFORTRAN_VERSION}.tar.gz && tar -xvzf v${NETCDFFORTRAN_VERSION}.tar.gz
RUN cd netcdf-fortran-${NETCDFFORTRAN_VERSION} \
&& CPPFLAGS=-I${PREFIXDIR}/include LDFLAGS=-L${PREFIXDIR}/lib ./configure --prefix=${PREFIXDIR} \
# && make check \
&& make install
WORKDIR /
RUN rm -rf v${NETCDFFORTRAN_VERSION}.tar.gz netcdf-fortran-${NETCDFFORTRAN_VERSION}

# Create an env variable to indicate where the NetCDF libraries are installed; otherwise, executables are not able to run.
ENV LD_LIBRARY_PATH=/usr/local/lib

# Switch to the non-privileged user to run the application.
USER appuser

# Change to home directory again
WORKDIR /Medslik-II

RUN echo "source activate medslik" > ~/.bashrc

ENV PATH /opt/conda/envs/medslik/bin:$PATH

# Copy the source code into the container.
COPY --chown=appuser:appuser . .

EXPOSE 8501

HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health

# Run the application.
ENTRYPOINT ["streamlit", "run", "interface/WITOIL_on_Cloud.py", "--server.port=8501", "--server.address=0.0.0.0"]
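For reference, a minimal sketch of building and running this image with the optional Docker SDK for Python (`pip install docker`); the image tag `witoil-on-cloud` is an assumption, not something defined by the repository:

```python
# Sketch under assumptions: requires the docker Python SDK and a running Docker
# daemon; the tag "witoil-on-cloud" is illustrative.
import docker

client = docker.from_env()

# Build the image from the Dockerfile in the current directory.
image, _build_logs = client.images.build(path=".", tag="witoil-on-cloud")

# Run the Streamlit interface, publishing the port exposed above.
container = client.containers.run(
    "witoil-on-cloud",
    detach=True,
    ports={"8501/tcp": 8501},
)
print(container.short_id, "-> http://localhost:8501")
```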
46 changes: 46 additions & 0 deletions cases/syria/config_syria.toml
@@ -0,0 +1,46 @@
[simulation]
name = "syria" # name of the simulation.
experiment_path = "cases/" # path where experiment folder should be placed
start_datetime = 2021-08-22T13:43:00 # start date of the simulation
sim_length = 24.0 # length of the simulation in HOURS
spill_lat = [35.25] # lat of oil spill
spill_lon = [35.90] # lon of oil spill
spill_duration = [0.0] # duration of oil spill HOURS. = 0 for instantaneous release
spill_rate = [27.78] # spill rate TONS/HOUR
slick_age = [0.0] # age of oil slick in HOURS
oil = [28] # either the oil API (number) or its name (string), e.g. ["Ragusa"]; note that the name must be exact
area_spill = true
area_vertex = false # comprises three levels of lists. 1st: all slicks. 2nd: individual slick. 3rd: coordinates of each vertex in each individual slick
multiple_slick = false
advanced_parameters = false # user must provide parameters.toml file
advanced_parameters_path = "" # this path should be provided only if "advanced_parameters" is = true
[download]
download_data = true
download_curr = true
download_wind = true
copernicus_user = "my_username"
copernicus_password = "my_password"
[input_files]
set_domain = false
delta = [0.75] # default distance in degrees to download or crop data in case lat and lon areas are not provided
lat = [9, 12]
lon = [-62, -65]
[input_files.dtm]
bathymetry_path = "data/gebco/GEBCO_2023.nc" # GEBCO 2023 bathymetry file
coastline_path = "data/gshhs/f/GSHHS_f_L1.shp" # coastline shapefile gshhs
[input_files.metoce]
oce_data_path = "path/to/oce/data"
met_data_path = "path/to/met/data"
[input_files.shapefile]
shape_path = "cases/syria/result.shp" # add "path/to/shapefile" in case you want to start from shapefile
[run_options]
preprocessing = true
preprocessing_metoce = true
preprocessing_dtm = true
run_model = true
postprocessing = true # conversion from particles to concentration
[plot_options]
plotting = true
# Domain for plotting
plot_lon = [35.75,36] # [min lon, max lon]
plot_lat = [35.1,35.4] # [min lat, max lat]
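As the `delta` comment above notes, when no lat/lon area is given the domain is a box around the spill location. An illustrative sketch of that delta-based bounding box using the values from this config (not the repository's own preprocessing code):

```python
# Illustrative only, not repository code: delta-based bounding box around the
# spill location, using spill_lat/spill_lon/delta from config_syria.toml.
spill_lat, spill_lon, delta = 35.25, 35.90, 0.75

lat_min, lat_max = spill_lat - delta, spill_lat + delta
lon_min, lon_max = spill_lon - delta, spill_lon + delta
print(f"lat: [{lat_min}, {lat_max}], lon: [{lon_min}, {lon_max}]")
# -> lat: [34.5, 36.0], lon: [35.15, 36.65]
```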
19 changes: 19 additions & 0 deletions compile.sh
@@ -0,0 +1,19 @@
# set folders
DIR_EXE="$PWD/src/model/bin/"
DIR_SRC="$PWD/src/model/"
NETCDF="$CONDA_PREFIX/"
# print message
echo "============================="
echo " MEDSLIK-II SOFTWARE ... "
echo "============================="
echo "MODEL FOLDER $DIR_SRC"
echo "EXEC FOLDER $DIR_EXE"
echo "NETCDF LIBRARIES TAKEN FROM $NETCDF"
echo "============================="
# compile model
echo " -- Compiling model ..."
mkdir -p "$DIR_EXE"
gfortran -I"$NETCDF/include" -L"$NETCDF/lib" "$DIR_SRC/simulation.for" -lnetcdf -lnetcdff -o "$DIR_EXE/simulation.exe"
# print message
echo "============================="
echo "END SUCCESSFULLY"
49 changes: 49 additions & 0 deletions config.toml
@@ -0,0 +1,49 @@
# NOTE: this config is provided as a TEMPLATE; it should be changed by the user before use.
[simulation]
name = "my_experiment" # name of the simulation.
experiment_path = "path/to/user/output/folder" # path where experiment folder should be placed
start_datetime = 2021-08-21T03:43:00 # start date of the simulation
sim_length = 48.0 # length of the simulation in HOURS
spill_lat = [35.25] # lat of oil spill (deg N)
spill_lon = [35.90] # lon of oil spill (deg E)
spill_duration = [0.0] # duration of oil spill HOURS. = 0.0 for instantaneous release
spill_rate = [27.78] # spill rate TONS/HOUR
slick_age = [0.0] # age of oil slick in HOURS
oil = [28] # either the oil API (number) or its name (string), e.g. ["Ragusa"]; note that the name must be exact.
area_spill = true
area_vertex = false # comprises three levels of lists. 1st: all slicks. 2nd: individual slick. 3rd: coordinates of each vertex in each individual slick
multiple_slick = false
advanced_parameters = false # if = true, user must provide parameters.toml file
advanced_parameters_path = "src/parameters.toml" # this path should be provided only if "advanced_parameters" is = true
[download]
download_data = false # = true if data are not provided by the user
download_curr = false # = true : OCE data are downloaded from Copernicus Marine Service
download_wind = false # = true : MET data are downloaded from ECMWF ERA5 product
copernicus_user = "my_username"
copernicus_password = "my_password"
[input_files]
set_domain = true # If the user wants to set the domain for cropping/preprocessing input data
lat = [31, 38]
lon = [32, 37]
delta = [0.75] # default domain length in degrees (applied to both lon/lat), to download or crop data
# note: delta is used only if set_domain = false
[input_files.dtm]
bathymetry_path = "data/gebco/GEBCO_2023.nc" # GEBCO 2023 bathymetry file
coastline_path = "data/gshhs/f/GSHHS_f_L1.shp" # coastline shapefile gshhs
[input_files.metoce]
oce_data_path = "path/to/oce/data" # to provide if download_curr = false
met_data_path = "path/to/met/data" # to provide if download_wind = false
[input_files.shapefile]
shape_path = "path/to/shapefile" # add "path/to/shapefile" in case you want to start from shapefile.
# set shape_path = false or "none" if you do not want to start from a shapefile
[run_options]
preprocessing = true # = false if no preprocessing at all should be performed
preprocessing_metoce = true # MET/OCE data preprocessing
preprocessing_dtm = true # bathymetry and coastline preprocessing
run_model = true # run oil spill model
postprocessing = true # conversion from particles to concentration
[plot_options]
plotting = true # = true if results should be plotted (it should be used only if postprocessing = true)
# Domain for plotting
plot_lon = [35.5,36.5] # [min lon, max lon]
plot_lat = [35,36] # [min lat, max lat]
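A minimal sketch of reading this template from Python, assuming Python 3.11+ for the standard-library `tomllib` module; the keys used are the ones defined above:

```python
# Minimal sketch, assuming Python 3.11+ (tomllib); keys follow config.toml above.
import tomllib
from datetime import timedelta

with open("config.toml", "rb") as f:
    cfg = tomllib.load(f)

sim = cfg["simulation"]
# TOML parses start_datetime into a datetime.datetime object,
# so the simulation end time can be derived directly.
end = sim["start_datetime"] + timedelta(hours=sim["sim_length"])
print(sim["name"], "runs until", end)
```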
Empty file added data/COPERNICUS/.gitkeep
Empty file.
Empty file added data/ERA5/.gitkeep
Empty file.
Empty file added data/gebco/.gitkeep
Empty file.
Empty file added data/gshhs/.gitkeep
Empty file.