Merge pull request #15 from NREL/use_prettyPlot
remove plots util and use pretty plot instead
malihass authored Nov 28, 2023
2 parents 1142fd5 + 1259c92 commit 2db6c23
Showing 23 changed files with 102 additions and 1,066 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
@@ -39,10 +39,10 @@ jobs:
           format utils true
       - name: Normalizing flow test
         run: |
-          python main_iterative.py tests/input_test
+          python main_iterative.py -i tests/input_test
       - name: Bins test
         run: |
-          python main_iterative.py tests/input_test_bins
+          python main_iterative.py -i tests/input_test_bins
       - name: Parallel normalizing flow test
         run: |
-          mpiexec -np 2 python main_iterative.py tests/input_test
+          mpiexec -np 2 python main_iterative.py -i tests/input_test
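The CI commands now pass the input deck through an -i flag instead of a bare positional argument. As a rough, hypothetical sketch only (main_iterative.py's actual argument handling is not part of this diff), an argparse-based entry point accepting that flag could look like:

import argparse

# Hypothetical CLI sketch; the real main_iterative.py parser may differ.
parser = argparse.ArgumentParser(description="Run the iterative training on an input deck")
parser.add_argument(
    "-i",
    "--input",
    required=True,
    help="path to the input folder, e.g. tests/input_test",
)
args = parser.parse_args()
print("Using input deck:", args.input)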
3 changes: 1 addition & 2 deletions data-efficientML/artificialCase/trainGP.py
@@ -7,11 +7,10 @@
 import os
 import warnings

-from myProgressBar import printProgressBar
-from plotsUtil import *
 from sklearn.gaussian_process.kernels import RBF
 from sklearn.gaussian_process.kernels import ConstantKernel as C
 from sklearn.gaussian_process.kernels import WhiteKernel
+from prettyPlot.progressBar import print_progress_bar


 def partitionData(nData, nBatch):
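Callers that previously used printProgressBar from the deleted myProgressBar module now import print_progress_bar from prettyPlot. A minimal standalone usage sketch, reusing only the prefix/suffix/length arguments that appear elsewhere in this diff (the loop body is a placeholder):

import time

from prettyPlot.progressBar import print_progress_bar

n_steps = 20
print_progress_bar(0, n_steps, prefix="Eval 0 / %d" % n_steps, suffix="Complete", length=50)
for i in range(n_steps):
    time.sleep(0.01)  # stand-in for the real per-step work
    print_progress_bar(
        i + 1,
        n_steps,
        prefix="Eval %d / %d" % (i + 1, n_steps),
        suffix="Complete",
        length=50,
    )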
11 changes: 6 additions & 5 deletions data-efficientML/artificialCase/trainNN.py
@@ -7,12 +7,11 @@
 # NN Stuff
 import tensorflow as tf
 from myNN_better import *
-from myProgressBar import printProgressBar
-from plotsUtil import *
 from sklearn.model_selection import train_test_split
 from tensorflow import keras
 from tensorflow.keras import layers, regularizers

+from prettyPlot.progressBar import print_progress_bar
+from parallel import irank, iroot

 def partitionData(nData, nBatch):
     # ~~~~ Partition the data across batches
@@ -38,23 +37,25 @@ def getPrediction(model, data):
     ApproxBatchSize = 10000
     nBatch = max(int(round(nPoints / ApproxBatchSize)), 1)
     nData_b, startData_b = partitionData(nPoints, nBatch)
-    printProgressBar(
+    print_progress_bar(
         0,
         nBatch,
         prefix="Eval " + str(0) + " / " + str(nBatch),
         suffix="Complete",
         length=50,
+        extraCond=(irank==iroot),
     )
     for ibatch in range(nBatch):
         start_ = startData_b[ibatch]
         end_ = startData_b[ibatch] + nData_b[ibatch]
         result[start_:end_] = np.squeeze(model(data[start_:end_]))
-        printProgressBar(
+        print_progress_bar(
             ibatch + 1,
             nBatch,
             prefix="Eval " + str(ibatch + 1) + " / " + str(nBatch),
             suffix="Complete",
             length=50,
+            extraCond=(irank==iroot),
         )

     return result
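The extraCond keyword added above is presumably what keeps the bar quiet on non-root MPI ranks when the test runs under mpiexec. The repository takes irank and iroot from its parallel module, which is not shown in this diff; the sketch below assumes they are simply the mpi4py rank and root rank 0:

from mpi4py import MPI

from prettyPlot.progressBar import print_progress_bar

comm = MPI.COMM_WORLD
irank = comm.Get_rank()  # assumed equivalent of parallel.irank
iroot = 0                # assumed equivalent of parallel.iroot

n_batch = 10
print_progress_bar(
    0,
    n_batch,
    prefix="Eval 0 / %d" % n_batch,
    suffix="Complete",
    length=50,
    extraCond=(irank == iroot),  # only the root rank draws the bar
)
for ibatch in range(n_batch):
    # per-batch model evaluation would go here
    print_progress_bar(
        ibatch + 1,
        n_batch,
        prefix="Eval %d / %d" % (ibatch + 1, n_batch),
        suffix="Complete",
        length=50,
        extraCond=(irank == iroot),
    )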
6 changes: 3 additions & 3 deletions data-efficientML/artificialCase/util/myNN_better.py
@@ -2,13 +2,13 @@

 import numpy as np
 import tensorflow as tf
-from myProgressBar import printProgressBar
 from tensorflow.keras import backend as K
 from tensorflow.keras import layers, losses, optimizers, regularizers
 from tensorflow.keras.callbacks import CSVLogger
 from tensorflow.keras.constraints import max_norm, unit_norm
 from tensorflow.keras.layers import *
 from tensorflow.keras.models import Model
+from prettyPlot.progressBar import print_progress_bar


 @tf.function
@@ -72,7 +72,7 @@ def train(
     dsOptimizer = optimizers.Adam(learning_rate=lr)

     # Train
-    printProgressBar(
+    print_progress_bar(
         0,
         epochs,
         prefix="Loss=%s Epoch= %d / %d " % ("?", 0, epochs),
@@ -99,7 +99,7 @@ def train(
             train_mse_loss += tf.reduce_sum(mse)
             nSample += mse.shape[0]

-        printProgressBar(
+        print_progress_bar(
             epoch + 1,
             epochs,
             prefix="Loss=%.2f Epoch= %d / %d "
37 changes: 0 additions & 37 deletions data-efficientML/artificialCase/util/myProgressBar.py

This file was deleted.


