From 86f32157cb1803c271ff7ee579b4d9980b3768f6 Mon Sep 17 00:00:00 2001 From: Christian Biasuzzi Date: Wed, 16 Nov 2016 12:36:09 +0100 Subject: [PATCH 1/7] adds optional case-file parameter to the online itool command --- .../online/OnlineWorkflowParameters.java | 13 ++++++- .../online/OnlineWorkflowImpl.java | 39 ++++++++++++------- .../online/db/OnlineDbMVStore.java | 9 ++++- .../online/tools/ListOnlineWorkflowsTool.java | 6 +++ .../online/tools/OnlineWorkflowCommand.java | 7 ++++ .../online/tools/OnlineWorkflowTool.java | 15 +++++-- 6 files changed, 70 insertions(+), 19 deletions(-) diff --git a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java index 9e3f6b3b..54c5163f 100644 --- a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java +++ b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java @@ -48,6 +48,7 @@ public class OnlineWorkflowParameters implements Serializable { private float limitReduction; private boolean handleViolationsInN; private float constraintMargin; + private String caseFile; public static OnlineWorkflowParameters loadDefault() { ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("online-default-parameters"); @@ -68,6 +69,7 @@ public static OnlineWorkflowParameters loadDefault() { float limitReduction = config.getFloatProperty("limitReduction", DEFAULT_LIMIT_REDUCTION); boolean handleViolationsInN = config.getBooleanProperty("handleViolationsInN", DEFAULT_HANDLE_VIOLATIONS_IN_N); float constraintMargin = config.getFloatProperty("constraintMargin", DEFAULT_CONSTRAINT_MARGIN); + String caseFile = config.getStringProperty("caseFile", null); return new OnlineWorkflowParameters(baseCaseDate, states, @@ -85,14 +87,15 @@ public static OnlineWorkflowParameters loadDefault() { mergeOptimized, limitReduction, handleViolationsInN, - constraintMargin + constraintMargin, + caseFile ); } public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon, String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation, Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized, - float limitReduction, boolean handleViolationsInN, float constraintMargin) { + float limitReduction, boolean handleViolationsInN, float constraintMargin, String caseFile) { Objects.requireNonNull(baseCaseDate); Objects.requireNonNull(histoInterval); Objects.requireNonNull(countries); @@ -114,6 +117,7 @@ public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval hist this.limitReduction = limitReduction; this.handleViolationsInN = handleViolationsInN; this.constraintMargin = constraintMargin; + this.caseFile = caseFile; } public DateTime getBaseCaseDate() { @@ -184,6 +188,8 @@ public float getConstraintMargin() { return constraintMargin; } + public String getCaseFile() { return caseFile; } + @Override public String toString() { return "{baseCaseDate=" + baseCaseDate @@ -203,6 +209,7 @@ public String toString() { + ", limitReduction=" + limitReduction + ", handleViolationsInN=" + handleViolationsInN + ", constraintMargin=" + constraintMargin + + ", caseFile=" + caseFile + "}"; } @@ -274,4 +281,6 @@ public void setConstraintMargin(float constraintMargin) { this.constraintMargin = constraintMargin; } + public void setCaseFile(String caseFile) { 
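//       Illustrative sketch only (not taken from this patch; the file path below is hypothetical):
//       the new optional parameter can be set programmatically after loading the defaults,
//       mirroring the "caseFile" property that loadDefault() reads from the
//       online-default-parameters module, or supplied on the command line through the new
//       --case-file option added to OnlineWorkflowCommand later in this patch.
//           OnlineWorkflowParameters params = OnlineWorkflowParameters.loadDefault();
//           params.setCaseFile("/path/to/basecase.xiidm"); // hypothetical path; overrides base-case date, countries and case type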
this.caseFile = caseFile; } + } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java b/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java index b6e32a79..3a4f55b2 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java @@ -8,19 +8,13 @@ package eu.itesla_project.online; import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.EnumMap; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; +import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import eu.itesla_project.cases.CaseType; +import eu.itesla_project.iidm.import_.Importers; import org.joda.time.format.DateTimeFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -142,8 +136,19 @@ public OnlineWorkflowImpl( this.rulesFacadeFactory = rulesFacadeFactory; this.parameters = parameters; this.startParameters = startParameters; - this.id = DateTimeFormat.forPattern("yyyyMMdd_HHmm_").print(parameters.getBaseCaseDate())+new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date()); - logger.info(parameters.toString()); + if (parameters.getCaseFile() != null) { + //TODO avoid loading network twice, here and in the start method + // load network, to get its ID and override existing parameters (base-case, countries, case-type) + Network network = Importers.loadNetwork(parameters.getCaseFile()); + if (network == null) { + throw new RuntimeException("Case '" + parameters.getCaseFile() + "' not found"); + } + this.parameters.setBaseCaseDate(network.getCaseDate()); + this.parameters.setCountries(network.getCountries()); + this.parameters.setCaseType((network.getForecastDistance() == 0) ? 
CaseType.SN : CaseType.FO); + } + this.id = DateTimeFormat.forPattern("yyyyMMdd_HHmm_").print(this.parameters.getBaseCaseDate()) + new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date()); + logger.info(this.parameters.toString()); } @@ -164,8 +169,16 @@ public void start(OnlineWorkflowContext oCtx) throws Exception { for (OnlineApplicationListener l :listeners) l.onWorkflowUpdate(new StatusSynthesis(id,StatusSynthesis.STATUS_RUNNING)); - Network network = MergeUtil.merge(caseRepository, parameters.getBaseCaseDate(), parameters.getCaseType(), parameters.getCountries(), - loadFlowFactory, 0, mergeOptimizerFactory, computationManager, parameters.isMergeOptimized()); + Network network = null; + if (parameters.getCaseFile() != null ) { + network = Importers.loadNetwork(parameters.getCaseFile()); + if (network == null) { + throw new RuntimeException("Case '" + parameters.getCaseFile() + "' not found"); + } + } else { + network = MergeUtil.merge(caseRepository, parameters.getBaseCaseDate(), parameters.getCaseType(), parameters.getCountries(), + loadFlowFactory, 0, mergeOptimizerFactory, computationManager, parameters.isMergeOptimized()); + } logger.info("- Network id: " + network.getId()); logger.info("- Network name: "+ network.getName()); diff --git a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java index 02588bea..87c4f2bf 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java @@ -122,6 +122,7 @@ public class OnlineDbMVStore implements OnlineDb { private static final String STORED_PARAMETERS_LIMIT_REDUCTION_KEY = "limit_reduction"; private static final String STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY = "handle_violations"; private static final String STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY = "constraint_margin"; + private static final String STORED_PARAMETERS_CASE_FILE_KEY = "case_file"; private static final String STORED_STATES_PROCESSING_STATUS_MAP_NAME = "statesProcessingStatus"; private static final String STORED_STATES_LIST_KEY = "states"; private static final String STORED_STATES_STATE_DETAILS_KEY = "stateStatusDetails"; @@ -842,6 +843,8 @@ public void storeWorkflowParameters(String workflowId, OnlineWorkflowParameters storedParametersMap.put(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY, Boolean.toString(parameters.isHandleViolationsInN())); // store merge constraint margin storedParametersMap.put(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY, Float.toString(parameters.getConstraintMargin())); + // store case file name (null if it was not specified) + storedParametersMap.put(STORED_PARAMETERS_CASE_FILE_KEY, parameters.getCaseFile()); wfMVStore.commit(); } @@ -890,6 +893,9 @@ public OnlineWorkflowParameters getWorkflowParameters(String workflowId) { float constraintMargin = OnlineWorkflowParameters.DEFAULT_CONSTRAINT_MARGIN; if ( storedParametersMap.containsKey(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)) constraintMargin = Float.parseFloat(storedParametersMap.get(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)); + String caseFile=null; + if ( storedParametersMap.containsKey(STORED_PARAMETERS_CASE_FILE_KEY)) + caseFile = storedParametersMap.get(STORED_PARAMETERS_CASE_FILE_KEY); return new OnlineWorkflowParameters(baseCaseDate, states, @@ -907,7 +913,8 @@ public OnlineWorkflowParameters getWorkflowParameters(String workflowId) { mergeOptimized, limitReduction, handleViolations, 
- constraintMargin + constraintMargin, + caseFile ); } else { LOGGER.warn("No configuration parameters of wf {} stored in online db", workflowId); diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java index 5b61d5fa..c8f49611 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java @@ -203,6 +203,12 @@ public void run(CommandLine line) throws Exception { table.addCell(" "); table.addCell("Constrain Margin = "+Float.toString(parameters.getConstraintMargin())); wfJsonData.put(OnlineWorkflowCommand.CONSTRAINT_MARGIN, Float.toString(parameters.getConstraintMargin())); + if (parameters.getCaseFile() != null) { + table.addCell(" "); + table.addCell(" "); + table.addCell("Case file = " + parameters.getCaseFile()); + wfJsonData.put(OnlineWorkflowCommand.CASE_FILE, parameters.getCaseFile()); + } } else { table.addCell("-"); } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java index 9ae0bb42..b3e06715 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java @@ -43,6 +43,7 @@ public class OnlineWorkflowCommand implements Command { public static final String LIMIT_REDUCTION = "limits-reduction"; public static final String HANDLE_VIOLATION_IN_N = "handle-violations"; public static final String CONSTRAINT_MARGIN = "constraint-margin"; + public static final String CASE_FILE="case-file"; @Override public String getName() { @@ -196,6 +197,12 @@ public Options getOptions() { .argName(CONSTRAINT_MARGIN) .build()); + opts.addOption(Option.builder().longOpt(CASE_FILE) + .desc("case file - Note: override (base-case, case-type, countries) parameters") + .hasArg() + .argName(CASE_FILE) + .build()); + return opts; } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java index 98f55c0a..ce06c87a 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java @@ -95,6 +95,9 @@ public void run(CommandLine line) throws Exception { } } + if(line.hasOption(OnlineWorkflowCommand.CASE_FILE)) + params.setCaseFile(line.getOptionValue(OnlineWorkflowCommand.CASE_FILE)); + String histo=line.getOptionValue(OnlineWorkflowCommand.HISTODB_INTERVAL); if(histo!=null) params.setHistoInterval(Interval.parse(histo)); @@ -168,10 +171,16 @@ public void run(CommandLine line) throws Exception { if( line.hasOption(OnlineWorkflowCommand.START_CMD)) { - for (DateTime basecase:baseCasesSet) { - params.setBaseCaseDate(basecase); - System.out.println("starting Online Workflow, basecase " + basecase.toString()); + if (params.getCaseFile() != null) { + System.out.println("starting Online Workflow, caseFile " + params.getCaseFile()); application.startWorkflow(startconfig, params); + + } else { + for (DateTime basecase : baseCasesSet) { + params.setBaseCaseDate(basecase); + System.out.println("starting Online Workflow, basecase " + 
basecase.toString()); + application.startWorkflow(startconfig, params); + } } } else if(line.hasOption(OnlineWorkflowCommand.SHUTDOWN_CMD)) From fe48fb255ea0268890e737b8547466762ebfaa69 Mon Sep 17 00:00:00 2001 From: Christian Biasuzzi Date: Fri, 18 Nov 2016 17:00:09 +0100 Subject: [PATCH 2/7] reformats code: fix indentation, spaces, etc. --- .../online/OnlineWorkflowParameters.java | 67 +-- .../online/OnlineWorkflowImpl.java | 183 +++---- .../online/db/OnlineDbMVStore.java | 500 +++++++++--------- .../online/tools/ListOnlineWorkflowsTool.java | 88 ++- .../online/tools/OnlineWorkflowCommand.java | 29 +- .../online/tools/OnlineWorkflowTool.java | 106 ++-- 6 files changed, 461 insertions(+), 512 deletions(-) diff --git a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java index 54c5163f..17ced399 100644 --- a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java +++ b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java @@ -7,20 +7,19 @@ */ package eu.itesla_project.modules.online; -import java.io.Serializable; -import java.util.*; - +import eu.itesla_project.cases.CaseType; import eu.itesla_project.commons.config.ModuleConfig; import eu.itesla_project.commons.config.PlatformConfig; import eu.itesla_project.iidm.network.Country; -import eu.itesla_project.cases.CaseType; import eu.itesla_project.simulation.securityindexes.SecurityIndexType; - import org.joda.time.DateTime; import org.joda.time.Interval; +import java.io.Serializable; +import java.util.Objects; +import java.util.Set; + /** - * * @author Quinary */ public class OnlineWorkflowParameters implements Serializable { @@ -71,15 +70,15 @@ public static OnlineWorkflowParameters loadDefault() { float constraintMargin = config.getFloatProperty("constraintMargin", DEFAULT_CONSTRAINT_MARGIN); String caseFile = config.getStringProperty("caseFile", null); - return new OnlineWorkflowParameters(baseCaseDate, - states, - histoInterval, - offlineWorkflowId, - timeHorizon, - feAnalysisId, - rulesPurityThreshold, - storeStates, - analyseBasecase, + return new OnlineWorkflowParameters(baseCaseDate, + states, + histoInterval, + offlineWorkflowId, + timeHorizon, + feAnalysisId, + rulesPurityThreshold, + storeStates, + analyseBasecase, validation, securityIndexes, caseType, @@ -89,19 +88,19 @@ public static OnlineWorkflowParameters loadDefault() { handleViolationsInN, constraintMargin, caseFile - ); + ); } - public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon, - String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation, - Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized, - float limitReduction, boolean handleViolationsInN, float constraintMargin, String caseFile) { + public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon, + String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation, + Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized, + float limitReduction, boolean handleViolationsInN, float constraintMargin, String caseFile) { Objects.requireNonNull(baseCaseDate); Objects.requireNonNull(histoInterval); 
Objects.requireNonNull(countries); Objects.requireNonNull(caseType); this.baseCaseDate = baseCaseDate; - this.states=states; + this.states = states; this.histoInterval = histoInterval; this.offlineWorkflowId = offlineWorkflowId; this.timeHorizon = timeHorizon; @@ -128,20 +127,20 @@ public Interval getHistoInterval() { return histoInterval; } - public int getStates() { - return states; + public int getStates() { + return states; } - public String getOfflineWorkflowId() { - return offlineWorkflowId; + public String getOfflineWorkflowId() { + return offlineWorkflowId; } - public TimeHorizon getTimeHorizon() { - return timeHorizon; + public TimeHorizon getTimeHorizon() { + return timeHorizon; } - public String getFeAnalysisId() { - return feAnalysisId; + public String getFeAnalysisId() { + return feAnalysisId; } public double getRulesPurityThreshold() { @@ -188,7 +187,9 @@ public float getConstraintMargin() { return constraintMargin; } - public String getCaseFile() { return caseFile; } + public String getCaseFile() { + return caseFile; + } @Override public String toString() { @@ -202,7 +203,7 @@ public String toString() { + ", storeStates=" + storeStates + ", analyseBasecase=" + analyseBasecase + ", validation=" + validation - + ", securityIndexes=" + securityIndexes + + ", securityIndexes=" + securityIndexes + ", caseType=" + caseType + ", countries=" + countries + ", mergeOptimized=" + mergeOptimized @@ -281,6 +282,8 @@ public void setConstraintMargin(float constraintMargin) { this.constraintMargin = constraintMargin; } - public void setCaseFile(String caseFile) { this.caseFile = caseFile; } + public void setCaseFile(String caseFile) { + this.caseFile = caseFile; + } } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java b/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java index 3a4f55b2..73e97da7 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/OnlineWorkflowImpl.java @@ -7,25 +7,15 @@ */ package eu.itesla_project.online; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - +import eu.itesla_project.cases.CaseRepository; import eu.itesla_project.cases.CaseType; -import eu.itesla_project.iidm.import_.Importers; -import org.joda.time.format.DateTimeFormat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import eu.itesla_project.computation.ComputationManager; +import eu.itesla_project.iidm.import_.Importers; import eu.itesla_project.iidm.network.Network; import eu.itesla_project.loadflow.api.LoadFlow; import eu.itesla_project.loadflow.api.LoadFlowFactory; import eu.itesla_project.merge.MergeOptimizerFactory; import eu.itesla_project.merge.MergeUtil; -import eu.itesla_project.cases.CaseRepository; import eu.itesla_project.modules.constraints.ConstraintsModifier; import eu.itesla_project.modules.contingencies.ContingenciesAndActionsDatabaseClient; import eu.itesla_project.modules.histo.HistoDbClient; @@ -33,32 +23,29 @@ import eu.itesla_project.modules.mcla.MontecarloSampler; import eu.itesla_project.modules.mcla.MontecarloSamplerFactory; import eu.itesla_project.modules.mcla.MontecarloSamplerParameters; -import eu.itesla_project.modules.online.OnlineDb; -import eu.itesla_project.modules.online.OnlineRulesFacade; -import eu.itesla_project.modules.online.OnlineWorkflowParameters; 
-import eu.itesla_project.modules.online.RulesFacadeFactory; -import eu.itesla_project.modules.online.RulesFacadeParameters; -import eu.itesla_project.modules.online.TimeHorizon; +import eu.itesla_project.modules.online.*; import eu.itesla_project.modules.optimizer.CorrectiveControlOptimizer; import eu.itesla_project.modules.optimizer.CorrectiveControlOptimizerFactory; import eu.itesla_project.modules.optimizer.CorrectiveControlOptimizerParameters; import eu.itesla_project.modules.rules.RulesDbClient; -import eu.itesla_project.simulation.securityindexes.SecurityIndex; +import eu.itesla_project.modules.wca.*; +import eu.itesla_project.online.ContingencyStatesIndexesSynthesis.SecurityIndexInfo; import eu.itesla_project.simulation.ImpactAnalysis; import eu.itesla_project.simulation.SimulationParameters; import eu.itesla_project.simulation.SimulatorFactory; import eu.itesla_project.simulation.Stabilization; -import eu.itesla_project.modules.wca.UncertaintiesAnalyserFactory; -import eu.itesla_project.modules.wca.WCA; -import eu.itesla_project.modules.wca.WCACluster; -import eu.itesla_project.modules.wca.WCAClusterNum; -import eu.itesla_project.modules.wca.WCAFactory; -import eu.itesla_project.modules.wca.WCAParameters; -import eu.itesla_project.modules.wca.WCAResult; -import eu.itesla_project.online.ContingencyStatesIndexesSynthesis.SecurityIndexInfo; +import eu.itesla_project.simulation.securityindexes.SecurityIndex; +import org.joda.time.format.DateTimeFormat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; /** - * * @author Quinary */ public class OnlineWorkflowImpl implements OnlineWorkflow { @@ -71,7 +58,7 @@ public class OnlineWorkflowImpl implements OnlineWorkflow { private final RulesDbClient rulesDbClient; private final ForecastErrorsDataStorage feDataStorage; private final OnlineWorkflowParameters parameters; - private List listeners=new ArrayList(); + private List listeners = new ArrayList(); private final CaseRepository caseRepository; private final WCAFactory wcaFactory; private final LoadFlowFactory loadFlowFactory; @@ -104,7 +91,7 @@ public OnlineWorkflowImpl( RulesFacadeFactory rulesFacadeFactory, OnlineWorkflowParameters parameters, OnlineWorkflowStartParameters startParameters - ) { + ) { Objects.requireNonNull(computationManager, "computation manager is null"); Objects.requireNonNull(cadbClient, "contingencies and actions DB client is null"); Objects.requireNonNull(histoDbClient, "histo DB client is null"); @@ -165,12 +152,12 @@ public String getId() { */ @Override public void start(OnlineWorkflowContext oCtx) throws Exception { - logger.info("{} Online workflow processing, started.",id); - for (OnlineApplicationListener l :listeners) - l.onWorkflowUpdate(new StatusSynthesis(id,StatusSynthesis.STATUS_RUNNING)); + logger.info("{} Online workflow processing, started.", id); + for (OnlineApplicationListener l : listeners) + l.onWorkflowUpdate(new StatusSynthesis(id, StatusSynthesis.STATUS_RUNNING)); Network network = null; - if (parameters.getCaseFile() != null ) { + if (parameters.getCaseFile() != null) { network = Importers.loadNetwork(parameters.getCaseFile()); if (network == null) { throw new RuntimeException("Case '" + parameters.getCaseFile() + "' not found"); @@ -181,7 +168,7 @@ public void start(OnlineWorkflowContext oCtx) throws Exception { } logger.info("- Network id: " + 
network.getId()); - logger.info("- Network name: "+ network.getName()); + logger.info("- Network name: " + network.getName()); // needed in order to correctly handle multithreading access to network network.getStateManager().allowStateMultiThreadAccess(true); @@ -196,31 +183,31 @@ public void start(OnlineWorkflowContext oCtx) throws Exception { oCtx.setResults(new ForecastAnalysisResults(this.getId(), oCtx.getTimeHorizon())); oCtx.setSecurityRulesResults(new SecurityRulesApplicationResults(this.getId(), oCtx.getTimeHorizon())); oCtx.setWcaResults(new WCAResults(this.getId(), oCtx.getTimeHorizon())); - if ( parameters.validation() ) + if (parameters.validation()) oCtx.setWcaSecurityRulesResults(new SecurityRulesApplicationResults(this.getId(), oCtx.getTimeHorizon())); logger.info(" - WCA processing......"); - for (OnlineApplicationListener l :listeners) - l.onWcaUpdate(new RunningSynthesis(id,true)); + for (OnlineApplicationListener l : listeners) + l.onWcaUpdate(new RunningSynthesis(id, true)); // maybe we should put also the stopWcaOnViolations wca parameter as online workflow parameter WCAParameters wcaParameters = new WCAParameters(parameters.getHistoInterval(), parameters.getOfflineWorkflowId(), parameters.getSecurityIndexes(), parameters.getRulesPurityThreshold(), true); WCA wca = wcaFactory.create(oCtx.getNetwork(), computationManager, histoDbClient, rulesDbClient, uncertaintiesAnalyserFactory, cadbClient, loadFlowFactory); WCAResult result = wca.run(wcaParameters); - for (OnlineApplicationListener l :listeners) - l.onWcaUpdate(new RunningSynthesis(id,false)); + for (OnlineApplicationListener l : listeners) + l.onWcaUpdate(new RunningSynthesis(id, false)); // ArrayList stables = new ArrayList(); for (WCACluster cluster : result.getClusters()) { logger.info("WCA: contingency {} in cluster {}", cluster.getContingency().getId(), cluster.getNum().toString()); oCtx.getWcaResults().addContingencyWithCluster(cluster.getContingency().getId(), cluster); - if ( parameters.validation() ) { // if validation + if (parameters.validation()) { // if validation // do not filter out the contingencies oCtx.getContingenciesToAnalyze().add(cluster.getContingency()); } else { - if ( cluster.getNum() != WCAClusterNum.ONE ) { // cluster 1 -> contingency classified as "stable" -> no need for further analysis + if (cluster.getNum() != WCAClusterNum.ONE) { // cluster 1 -> contingency classified as "stable" -> no need for further analysis // contingencies in clusters 2, 3 and 4 need further analysis oCtx.getContingenciesToAnalyze().add(cluster.getContingency()); @@ -229,7 +216,7 @@ public void start(OnlineWorkflowContext oCtx) throws Exception { } // notify all contingency stable and unstable - for (OnlineApplicationListener l :listeners) + for (OnlineApplicationListener l : listeners) l.onWcaContingencies(new WcaContingenciesSynthesis(id, oCtx.getWcaResults().getContingenciesWithClusters())); @@ -238,7 +225,7 @@ public void start(OnlineWorkflowContext oCtx) throws Exception { // create modules used in the states analysis MontecarloSampler sampler = montecarloSamplerFactory.create(oCtx.getNetwork(), computationManager, feDataStorage); OnlineRulesFacade rulesFacade = rulesFacadeFactory.create(rulesDbClient); - CorrectiveControlOptimizer optimizer = optimizerFactory.create(cadbClient,computationManager); + CorrectiveControlOptimizer optimizer = optimizerFactory.create(cadbClient, computationManager); Stabilization stabilization = simulatorFactory.createStabilization(oCtx.getNetwork(), computationManager, 
Integer.MAX_VALUE); ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(oCtx.getNetwork(), computationManager, Integer.MAX_VALUE, cadbClient); LoadFlow loadflow = loadFlowFactory.create(oCtx.getNetwork(), computationManager, 0); @@ -246,9 +233,9 @@ public void start(OnlineWorkflowContext oCtx) throws Exception { StateAnalizerListener stateListener = new StateAnalizerListener(); // initialize modules - rulesFacade.init(new RulesFacadeParameters(oCtx.getOfflineWorkflowId(), - oCtx.getContingenciesToAnalyze(), - parameters.getRulesPurityThreshold(), + rulesFacade.init(new RulesFacadeParameters(oCtx.getOfflineWorkflowId(), + oCtx.getContingenciesToAnalyze(), + parameters.getRulesPurityThreshold(), parameters.getSecurityIndexes(), parameters.validation(), parameters.isHandleViolationsInN())); @@ -257,19 +244,19 @@ public void start(OnlineWorkflowContext oCtx) throws Exception { stabilization.init(simulationParameters, simulationInitContext); impactAnalysis.init(simulationParameters, simulationInitContext); optimizer.init(new CorrectiveControlOptimizerParameters()); - if ( parameters.isHandleViolationsInN() && parameters.analyseBasecase() ) { // I need to analyze basecase before initializing the sampler - new StateAnalyzer(oCtx, sampler, loadflow, rulesFacade, optimizer, stabilization, impactAnalysis, onlineDb, stateListener, + if (parameters.isHandleViolationsInN() && parameters.analyseBasecase()) { // I need to analyze basecase before initializing the sampler + new StateAnalyzer(oCtx, sampler, loadflow, rulesFacade, optimizer, stabilization, impactAnalysis, onlineDb, stateListener, constraintsModifier, parameters).call(); } sampler.init(new MontecarloSamplerParameters(oCtx.getTimeHorizon(), parameters.getFeAnalysisId(), parameters.getStates())); // run states analysis int statesNumber = parameters.getStates(); - if ( parameters.isHandleViolationsInN() && parameters.analyseBasecase() ) // I already analyzed basecase + if (parameters.isHandleViolationsInN() && parameters.analyseBasecase()) // I already analyzed basecase statesNumber--; List> tasks = new ArrayList<>(statesNumber); - for ( int i=0; i statusMap = new HashMap(); - WorkSynthesis work=new WorkSynthesis(id,statusMap); - ContingencyStatesActionsSynthesis acts=new ContingencyStatesActionsSynthesis(id); - ContingencyStatesIndexesSynthesis stindex=new ContingencyStatesIndexesSynthesis(id); + HashMap statusMap = new HashMap(); + WorkSynthesis work = new WorkSynthesis(id, statusMap); + ContingencyStatesActionsSynthesis acts = new ContingencyStatesActionsSynthesis(id); + ContingencyStatesIndexesSynthesis stindex = new ContingencyStatesIndexesSynthesis(id); IndexSecurityRulesResultsSynthesis stateWithSecRulesResults = new IndexSecurityRulesResultsSynthesis(id); public void onUpdate(Integer stateId, EnumMap status, TimeHorizon t) { @@ -338,17 +326,16 @@ public void onUpdate(Integer stateId, EnumMap WorkStatus ws = statusMap.get(stateId); ws.setStatus(status); ws.setTimeHorizon(t.toString()); - statusMap.put(stateId,ws); - } else - statusMap.put(stateId,new WorkStatus(stateId, status,t.toString() )); + statusMap.put(stateId, ws); + } else + statusMap.put(stateId, new WorkStatus(stateId, status, t.toString())); for (OnlineApplicationListener l : listeners) l.onWorkflowStateUpdate(work); } - public void onSecurityRulesApplicationResults(String contingencyId, Integer stateId, OnlineWorkflowContext oCtx ) - { + public void onSecurityRulesApplicationResults(String contingencyId, Integer stateId, OnlineWorkflowContext oCtx) { 
SecurityRulesApplicationResults rulesApplicationResults = oCtx.getSecurityRulesResults(); stateWithSecRulesResults.addStateSecurityRuleIndexes(contingencyId, stateId, rulesApplicationResults); @@ -357,24 +344,23 @@ public void onSecurityRulesApplicationResults(String contingencyId, Integer stat } - public void onUpdate(Integer stateId, EnumMap status, TimeHorizon t, String detail) { // statusMap.put(stateId, new WorkStatus(stateId, status, t.toString(),detail)); - if (statusMap.containsKey(stateId)) { + if (statusMap.containsKey(stateId)) { WorkStatus ws = statusMap.get(stateId); StringBuffer sb = new StringBuffer(); - if (ws.getDetail() != null && !ws.getDetail().equals("")) + if (ws.getDetail() != null && !ws.getDetail().equals("")) sb.append(ws.getDetail()).append("
").append(detail); - else + else sb.append(detail); ws.setDetail(sb.toString()); ws.setStatus(status); ws.setTimeHorizon(t.toString()); - statusMap.put(stateId,ws); + statusMap.put(stateId, ws); } else - statusMap.put(stateId,new WorkStatus(stateId, status,t.toString(),detail )); + statusMap.put(stateId, new WorkStatus(stateId, status, t.toString(), detail)); for (OnlineApplicationListener l : listeners) l.onWorkflowStateUpdate(work); @@ -382,57 +368,52 @@ public void onUpdate(Integer stateId, EnumMap } - public void onImpactAnalysisResults(Integer stateId, OnlineWorkflowContext oCtx ) { + public void onImpactAnalysisResults(Integer stateId, OnlineWorkflowContext oCtx) { ForecastAnalysisResults res = oCtx.getResults(); - Collection unsafes= res.getUnsafeContingencies(); - for(String c :unsafes) - { - - List sts =res.getUnstableStates(c); - for(Integer s:sts) - { - List sec=res.getIndexes(c, s); - ArrayList indexes=new ArrayList(); - for(SecurityIndex idx: sec) - { + Collection unsafes = res.getUnsafeContingencies(); + for (String c : unsafes) { + + List sts = res.getUnstableStates(c); + for (Integer s : sts) { + List sec = res.getIndexes(c, s); + ArrayList indexes = new ArrayList(); + for (SecurityIndex idx : sec) { indexes.add(stindex.new SecurityIndexInfo(idx)); } - stindex.addStateIndexes(c,s, indexes); + stindex.addStateIndexes(c, s, indexes); } } - for (OnlineApplicationListener l :listeners) + for (OnlineApplicationListener l : listeners) l.onStatesWithIndexesUpdate(stindex); } - public void onOptimizerResults(Integer stateId, OnlineWorkflowContext oCtx ) { + public void onOptimizerResults(Integer stateId, OnlineWorkflowContext oCtx) { ForecastAnalysisResults res = oCtx.getResults(); - Collection conts =res.getContingenciesWithActions(); + Collection conts = res.getContingenciesWithActions(); - for(String c :conts) - { - Map unsafeStatesWithActions = res.getUnsafeStatesWithActions(c); - if ( unsafeStatesWithActions != null ) { - Set sts =unsafeStatesWithActions.keySet(); - for(Integer s:sts) - { + for (String c : conts) { + Map unsafeStatesWithActions = res.getUnsafeStatesWithActions(c); + if (unsafeStatesWithActions != null) { + Set sts = unsafeStatesWithActions.keySet(); + for (Integer s : sts) { List actiondIds = res.getActionsIds(c, s); - if ( actiondIds != null ) { - ArrayList infos=new ArrayList(); - for(String a : actiondIds) + if (actiondIds != null) { + ArrayList infos = new ArrayList(); + for (String a : actiondIds) infos.add(new ActionInfo(a)); - acts.addStateActions(c,s, infos); + acts.addStateActions(c, s, infos); } } } } - for (OnlineApplicationListener l :listeners) + for (OnlineApplicationListener l : listeners) l.onStatesWithActionsUpdate(acts); diff --git a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java index 87c4f2bf..cb1fd6c3 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java @@ -7,44 +7,7 @@ */ package eu.itesla_project.online.db; -import java.io.File; -import java.io.FileWriter; -import java.io.FilenameFilter; -import java.io.IOException; -import java.io.StringWriter; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import 
java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Properties; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; - -import org.apache.commons.io.FileUtils; -import org.h2.mvstore.MVMap; -import org.h2.mvstore.MVMapConcurrent; -import org.h2.mvstore.MVStore; -import org.joda.time.DateTime; -import org.joda.time.Interval; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.supercsv.io.CsvListWriter; -import org.supercsv.prefs.CsvPreference; - import com.csvreader.CsvWriter; - import eu.itesla_project.cases.CaseType; import eu.itesla_project.iidm.datasource.DataSource; import eu.itesla_project.iidm.datasource.FileDataSource; @@ -56,25 +19,35 @@ import eu.itesla_project.modules.contingencies.ActionParameters; import eu.itesla_project.modules.histo.HistoDbAttributeId; import eu.itesla_project.modules.histo.IIDM2DB; -import eu.itesla_project.modules.online.OnlineDb; -import eu.itesla_project.modules.online.OnlineStep; -import eu.itesla_project.modules.online.OnlineWorkflowDetails; -import eu.itesla_project.modules.online.OnlineWorkflowParameters; -import eu.itesla_project.modules.online.OnlineWorkflowResults; -import eu.itesla_project.modules.online.OnlineWorkflowRulesResults; -import eu.itesla_project.modules.online.OnlineWorkflowWcaResults; -import eu.itesla_project.modules.online.StateProcessingStatus; -import eu.itesla_project.modules.online.StateStatus; -import eu.itesla_project.modules.online.TimeHorizon; +import eu.itesla_project.modules.online.*; import eu.itesla_project.modules.optimizer.CCOFinalStatus; -import eu.itesla_project.security.LimitViolation; -import eu.itesla_project.simulation.securityindexes.SecurityIndexType; import eu.itesla_project.online.db.debug.NetworkData; import eu.itesla_project.online.db.debug.NetworkDataExporter; import eu.itesla_project.online.db.debug.NetworkDataExtractor; +import eu.itesla_project.security.LimitViolation; +import eu.itesla_project.simulation.securityindexes.SecurityIndexType; +import org.apache.commons.io.FileUtils; +import org.h2.mvstore.MVMap; +import org.h2.mvstore.MVMapConcurrent; +import org.h2.mvstore.MVStore; +import org.joda.time.DateTime; +import org.joda.time.Interval; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.supercsv.io.CsvListWriter; +import org.supercsv.prefs.CsvPreference; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; /** - * * @author Quinary */ public class OnlineDbMVStore implements OnlineDb { @@ -140,7 +113,8 @@ public class OnlineDbMVStore implements OnlineDb { private static final String STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_SUFFIX = "_pcviolationscontigencies"; private static final String STORED_PC_VIOLATIONS_MAP_PREFIX = "pcviolations_"; private static final String STORED_PC_LOADFLOW_CONTINGENCIES_MAP_NAME = "storedPCLoadflowContingencies"; - private static final String STORED_PC_LOADFLOW_STATES_MAP_SUFFIX = "_pcloadflowstates";; + private static final String STORED_PC_LOADFLOW_STATES_MAP_SUFFIX = "_pcloadflowstates"; + ; private static final String STORED_PC_LOADFLOW_STATES_MAP_NAME = 
"storedPCLoadflowStates"; private static final String STORED_PC_LOADFLOW_CONTINGENCIES_MAP_SUFFIX = "_pcloadflowcontigencies"; private static final String STORED_WCA_RULES_RESULTS_MAP_NAME = "wfWcaRulesResults"; @@ -149,7 +123,7 @@ public class OnlineDbMVStore implements OnlineDb { private static final String STORED_WCA_RULES_RESULTS_STATE_RULES_AVAILABLE_MAP_SUFFIX = "_wcarulesavailable"; private static final String STORED_WCA_RULES_RESULTS_STATE_INVALID_RULES_MAP_SUFFIX = "_wcarulesinvalid"; private static final String SERIALIZED_STATES_FILENAME = "network-states.csv"; - private final String[] XIIDMEXTENSIONS = { ".xiidm", ".iidm", ".xml" }; + private final String[] XIIDMEXTENSIONS = {".xiidm", ".iidm", ".xml"}; private static final Logger LOGGER = LoggerFactory.getLogger(OnlineDbMVStore.class); @@ -158,7 +132,7 @@ public class OnlineDbMVStore implements OnlineDb { private OnlineDbMVStoreConfig config = null; HashMap storedWFMetrics = new HashMap(); - ConcurrentHashMap>> workflowsStates = new ConcurrentHashMap>>(); + ConcurrentHashMap>> workflowsStates = new ConcurrentHashMap>>(); MVMapConcurrent.Builder mapBuilder; @@ -167,7 +141,7 @@ public OnlineDbMVStore(OnlineDbMVStoreConfig config) { this.config = config; LOGGER.info(config.toString()); Path storageFolder = config.getOnlineDbDir(); - if ( !Files.exists(storageFolder) ) { + if (!Files.exists(storageFolder)) { try { Files.createDirectories(storageFolder); } catch (IOException e) { @@ -197,7 +171,7 @@ private synchronized void closeStores() { private synchronized MVStore getStore(String workflowId) { MVStore wfMVStore; - if ( storedWFMetrics.containsKey(workflowId)) + if (storedWFMetrics.containsKey(workflowId)) wfMVStore = storedWFMetrics.get(workflowId); else { LOGGER.debug("Opening file for workflow {}", workflowId); @@ -218,7 +192,7 @@ public void storeMetrics(String workflowId, OnlineStep step, Map @Override public void storeMetrics(String workflowId, Integer stateId, OnlineStep step, Map metrics) { - String stateIdStr=String.valueOf(stateId); + String stateIdStr = String.valueOf(stateId); LOGGER.info("Storing metrics for wf {}, step {} and state {}", workflowId, step.name(), stateIdStr); storeMetrics(workflowId, step.name() + "_" + stateIdStr, metrics); LOGGER.info("Storing metadata for wf {}, step {} and state {}", workflowId, step.name(), stateIdStr); @@ -229,12 +203,12 @@ private void storeMetrics(String workflowId, String mapName, Map try { MVStore wfMVStore = getStore(workflowId); Map metricsMap = wfMVStore.openMap(mapName, mapBuilder); - for ( String parameter : metrics.keySet() ) { + for (String parameter : metrics.keySet()) { metricsMap.put(parameter, metrics.get(parameter)); } wfMVStore.commit(); - } catch(Throwable e) { - String errorMessage = "Error storing metrics for wf " + workflowId + " in map " + mapName + ": " + e.getMessage(); + } catch (Throwable e) { + String errorMessage = "Error storing metrics for wf " + workflowId + " in map " + mapName + ": " + e.getMessage(); LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -251,12 +225,12 @@ private void storeStepMetadata(String workflowId, String stateId, OnlineStep ste stepStatesMap.putIfAbsent(stateId, ""); // save info about stored params per step MVMap stepParamsMap = wfMVStore.openMap(step.name() + STORED_METRICS_PARAMS_MAP_SUFFIX, mapBuilder); - for ( String parameter : metrics.keySet() ) { + for (String parameter : metrics.keySet()) { stepParamsMap.putIfAbsent(parameter, ""); } wfMVStore.commit(); - } catch(Throwable e) { - String 
errorMessage = "Error storing metadata for wf " + workflowId + ", step "+ step.name() + ", state " + stateId + ": " + e.getMessage(); + } catch (Throwable e) { + String errorMessage = "Error storing metadata for wf " + workflowId + ", step " + step.name() + ", state " + stateId + ": " + e.getMessage(); LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -270,18 +244,18 @@ public Map getMetrics(String workflowId, OnlineStep step) { @Override public Map getMetrics(String workflowId, Integer stateId, OnlineStep step) { - String stateIdStr=String.valueOf(stateId); + String stateIdStr = String.valueOf(stateId); LOGGER.info("Getting metrics from wf {}, step {} and state {}", workflowId, step.name(), stateIdStr); return getMetrics(workflowId, step.name() + "_" + stateIdStr); } private Map getMetrics(String workflowId, String mapName) { - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { HashMap metrics = new HashMap(); MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.getMapNames().contains(mapName) ) { + if (wfMVStore.getMapNames().contains(mapName)) { Map storedMap = wfMVStore.openMap(mapName, mapBuilder); - for ( String parameter : storedMap.keySet() ) { + for (String parameter : storedMap.keySet()) { metrics.put(parameter, storedMap.get(parameter)); } } @@ -295,37 +269,37 @@ private Map getMetrics(String workflowId, String mapName) { @Override public String getCsvMetrics(String workflowId, OnlineStep step) { LOGGER.info("Preparing CSV data for wf {} and step {}", workflowId, step.name()); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { StringWriter content = new StringWriter(); CsvWriter cvsWriter = new CsvWriter(content, ','); try { MVStore wfMVStore = getStore(workflowId); // check if there are stored metrics Map storedStepsMap = wfMVStore.openMap(STORED_METRICS_STEPS_MAP_NAME, mapBuilder); - if ( storedStepsMap.containsKey(step.name()) ) { + if (storedStepsMap.containsKey(step.name())) { MVMap stepParamsMap = wfMVStore.openMap(step.name() + STORED_METRICS_PARAMS_MAP_SUFFIX, mapBuilder); MVMap stepStatesMap = wfMVStore.openMap(step.name() + STORED_METRICS_STATES_MAP_SUFFIX, mapBuilder); // write headers //LOGGER.debug("Preparing CSV headers for wf {} and step {}", workflowId, step.name()); - String[] headers = new String[stepParamsMap.keySet().size()+1]; + String[] headers = new String[stepParamsMap.keySet().size() + 1]; headers[0] = "state"; - HashMap paramsIndexes = new HashMap<>(); + HashMap paramsIndexes = new HashMap<>(); int i = 1; - for ( String parameter : stepParamsMap.keySet() ) { + for (String parameter : stepParamsMap.keySet()) { headers[i] = parameter; paramsIndexes.put(parameter, i); i++; } cvsWriter.writeRecord(headers); // write step general metrics, if stored - if ( stepStatesMap.containsKey("_") ) { + if (stepStatesMap.containsKey("_")) { //LOGGER.debug("Preparing CSV data for wf {} and step {} - general step metrics", workflowId, step.name()); String[] values = getStoredMapValues(wfMVStore, "_", step, stepParamsMap.keySet().size(), paramsIndexes); cvsWriter.writeRecord(values); } // write step metrics for each state, if stored - for ( String stateId : stepStatesMap.keySet() ) { - if (!"_".equals(stateId) ) { + for (String stateId : stepStatesMap.keySet()) { + if (!"_".equals(stateId)) { //LOGGER.debug("Preparing CSV data for wf {} and step {} - state {} metrics", workflowId, step.name(), stateId); String[] values = getStoredMapValues(wfMVStore, stateId, step, 
stepParamsMap.keySet().size(), paramsIndexes); cvsWriter.writeRecord(values); @@ -345,11 +319,11 @@ public String getCsvMetrics(String workflowId, OnlineStep step) { } } - private String[] getStoredMapValues(MVStore wfMVStore, String stateId, OnlineStep step, int paramsN, HashMap paramsIndexes) { - String[] values = new String[paramsN+1]; + private String[] getStoredMapValues(MVStore wfMVStore, String stateId, OnlineStep step, int paramsN, HashMap paramsIndexes) { + String[] values = new String[paramsN + 1]; values[0] = stateId; Map storedMap = wfMVStore.openMap(step.name() + "_" + stateId, mapBuilder); - for ( String parameter : storedMap.keySet() ) { + for (String parameter : storedMap.keySet()) { int index = paramsIndexes.get(parameter); values[index] = storedMap.get(parameter); } @@ -358,7 +332,7 @@ private String[] getStoredMapValues(MVStore wfMVStore, String stateId, OnlineSte private DateTime getWorkflowDate(String workflowId) { DateTime workflowDate = null; - if ( workflowId.contains("_") ) { + if (workflowId.contains("_")) { String workflowStringDate = workflowId.substring(workflowId.lastIndexOf("_") + 1); workflowDate = DateTimeFormat.forPattern("yyyyMMddHHmmssSSS").parseDateTime(workflowStringDate); } @@ -375,7 +349,7 @@ public boolean accept(File dir, String name) { } }); for (File file : files) { - if ( file.isFile() ) { + if (file.isFile()) { String workflowId = file.getName().substring(STORED_WORKFLOW_PREFIX.length()); OnlineWorkflowDetails workflowDetails = new OnlineWorkflowDetails(workflowId); workflowDetails.setWorkflowDate(getWorkflowDate(workflowId)); @@ -399,11 +373,11 @@ public List listWorkflows(DateTime basecaseDate) { List workflowIds = new ArrayList(); File[] files = config.getOnlineDbDir().toFile().listFiles(new FilenameFilter() { public boolean accept(File dir, String name) { - return name.toLowerCase().startsWith(STORED_WORKFLOW_PREFIX+wfNamePrefix); + return name.toLowerCase().startsWith(STORED_WORKFLOW_PREFIX + wfNamePrefix); } }); for (File file : files) { - if ( file.isFile() ) { + if (file.isFile()) { String workflowId = file.getName().substring(STORED_WORKFLOW_PREFIX.length()); OnlineWorkflowDetails workflowDetails = new OnlineWorkflowDetails(workflowId); workflowDetails.setWorkflowDate(getWorkflowDate(workflowId)); @@ -432,12 +406,12 @@ public boolean accept(File dir, String name) { } }); for (File file : files) { - if ( file.isFile() ) { + if (file.isFile()) { String workflowId = file.getName().substring(STORED_WORKFLOW_PREFIX.length()); - if ( workflowId.length() > dateFormatPattern.length() && workflowId.substring(dateFormatPattern.length(),dateFormatPattern.length()+1).equals("_")) { - String basecaseName = workflowId.substring(0, dateFormatPattern.length()-1); + if (workflowId.length() > dateFormatPattern.length() && workflowId.substring(dateFormatPattern.length(), dateFormatPattern.length() + 1).equals("_")) { + String basecaseName = workflowId.substring(0, dateFormatPattern.length() - 1); DateTime basecaseDate = DateTime.parse(basecaseName, formatter); - if ( basecaseInterval.contains(basecaseDate.getMillis())) { + if (basecaseInterval.contains(basecaseDate.getMillis())) { OnlineWorkflowDetails workflowDetails = new OnlineWorkflowDetails(workflowId); workflowDetails.setWorkflowDate(getWorkflowDate(workflowId)); workflowIds.add(workflowDetails); @@ -462,10 +436,10 @@ public OnlineWorkflowDetails getWorkflowDetails(String workflowId) { OnlineWorkflowDetails workflowDetails = null; File[] files = config.getOnlineDbDir().toFile().listFiles(new 
FilenameFilter() { public boolean accept(File dir, String name) { - return name.toLowerCase().equals(STORED_WORKFLOW_PREFIX+workflowId); + return name.toLowerCase().equals(STORED_WORKFLOW_PREFIX + workflowId); } }); - if ( files != null && files.length == 1 ) { + if (files != null && files.length == 1) { workflowDetails = new OnlineWorkflowDetails(workflowId); workflowDetails.setWorkflowDate(new DateTime(files[0].lastModified())); } @@ -479,7 +453,7 @@ public void storeResults(String workflowId, OnlineWorkflowResults results) { LOGGER.info("Storing results for workflow {}", workflowId); MVStore wfMVStore = getStore(workflowId); // check if the results for this wf have already been stored - if ( wfMVStore.hasMap(STORED_RESULTS_MAP_NAME) ) + if (wfMVStore.hasMap(STORED_RESULTS_MAP_NAME)) removeWfResults(workflowId, wfMVStore); MVMap storedResultsMap = wfMVStore.openMap(STORED_RESULTS_MAP_NAME, mapBuilder); // store time horizon @@ -495,21 +469,21 @@ public void storeResults(String workflowId, OnlineWorkflowResults results) { MVMap storedActionsEquipmentsMap = wfMVStore.openMap(contingencyId + STORED_RESULTS_ACTIONS_EQUIPMENTS_MAP_SUFFIX, mapBuilder); MVMap storedActionsParametersMap = wfMVStore.openMap(contingencyId + STORED_RESULTS_ACTIONS_PARAMETERS_MAP_SUFFIX, mapBuilder); for (Integer stateId : results.getUnsafeStatesWithActions(contingencyId).keySet()) { - storedActionsInfosMap.put(stateId.toString() + STORED_RESULTS_ACTIONINFO_ACTIONSFOUND_KEY_SUFFIX, + storedActionsInfosMap.put(stateId.toString() + STORED_RESULTS_ACTIONINFO_ACTIONSFOUND_KEY_SUFFIX, Boolean.toString(results.getUnsafeStatesWithActions(contingencyId).get(stateId))); storedActionsInfosMap.put(stateId.toString() + STORED_RESULTS_ACTIONINFO_STATUS_KEY_SUFFIX, results.getStateStatus(contingencyId, stateId).name()); - if ( results.getCause(contingencyId, stateId) != null ) + if (results.getCause(contingencyId, stateId) != null) storedActionsInfosMap.put(stateId.toString() + STORED_RESULTS_ACTIONINFO_CAUSE_KEY_SUFFIX, results.getCause(contingencyId, stateId)); - if ( results.getActionPlan(contingencyId, stateId) != null ) + if (results.getActionPlan(contingencyId, stateId) != null) storedActionsInfosMap.put(stateId.toString() + STORED_RESULTS_ACTIONINFO_ACTIONPLAN_KEY_SUFFIX, results.getActionPlan(contingencyId, stateId)); List actionsIds = results.getActionsIds(contingencyId, stateId); - if ( actionsIds != null ) { - for(String actionId : actionsIds) { + if (actionsIds != null) { + for (String actionId : actionsIds) { List equipmentsIds = results.getEquipmentsIds(contingencyId, stateId, actionId); storedActionsEquipmentsMap.put(stateId + "_" + actionId, OnlineDbMVStoreUtils.actionsIdsToJson(equipmentsIds)); - for(String equipmentId : equipmentsIds) { + for (String equipmentId : equipmentsIds) { ActionParameters actionParameters = results.getParameters(contingencyId, stateId, actionId, equipmentId); - if ( actionParameters != null ) + if (actionParameters != null) storedActionsParametersMap.put(stateId + "_" + actionId + "_" + equipmentId, OnlineDbMVStoreUtils.actionParametersToJson(actionParameters)); } } @@ -557,13 +531,13 @@ private void removeWfResults(String workflowId, MVStore wfMVStore) { public OnlineWorkflowResults getResults(String workflowId) { Objects.requireNonNull(workflowId, "workflow id is null"); LOGGER.info("Getting results of wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( 
wfMVStore.hasMap(STORED_RESULTS_MAP_NAME) ) { + if (wfMVStore.hasMap(STORED_RESULTS_MAP_NAME)) { MVMap storedResultsMap = wfMVStore.openMap(STORED_RESULTS_MAP_NAME, mapBuilder); // create workflow results OnlineWorkflowResultsImpl wfResults = new OnlineWorkflowResultsImpl( - workflowId, + workflowId, TimeHorizon.valueOf(storedResultsMap.get(STORED_RESULTS_TIMEHORIZON_KEY))); // add contingencies with actiions Collection contingenciesWithAction = OnlineDbMVStoreUtils.jsonToContingenciesIds( @@ -575,22 +549,22 @@ public OnlineWorkflowResults getResults(String workflowId) { MVMap storedActionsEquipmentsMap = wfMVStore.openMap(contingencyId + STORED_RESULTS_ACTIONS_EQUIPMENTS_MAP_SUFFIX, mapBuilder); for (String stateId : storedActionsMap.keySet()) { boolean actionsFound = true; - if ( storedActionsInfosMap.containsKey(stateId + STORED_RESULTS_ACTIONINFO_ACTIONSFOUND_KEY_SUFFIX) ) + if (storedActionsInfosMap.containsKey(stateId + STORED_RESULTS_ACTIONINFO_ACTIONSFOUND_KEY_SUFFIX)) actionsFound = Boolean.parseBoolean(storedActionsInfosMap.get(stateId + STORED_RESULTS_ACTIONINFO_ACTIONSFOUND_KEY_SUFFIX)); CCOFinalStatus status = CCOFinalStatus.MANUAL_CORRECTIVE_ACTION_FOUND; - if ( storedActionsInfosMap.containsKey(stateId + STORED_RESULTS_ACTIONINFO_STATUS_KEY_SUFFIX) ) + if (storedActionsInfosMap.containsKey(stateId + STORED_RESULTS_ACTIONINFO_STATUS_KEY_SUFFIX)) status = CCOFinalStatus.valueOf(storedActionsInfosMap.get(stateId + STORED_RESULTS_ACTIONINFO_STATUS_KEY_SUFFIX)); String cause = storedActionsInfosMap.get(stateId + STORED_RESULTS_ACTIONINFO_CAUSE_KEY_SUFFIX); String actionPlan = storedActionsInfosMap.get(stateId + STORED_RESULTS_ACTIONINFO_ACTIONPLAN_KEY_SUFFIX); Map> actions = null; - if ( storedActionsMap.containsKey(stateId) ) { + if (storedActionsMap.containsKey(stateId)) { List actionsIds = OnlineDbMVStoreUtils.jsonToActionsIds(storedActionsMap.get(stateId)); - actions = new HashMap>(); - for(String actionId : actionsIds) { + actions = new HashMap>(); + for (String actionId : actionsIds) { Map equipments = new HashMap(); List equipmentsIds = OnlineDbMVStoreUtils.jsonToActionsIds(storedActionsEquipmentsMap.get(stateId + "_" + actionId)); - if ( equipmentsIds != null ) { - for(String equipmentId : equipmentsIds) { + if (equipmentsIds != null) { + for (String equipmentId : equipmentsIds) { ActionParameters actionParameters = OnlineDbMVStoreUtils.jsonToActionParameters(storedActionsParametersMap.get(stateId + "_" + actionId + "_" + equipmentId)); equipments.put(equipmentId, actionParameters); } @@ -629,13 +603,13 @@ public void storeRulesResults(String workflowId, OnlineWorkflowRulesResults resu LOGGER.info("Storing results of rules for workflow {}", workflowId); MVStore wfMVStore = getStore(workflowId); // check if the results for this wf have already been stored - if ( wfMVStore.hasMap(STORED_RULES_RESULTS_MAP_NAME) ) + if (wfMVStore.hasMap(STORED_RULES_RESULTS_MAP_NAME)) removeWfRulesResults(workflowId, wfMVStore); MVMap storedRulesResultsMap = wfMVStore.openMap(STORED_RULES_RESULTS_MAP_NAME, mapBuilder); // store time horizon storedRulesResultsMap.put(STORED_RESULTS_TIMEHORIZON_KEY, results.getTimeHorizon().getName()); // store contingencies with security rules results - storedRulesResultsMap.put(STORED_RULES_RESULTS_CONTINGENCIES_WITH_RULES_KEY, + storedRulesResultsMap.put(STORED_RULES_RESULTS_CONTINGENCIES_WITH_RULES_KEY, OnlineDbMVStoreUtils.contingenciesIdsToJson(results.getContingenciesWithSecurityRulesResults())); // store rules results for contingencies for (String 
contingencyId : results.getContingenciesWithSecurityRulesResults()) { @@ -687,13 +661,13 @@ private void removeWfRulesResults(String workflowId, MVStore wfMVStore) { public OnlineWorkflowRulesResults getRulesResults(String workflowId) { Objects.requireNonNull(workflowId, "workflow id is null"); LOGGER.info("Getting rules results of wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.hasMap(STORED_RULES_RESULTS_MAP_NAME) ) { + if (wfMVStore.hasMap(STORED_RULES_RESULTS_MAP_NAME)) { MVMap storedRulesResultsMap = wfMVStore.openMap(STORED_RULES_RESULTS_MAP_NAME, mapBuilder); // create workflow rules results OnlineWorkflowRulesResultsImpl wfRulesResults = new OnlineWorkflowRulesResultsImpl( - workflowId, + workflowId, TimeHorizon.valueOf(storedRulesResultsMap.get(STORED_RESULTS_TIMEHORIZON_KEY))); // add contingencies with rules results Collection contingenciesWithRules = OnlineDbMVStoreUtils.jsonToContingenciesIds( @@ -707,10 +681,10 @@ public OnlineWorkflowRulesResults getRulesResults(String workflowId) { Map stateResults = OnlineDbMVStoreUtils.jsonToIndexesData(storedStateResultsMap.get(stateId)); StateStatus stateStatus = StateStatus.valueOf(storedStateStatusMap.get(stateId)); boolean rulesAvailable = true; - if ( storedStateAvailableRulesMap.containsKey(stateId) ) + if (storedStateAvailableRulesMap.containsKey(stateId)) rulesAvailable = Boolean.parseBoolean(storedStateAvailableRulesMap.get(stateId)); List invalidRules = new ArrayList(); - if ( storedStateInvalidRulesMap.containsKey(stateId) ) + if (storedStateInvalidRulesMap.containsKey(stateId)) invalidRules.addAll(OnlineDbMVStoreUtils.jsonToIndexesTypes(storedStateInvalidRulesMap.get(stateId))); wfRulesResults.addContingencyWithSecurityRulesResults(contingencyId, Integer.parseInt(stateId), stateStatus, stateResults, rulesAvailable, invalidRules); @@ -734,7 +708,7 @@ public void storeWcaResults(String workflowId, OnlineWorkflowWcaResults results) LOGGER.info("Storing results of WCA for workflow {}", workflowId); MVStore wfMVStore = getStore(workflowId); // check if the results for this wf have already been stored - if ( wfMVStore.hasMap(STORED_WCA_RESULTS_MAP_NAME) ) + if (wfMVStore.hasMap(STORED_WCA_RESULTS_MAP_NAME)) removeWfWcaResults(workflowId, wfMVStore); MVMap storedWcaResultsMap = wfMVStore.openMap(STORED_WCA_RESULTS_MAP_NAME, mapBuilder); // store time horizon @@ -769,18 +743,18 @@ private void removeWfWcaResults(String workflowId, MVStore wfMVStore) { public OnlineWorkflowWcaResults getWcaResults(String workflowId) { Objects.requireNonNull(workflowId, "workflow id is null"); LOGGER.info("Getting WCA results of wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.hasMap(STORED_WCA_RESULTS_MAP_NAME) ) { + if (wfMVStore.hasMap(STORED_WCA_RESULTS_MAP_NAME)) { MVMap storedRulesResultsMap = wfMVStore.openMap(STORED_WCA_RESULTS_MAP_NAME, mapBuilder); // create workflow rules results OnlineWorkflowWcaResultsImpl wfWcaResults = new OnlineWorkflowWcaResultsImpl( - workflowId, + workflowId, TimeHorizon.valueOf(storedRulesResultsMap.get(STORED_RESULTS_TIMEHORIZON_KEY))); // add classification of contingencies in clusters MVMap storedClustersMap = wfMVStore.openMap(STORED_WCA_RESULTS_CLUSTERS_MAP_NAME, mapBuilder); MVMap storedCausesMap = wfMVStore.openMap(STORED_WCA_RESULTS_CAUSES_MAP_NAME, mapBuilder); - for(String contingencyId : 
storedClustersMap.keySet()) { + for (String contingencyId : storedClustersMap.keySet()) { String cause = storedCausesMap.get(contingencyId); wfWcaResults.addContingencyWithCluster(contingencyId, Integer.valueOf(storedClustersMap.get(contingencyId)), @@ -805,7 +779,7 @@ public void storeWorkflowParameters(String workflowId, OnlineWorkflowParameters LOGGER.info("Storing configuration parameters for workflow {}", workflowId); MVStore wfMVStore = getStore(workflowId); // check if the parameters for this wf have already been stored - if ( wfMVStore.hasMap(STORED_PARAMETERS_MAP_NAME) ) + if (wfMVStore.hasMap(STORED_PARAMETERS_MAP_NAME)) removeWfParameters(workflowId, wfMVStore); MVMap storedParametersMap = wfMVStore.openMap(STORED_PARAMETERS_MAP_NAME, mapBuilder); // store basecase @@ -829,7 +803,7 @@ public void storeWorkflowParameters(String workflowId, OnlineWorkflowParameters // store flag validation storedParametersMap.put(STORED_PARAMETERS_VALIDATION_KEY, Boolean.toString(parameters.validation())); // store security indexes - if ( parameters.getSecurityIndexes() != null ) + if (parameters.getSecurityIndexes() != null) storedParametersMap.put(STORED_PARAMETERS_SECURITY_INDEXES_KEY, OnlineDbMVStoreUtils.indexesTypesToJson(parameters.getSecurityIndexes())); // store case type storedParametersMap.put(STORED_PARAMETERS_CASE_TYPE_KEY, parameters.getCaseType().name()); @@ -862,9 +836,9 @@ private void removeWfParameters(String workflowId, MVStore wfMVStore) { public OnlineWorkflowParameters getWorkflowParameters(String workflowId) { Objects.requireNonNull(workflowId, "workflow id is null"); LOGGER.info("Getting configuration parameters of wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.hasMap(STORED_PARAMETERS_MAP_NAME) ) { + if (wfMVStore.hasMap(STORED_PARAMETERS_MAP_NAME)) { MVMap storedParametersMap = wfMVStore.openMap(STORED_PARAMETERS_MAP_NAME, mapBuilder); DateTime baseCaseDate = DateTime.parse(storedParametersMap.get(STORED_PARAMETERS_BASECASE_KEY)); int states = Integer.parseInt(storedParametersMap.get(STORED_PARAMETERS_STATE_NUMBER_KEY)); @@ -877,27 +851,27 @@ public OnlineWorkflowParameters getWorkflowParameters(String workflowId) { boolean analyseBasecase = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_ANALYSE_BASECASE_KEY)); boolean validation = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_VALIDATION_KEY)); Set securityIndexes = null; - if ( storedParametersMap.containsKey(STORED_PARAMETERS_SECURITY_INDEXES_KEY) ) + if (storedParametersMap.containsKey(STORED_PARAMETERS_SECURITY_INDEXES_KEY)) securityIndexes = OnlineDbMVStoreUtils.jsonToIndexesTypes(storedParametersMap.get(STORED_PARAMETERS_SECURITY_INDEXES_KEY)); CaseType caseType = CaseType.valueOf(storedParametersMap.get(STORED_PARAMETERS_CASE_TYPE_KEY)); Set countries = OnlineDbMVStoreUtils.jsonToCountries(storedParametersMap.get(STORED_PARAMETERS_COUNTRIES_KEY)); boolean mergeOptimized = OnlineWorkflowParameters.DEFAULT_MERGE_OPTIMIZED; - if ( storedParametersMap.containsKey(STORED_PARAMETERS_MERGE_OPTIMIZED_KEY)) + if (storedParametersMap.containsKey(STORED_PARAMETERS_MERGE_OPTIMIZED_KEY)) mergeOptimized = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_MERGE_OPTIMIZED_KEY)); float limitReduction = OnlineWorkflowParameters.DEFAULT_LIMIT_REDUCTION; - if ( storedParametersMap.containsKey(STORED_PARAMETERS_LIMIT_REDUCTION_KEY)) + if 
(storedParametersMap.containsKey(STORED_PARAMETERS_LIMIT_REDUCTION_KEY)) limitReduction = Float.parseFloat(storedParametersMap.get(STORED_PARAMETERS_LIMIT_REDUCTION_KEY)); boolean handleViolations = OnlineWorkflowParameters.DEFAULT_HANDLE_VIOLATIONS_IN_N; - if ( storedParametersMap.containsKey(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY)) + if (storedParametersMap.containsKey(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY)) handleViolations = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY)); float constraintMargin = OnlineWorkflowParameters.DEFAULT_CONSTRAINT_MARGIN; - if ( storedParametersMap.containsKey(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)) + if (storedParametersMap.containsKey(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)) constraintMargin = Float.parseFloat(storedParametersMap.get(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)); - String caseFile=null; - if ( storedParametersMap.containsKey(STORED_PARAMETERS_CASE_FILE_KEY)) + String caseFile = null; + if (storedParametersMap.containsKey(STORED_PARAMETERS_CASE_FILE_KEY)) caseFile = storedParametersMap.get(STORED_PARAMETERS_CASE_FILE_KEY); - return new OnlineWorkflowParameters(baseCaseDate, + return new OnlineWorkflowParameters(baseCaseDate, states, histoInterval, offlineWorkflowId, @@ -915,7 +889,7 @@ public OnlineWorkflowParameters getWorkflowParameters(String workflowId) { handleViolations, constraintMargin, caseFile - ); + ); } else { LOGGER.warn("No configuration parameters of wf {} stored in online db", workflowId); return null; @@ -933,7 +907,7 @@ public void storeStatesProcessingStatus(String workflowId, Map statesProcessingStatusMap = wfMVStore.openMap(STORED_STATES_PROCESSING_STATUS_MAP_NAME, mapBuilder); // store states with processing status @@ -970,9 +944,9 @@ private void removeStatesProcessingStatus(String workflowId, MVStore wfMVStore) public Map getStatesProcessingStatus(String workflowId) { Objects.requireNonNull(workflowId, "workflow id is null"); LOGGER.info("Getting states processing status of wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.hasMap(STORED_STATES_PROCESSING_STATUS_MAP_NAME) ) { + if (wfMVStore.hasMap(STORED_STATES_PROCESSING_STATUS_MAP_NAME)) { MVMap statesProcessingStatusMap = wfMVStore.openMap(STORED_STATES_PROCESSING_STATUS_MAP_NAME, mapBuilder); // create states processing status Map statesProcessingStatus = new HashMap(); @@ -980,9 +954,9 @@ private void removeStatesProcessingStatus(String workflowId, MVStore wfMVStore) Collection stateWithProcessingStatus = OnlineDbMVStoreUtils.jsonToStatesIds(statesProcessingStatusMap.get(STORED_STATES_LIST_KEY)); for (Integer stateId : stateWithProcessingStatus) { MVMap stateProcessingStatusMap = wfMVStore.openMap(stateId + STORED_STATE_PROCESSING_STATUS_MAP_SUFFIX, mapBuilder); - Map processingStatus = new HashMap(); + Map processingStatus = new HashMap(); for (String step : stateProcessingStatusMap.keySet()) { - if ( !step.equals(STORED_STATES_STATE_DETAILS_KEY) ) + if (!step.equals(STORED_STATES_STATE_DETAILS_KEY)) processingStatus.put(step, stateProcessingStatusMap.get(step)); } statesProcessingStatus.put(stateId, new StateProcessingStatusImpl(processingStatus, stateProcessingStatusMap.get(STORED_STATES_STATE_DETAILS_KEY))); @@ -1000,13 +974,13 @@ private void removeStatesProcessingStatus(String workflowId, MVStore wfMVStore) @Override public void storeState(String workflowId, Integer stateId, Network network) { - String 
stateIdStr=String.valueOf(stateId); + String stateIdStr = String.valueOf(stateId); LOGGER.info("Storing state {} of workflow {}", stateIdStr, workflowId); - if ( network.getStateManager().getStateIds().contains(stateIdStr) ) { + if (network.getStateManager().getStateIds().contains(stateIdStr)) { network.getStateManager().setWorkingState(stateIdStr); Path workflowStatesFolder = getWorkflowStatesFolder(workflowId); Path stateFolder = Paths.get(workflowStatesFolder.toString(), STORED_STATE_PREFIX + stateId); - if ( Files.exists(stateFolder) ) { + if (Files.exists(stateFolder)) { //remove current state file, if it already exists for (int i = 0; i < XIIDMEXTENSIONS.length; i++) { Path stateFile = Paths.get(stateFolder.toString(), network.getId() + XIIDMEXTENSIONS[i]); @@ -1023,8 +997,8 @@ public void storeState(String workflowId, Integer stateId, Network network) { try { Files.createDirectories(stateFolder); } catch (IOException e) { - String errorMessage = "online db: folder " + workflowStatesFolder + " for workflow " + workflowId - + " and state " + stateIdStr + " cannot be created: " + e.getMessage(); + String errorMessage = "online db: folder " + workflowStatesFolder + " for workflow " + workflowId + + " and state " + stateIdStr + " cannot be created: " + e.getMessage(); LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -1038,8 +1012,8 @@ public void storeState(String workflowId, Integer stateId, Network network) { Exporters.export("XIIDM", network, parameters, dataSource); // store network state values, for later serialization Map networkValues = IIDM2DB.extractCimValues(network, new IIDM2DB.Config(network.getId(), true, true)).getSingleValueMap(); - ConcurrentHashMap> workflowStates = new ConcurrentHashMap>(); - if ( workflowsStates.containsKey(workflowId) ) + ConcurrentHashMap> workflowStates = new ConcurrentHashMap>(); + if (workflowsStates.containsKey(workflowId)) workflowStates = workflowsStates.get(workflowId); workflowStates.put(stateId, networkValues); workflowsStates.put(workflowId, workflowStates); @@ -1052,18 +1026,18 @@ public void storeState(String workflowId, Integer stateId, Network network) { private void serializeStoredWorkflowsStates() { LOGGER.info("Serializing stored workflows states"); - for(String workflowId : workflowsStates.keySet()) { - if ( workflowStatesFolderExists(workflowId)) { + for (String workflowId : workflowsStates.keySet()) { + if (workflowStatesFolderExists(workflowId)) { LOGGER.info("Serializing network data of workflow {}", workflowId); ConcurrentHashMap> workflowStates = workflowsStates.get(workflowId); Path workflowStatesFolder = getWorkflowStatesFolder(workflowId); Path csvFile = Paths.get(workflowStatesFolder.toString(), SERIALIZED_STATES_FILENAME); - try(FileWriter fileWriter = new FileWriter(csvFile.toFile()); - CsvListWriter csvWriter = new CsvListWriter(fileWriter, new CsvPreference.Builder('"', ';', "\r\n").build())) { + try (FileWriter fileWriter = new FileWriter(csvFile.toFile()); + CsvListWriter csvWriter = new CsvListWriter(fileWriter, new CsvPreference.Builder('"', ';', "\r\n").build())) { boolean printHeaders = true; - for(Integer stateId : workflowStates.keySet()) { - Map networkValues = workflowStates.get(stateId); - if ( printHeaders ) { + for (Integer stateId : workflowStates.keySet()) { + Map networkValues = workflowStates.get(stateId); + if (printHeaders) { List headers = new ArrayList<>(networkValues.size()); for (HistoDbAttributeId attrId : networkValues.keySet()) { headers.add(attrId.toString()); @@ 
-1071,8 +1045,8 @@ private void serializeStoredWorkflowsStates() { ArrayList headersList = new ArrayList<>(); headersList.add("workflow"); headersList.add("state"); - headersList.addAll(Arrays.asList(headers.toArray(new String[] {}))); - csvWriter.writeHeader(headersList.toArray(new String[] {})); + headersList.addAll(Arrays.asList(headers.toArray(new String[]{}))); + csvWriter.writeHeader(headersList.toArray(new String[]{})); printHeaders = false; } ArrayList valuesList = new ArrayList<>(); @@ -1092,7 +1066,7 @@ private void serializeStoredWorkflowsStates() { public List listStoredStates(String workflowId) { LOGGER.info("Getting list of stored states for workflow {}", workflowId); List storedStates = new ArrayList(); - if ( workflowStatesFolderExists(workflowId)) { + if (workflowStatesFolderExists(workflowId)) { Path workflowStatesFolder = getWorkflowStatesFolder(workflowId); File[] files = workflowStatesFolder.toFile().listFiles(new FilenameFilter() { public boolean accept(File dir, String name) { @@ -1100,7 +1074,7 @@ public boolean accept(File dir, String name) { } }); for (File file : files) { - if ( file.isDirectory() ) { + if (file.isDirectory()) { String stateId = file.getName().substring(STORED_STATE_PREFIX.length()); storedStates.add(Integer.parseInt(stateId)); } @@ -1119,12 +1093,12 @@ public int compare(Integer o1, Integer o2) { @Override public Network getState(String workflowId, Integer stateId) { - String stateIdStr=String.valueOf(stateId); + String stateIdStr = String.valueOf(stateId); LOGGER.info("Getting state {} of workflow {}", stateIdStr, workflowId); Path workflowStatesFolder = getWorkflowStatesFolder(workflowId); Path stateFolder = Paths.get(workflowStatesFolder.toString(), STORED_STATE_PREFIX + stateIdStr); - if ( Files.exists(stateFolder) && Files.isDirectory(stateFolder) ) { - if ( stateFolder.toFile().list().length == 1 ) { + if (Files.exists(stateFolder) && Files.isDirectory(stateFolder)) { + if (stateFolder.toFile().list().length == 1) { File stateFile = stateFolder.toFile().listFiles()[0]; String basename = stateFile.getName(); int extIndex = basename.lastIndexOf("."); @@ -1157,14 +1131,14 @@ public boolean deleteWorkflow(String workflowId) { boolean workflowDeleted = false; boolean workflowStatesDeleted = true; // if stored states for this workflow exist - if ( workflowStatesFolderExists(workflowId) ) + if (workflowStatesFolderExists(workflowId)) // delete them workflowStatesDeleted = deleteStates(workflowId); // if stored states have been deleted - if ( workflowStatesDeleted ) { + if (workflowStatesDeleted) { // store workflow results Path workflowFile = Paths.get(config.getOnlineDbDir().toFile().toString(), STORED_WORKFLOW_PREFIX + workflowId); - if ( workflowFile.toFile().exists() && workflowFile.toFile().isFile() ) + if (workflowFile.toFile().exists() && workflowFile.toFile().isFile()) try { workflowDeleted = Files.deleteIfExists(workflowFile); } catch (IOException e) { @@ -1182,7 +1156,7 @@ public boolean deleteStates(String workflowId) { LOGGER.info("Deleting stored states of workflow {}", workflowId); boolean workflowStatesDeleted = false; Path workflowStatesFolder = Paths.get(config.getOnlineDbDir().toFile().toString(), STORED_WORKFLOW_STATES_FOLDER_PREFIX + workflowId); - if ( workflowStatesFolder.toFile().exists() && workflowStatesFolder.toFile().isDirectory() ) + if (workflowStatesFolder.toFile().exists() && workflowStatesFolder.toFile().isDirectory()) try { FileUtils.deleteDirectory(workflowStatesFolder.toFile()); workflowStatesDeleted = true; @@ 
-1197,20 +1171,20 @@ public boolean deleteStates(String workflowId) { @Override public void exportStates(String workflowId, Path file) { - if ( workflowStatesFolderExists(workflowId)) { + if (workflowStatesFolderExists(workflowId)) { LOGGER.info("Exporting states for workflow {}", workflowId); Path workflowStatesFolder = getWorkflowStatesFolder(workflowId); Path csvFile = Paths.get(workflowStatesFolder.toString(), SERIALIZED_STATES_FILENAME); - if ( !csvFile.toFile().exists() ) { + if (!csvFile.toFile().exists()) { LOGGER.info("Serializing network data of workflow {}", workflowId); - try(FileWriter fileWriter = new FileWriter(csvFile.toFile()); - CsvListWriter csvWriter = new CsvListWriter(fileWriter, new CsvPreference.Builder('"', ';', "\r\n").build())) { + try (FileWriter fileWriter = new FileWriter(csvFile.toFile()); + CsvListWriter csvWriter = new CsvListWriter(fileWriter, new CsvPreference.Builder('"', ';', "\r\n").build())) { boolean printHeaders = true; - for(Integer stateId : listStoredStates(workflowId)) { + for (Integer stateId : listStoredStates(workflowId)) { Network network = getState(workflowId, stateId); - if ( network != null ) { + if (network != null) { Map networkValues = IIDM2DB.extractCimValues(network, new IIDM2DB.Config(network.getId(), true, true)).getSingleValueMap(); - if ( printHeaders ) { + if (printHeaders) { List headers = new ArrayList<>(networkValues.size()); for (HistoDbAttributeId attrId : networkValues.keySet()) { headers.add(attrId.toString()); @@ -1218,8 +1192,8 @@ public void exportStates(String workflowId, Path file) { ArrayList headersList = new ArrayList<>(); headersList.add("workflow"); headersList.add("state"); - headersList.addAll(Arrays.asList(headers.toArray(new String[] {}))); - csvWriter.writeHeader(headersList.toArray(new String[] {})); + headersList.addAll(Arrays.asList(headers.toArray(new String[]{}))); + csvWriter.writeHeader(headersList.toArray(new String[]{})); printHeaders = false; } ArrayList valuesList = new ArrayList<>(); @@ -1238,13 +1212,13 @@ public void exportStates(String workflowId, Path file) { } catch (IOException e) { throw new RuntimeException(e); } - } else + } else LOGGER.error("No stored states for workflow {}", workflowId); } @Override public void storeViolations(String workflowId, Integer stateId, OnlineStep step, List violations) { - String stateIdStr=String.valueOf(stateId); + String stateIdStr = String.valueOf(stateId); LOGGER.info("Storing violations for wf {}, step {} and state {}", workflowId, step.name(), stateIdStr); storeViolations(workflowId, STORED_VIOLATIONS_MAP_PREFIX + step.name() + "_" + stateIdStr, violations); LOGGER.info("Storing violations metadata for wf {}, step {} and state {}", workflowId, step.name(), stateIdStr); @@ -1256,14 +1230,14 @@ private void storeViolations(String workflowId, String mapName, List metricsMap = wfMVStore.openMap(mapName, mapBuilder); int violationIndex = 0; - for ( LimitViolation limitViolation : violations ) { + for (LimitViolation limitViolation : violations) { String violationId = limitViolation.getSubject().getId() + "_" + violationIndex; metricsMap.put(violationId, OnlineDbMVStoreUtils.limitViolationToJson(limitViolation)); violationIndex++; } wfMVStore.commit(); - } catch(Throwable e) { - String errorMessage = "Error storing violations for wf " + workflowId + " in map " + mapName + ": " + e.getMessage(); + } catch (Throwable e) { + String errorMessage = "Error storing violations for wf " + workflowId + " in map " + mapName + ": " + e.getMessage(); 
LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -1285,11 +1259,11 @@ private void storeViolationsMetadata(String workflowId, String stateId, OnlineSt storedStatesMap.putIfAbsent(stateId, "1"); // save info about stored steps per state MVMap stepStepMap = wfMVStore.openMap(stateId + STORED_VIOLATIONS_STEPS_MAP_SUFFIX, mapBuilder); - stepStepMap.putIfAbsent(step.name(), ""); + stepStepMap.putIfAbsent(step.name(), ""); wfMVStore.commit(); - } catch(Throwable e) { - String errorMessage = "Error storing violations metadata for wf " + workflowId + ", step "+ step.name() + ", state " + stateId + ": " + e.getMessage(); + } catch (Throwable e) { + String errorMessage = "Error storing violations metadata for wf " + workflowId + ", step " + step.name() + ", state " + stateId + ": " + e.getMessage(); LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -1297,31 +1271,31 @@ private void storeViolationsMetadata(String workflowId, String stateId, OnlineSt @Override public List getViolations(String workflowId, Integer stateId, OnlineStep step) { - String stateIdStr=String.valueOf(stateId); + String stateIdStr = String.valueOf(stateId); LOGGER.info("Getting violations for wf {}, step {} and state {}", workflowId, step.name(), stateIdStr); return getStoredViolations(workflowId, STORED_VIOLATIONS_MAP_PREFIX + step.name() + "_" + stateIdStr, null); } private List getStoredViolations(String workflowId, String mapName, Network network) { - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.getMapNames().contains(mapName) ) { - if ( network==null ) + if (wfMVStore.getMapNames().contains(mapName)) { + if (network == null) // load network: used to get equipment from equipment id, when creating limit violations network = getState(workflowId, 0); - if ( network!=null ) { + if (network != null) { List violations = new ArrayList(); Map storedMap = wfMVStore.openMap(mapName, mapBuilder); - for ( String violationId : storedMap.keySet() ) { + for (String violationId : storedMap.keySet()) { LimitViolation violation = OnlineDbMVStoreUtils.jsonToLimitViolation(storedMap.get(violationId), network); - if ( violation != null ) + if (violation != null) violations.add(violation); } return violations; } else { LOGGER.warn("No network data (states) stored for wf {}, cannot get violations", workflowId); return null; - } + } } else { LOGGER.warn("No map {} in wf {}", mapName, workflowId); return null; @@ -1336,20 +1310,20 @@ private List getStoredViolations(String workflowId, String mapNa public Map> getViolations(String workflowId, Integer stateId) { String stateIdStr = Integer.toString(stateId); LOGGER.info("Getting violations for wf {} and state {}", workflowId, stateIdStr); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); // check if there are stored violations Map storedStatesMap = wfMVStore.openMap(STORED_VIOLATIONS_STATES_MAP_NAME, mapBuilder); - if ( storedStatesMap.containsKey(stateIdStr) ) { + if (storedStatesMap.containsKey(stateIdStr)) { // load network: used to get equipment from equipment id, when creating limit violations Network network = getState(workflowId, 0); - if ( network!=null ) { + if (network != null) { Map> stateViolations = new HashMap>(); MVMap storedStepsMap = wfMVStore.openMap(stateIdStr + STORED_VIOLATIONS_STEPS_MAP_SUFFIX, mapBuilder); - for ( String stepName : storedStepsMap.keySet() ) { + for 
(String stepName : storedStepsMap.keySet()) { OnlineStep step = OnlineStep.valueOf(stepName); List violations = getStoredViolations(workflowId, STORED_VIOLATIONS_MAP_PREFIX + step.name() + "_" + stateId, network); - if ( violations!=null ) + if (violations != null) stateViolations.put(step, violations); } return stateViolations; @@ -1371,19 +1345,19 @@ public Map> getViolations(String workflowId, In @Override public Map> getViolations(String workflowId, OnlineStep step) { LOGGER.info("Getting violations for wf {} and step {}", workflowId, step.name()); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); // check if there are stored violations Map storedStepsMap = wfMVStore.openMap(STORED_VIOLATIONS_STEPS_MAP_NAME, mapBuilder); - if ( storedStepsMap.containsKey(step.name()) ) { + if (storedStepsMap.containsKey(step.name())) { // load network: used to get equipment from equipment id, when creating limit violations Network network = getState(workflowId, 0); - if ( network!=null ) { + if (network != null) { Map> stepViolations = new HashMap>(); MVMap storedStatesMap = wfMVStore.openMap(step.name() + STORED_VIOLATIONS_STATES_MAP_SUFFIX, mapBuilder); - for ( String stateId : storedStatesMap.keySet() ) { + for (String stateId : storedStatesMap.keySet()) { List violations = getStoredViolations(workflowId, STORED_VIOLATIONS_MAP_PREFIX + step.name() + "_" + stateId, network); - if ( violations!=null ) + if (violations != null) stepViolations.put(Integer.valueOf(stateId), violations); } return stepViolations; @@ -1404,24 +1378,24 @@ public Map> getViolations(String workflowId, Onlin @Override public Map>> getViolations(String workflowId) { LOGGER.info("Getting violations for wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); // check if there are stored violations Map storedStatesMap = wfMVStore.openMap(STORED_VIOLATIONS_STATES_MAP_NAME, mapBuilder); - if ( !storedStatesMap.isEmpty() ) { + if (!storedStatesMap.isEmpty()) { // load network: used to get equipment from equipment id, when creating limit violations Network network = getState(workflowId, 0); - if ( network!=null ) { - Map>> wfViolations = new HashMap>>(); - for(String stateIdStr : storedStatesMap.keySet()) { + if (network != null) { + Map>> wfViolations = new HashMap>>(); + for (String stateIdStr : storedStatesMap.keySet()) { Integer stateId = Integer.parseInt(stateIdStr); Map> stateViolations = new HashMap>(); MVMap storedStepsMap = wfMVStore.openMap(stateIdStr + STORED_VIOLATIONS_STEPS_MAP_SUFFIX, mapBuilder); - if ( !storedStepsMap.isEmpty() ) { - for ( String stepName : storedStepsMap.keySet() ) { + if (!storedStepsMap.isEmpty()) { + for (String stepName : storedStepsMap.keySet()) { OnlineStep step = OnlineStep.valueOf(stepName); List violations = getStoredViolations(workflowId, STORED_VIOLATIONS_MAP_PREFIX + step.name() + "_" + stateId, network); - if ( violations!=null ) + if (violations != null) stateViolations.put(step, violations); } wfViolations.put(stateId, stateViolations); @@ -1443,9 +1417,9 @@ public Map>> getViolations(String } @Override - public void storePostContingencyViolations(String workflowId, Integer stateId, String contingencyId, - boolean loadflowConverge, List violations) { - String stateIdStr=String.valueOf(stateId); + public void storePostContingencyViolations(String workflowId, Integer stateId, String contingencyId, + boolean loadflowConverge, List 
violations) { + String stateIdStr = String.valueOf(stateId); LOGGER.info("Storing post contingency violations for wf {}, contingency {} and state {}", workflowId, contingencyId, stateIdStr); storeViolations(workflowId, STORED_PC_VIOLATIONS_MAP_PREFIX + contingencyId + "_" + stateIdStr, violations); LOGGER.info("Storing post contingency violations metadata for wf {}, contingency {} and state {}", workflowId, contingencyId, stateIdStr); @@ -1470,18 +1444,18 @@ private synchronized void storePCViolationsMetadata(String workflowId, String st storedStatesMap.putIfAbsent(stateId, "1"); // save info about stored contingencies per state MVMap stateContingencyMap = wfMVStore.openMap(stateId + STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_SUFFIX, mapBuilder); - LOGGER.info("storePCViolationsMetadata: Adding contingency {} to map {} for workflow {}, state {}", + LOGGER.info("storePCViolationsMetadata: Adding contingency {} to map {} for workflow {}, state {}", contingencyId, - stateId + STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_SUFFIX, - workflowId, + stateId + STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_SUFFIX, + workflowId, stateId - ); + ); //stateContingencyMap.putIfAbsent(contingencyId, ""); stateContingencyMap.put(contingencyId, ""); wfMVStore.commit(); - } catch(Throwable e) { - String errorMessage = "Error storing pc violations metadata for wf " + workflowId + ", contingency "+ contingencyId + ", state " + stateId + ": " + e.getMessage(); + } catch (Throwable e) { + String errorMessage = "Error storing pc violations metadata for wf " + workflowId + ", contingency " + contingencyId + ", state " + stateId + ": " + e.getMessage(); LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -1503,18 +1477,18 @@ private synchronized void storePSLoadflowConvergence(String workflowId, String s storedStatesMap.putIfAbsent(stateId, "1"); // save info about stored contingencies per state MVMap stateContingencyMap = wfMVStore.openMap(stateId + STORED_PC_LOADFLOW_CONTINGENCIES_MAP_SUFFIX, mapBuilder); - LOGGER.info("storePSLoadflowConvergence: Adding contingency {} to map {} for workflow {}, state {}", + LOGGER.info("storePSLoadflowConvergence: Adding contingency {} to map {} for workflow {}, state {}", contingencyId, - stateId + STORED_PC_LOADFLOW_CONTINGENCIES_MAP_SUFFIX, - workflowId, + stateId + STORED_PC_LOADFLOW_CONTINGENCIES_MAP_SUFFIX, + workflowId, stateId - ); + ); //stateContingencyMap.putIfAbsent(contingencyId, Boolean.toString(loadflowConverge)); stateContingencyMap.put(contingencyId, Boolean.toString(loadflowConverge)); wfMVStore.commit(); - } catch(Throwable e) { - String errorMessage = "Error storing pc loadflow convergence for wf " + workflowId + ", contingency "+ contingencyId + ", state " + stateId + ": " + e.getMessage(); + } catch (Throwable e) { + String errorMessage = "Error storing pc loadflow convergence for wf " + workflowId + ", contingency " + contingencyId + ", state " + stateId + ": " + e.getMessage(); LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -1522,7 +1496,7 @@ private synchronized void storePSLoadflowConvergence(String workflowId, String s @Override public List getPostContingencyViolations(String workflowId, Integer stateId, String contingencyId) { - String stateIdStr=String.valueOf(stateId); + String stateIdStr = String.valueOf(stateId); LOGGER.info("Getting post contingency violations for wf {}, contingency {} and state {}", workflowId, contingencyId, stateIdStr); return getStoredViolations(workflowId, STORED_PC_VIOLATIONS_MAP_PREFIX + 
contingencyId + "_" + stateIdStr, null); } @@ -1531,19 +1505,19 @@ public List getPostContingencyViolations(String workflowId, Inte public Map> getPostContingencyViolations(String workflowId, Integer stateId) { String stateIdStr = Integer.toString(stateId); LOGGER.info("Getting post contingency violations for wf {} and state {}", workflowId, stateIdStr); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); // check if there are stored violations Map storedStatesMap = wfMVStore.openMap(STORED_PC_VIOLATIONS_STATES_MAP_NAME, mapBuilder); - if ( storedStatesMap.containsKey(stateIdStr) ) { + if (storedStatesMap.containsKey(stateIdStr)) { // load network: used to get equipment from equipment id, when creating limit violations Network network = getState(workflowId, 0); - if ( network!=null ) { + if (network != null) { Map> stateViolations = new HashMap>(); MVMap storedContingenciesMap = wfMVStore.openMap(stateIdStr + STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_SUFFIX, mapBuilder); - for ( String contingencyId : storedContingenciesMap.keySet() ) { + for (String contingencyId : storedContingenciesMap.keySet()) { List violations = getStoredViolations(workflowId, STORED_PC_VIOLATIONS_MAP_PREFIX + contingencyId + "_" + stateId, network); - if ( violations!=null ) + if (violations != null) stateViolations.put(contingencyId, violations); } return stateViolations; @@ -1564,19 +1538,19 @@ public Map> getPostContingencyViolations(String wor @Override public Map> getPostContingencyViolations(String workflowId, String contingencyId) { LOGGER.info("Getting post contingency violations for wf {} and contingency {}", workflowId, contingencyId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); // check if there are stored violations Map storedContingenciesMap = wfMVStore.openMap(STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_NAME, mapBuilder); - if ( storedContingenciesMap.containsKey(contingencyId) ) { + if (storedContingenciesMap.containsKey(contingencyId)) { // load network: used to get equipment from equipment id, when creating limit violations Network network = getState(workflowId, 0); - if ( network!=null ) { + if (network != null) { Map> contingencyViolations = new HashMap>(); MVMap storedStatesMap = wfMVStore.openMap(contingencyId + STORED_PC_VIOLATIONS_STATES_MAP_SUFFIX, mapBuilder); - for ( String stateId : storedStatesMap.keySet() ) { + for (String stateId : storedStatesMap.keySet()) { List violations = getStoredViolations(workflowId, STORED_PC_VIOLATIONS_MAP_PREFIX + contingencyId + "_" + stateId, network); - if ( violations!=null ) + if (violations != null) contingencyViolations.put(Integer.valueOf(stateId), violations); } return contingencyViolations; @@ -1597,23 +1571,23 @@ public Map> getPostContingencyViolations(String wo @Override public Map>> getPostContingencyViolations(String workflowId) { LOGGER.info("Getting post contingency violations for wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); // check if there are stored violations Map storedStatesMap = wfMVStore.openMap(STORED_PC_VIOLATIONS_STATES_MAP_NAME, mapBuilder); - if ( !storedStatesMap.isEmpty() ) { + if (!storedStatesMap.isEmpty()) { // load network: used to get equipment from equipment id, when creating limit violations Network network = getState(workflowId, 0); - if ( network!=null ) { - Map>> wfViolations = new 
HashMap>>(); - for(String stateIdStr : storedStatesMap.keySet()) { + if (network != null) { + Map>> wfViolations = new HashMap>>(); + for (String stateIdStr : storedStatesMap.keySet()) { Integer stateId = Integer.parseInt(stateIdStr); Map> stateViolations = new HashMap>(); MVMap storedContingenciesMap = wfMVStore.openMap(stateIdStr + STORED_PC_VIOLATIONS_CONTINGENCIES_MAP_SUFFIX, mapBuilder); - if ( !storedContingenciesMap.isEmpty() ) { - for ( String contingencyId : storedContingenciesMap.keySet() ) { + if (!storedContingenciesMap.isEmpty()) { + for (String contingencyId : storedContingenciesMap.keySet()) { List violations = getStoredViolations(workflowId, STORED_PC_VIOLATIONS_MAP_PREFIX + contingencyId + "_" + stateId, network); - if ( violations!=null ) + if (violations != null) stateViolations.put(contingencyId, violations); } wfViolations.put(stateId, stateViolations); @@ -1637,14 +1611,14 @@ public Map>> getPostContingencyViolati @Override public Map getPostContingencyLoadflowConvergence(String workflowId, Integer stateId) { String stateIdStr = Integer.toString(stateId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { Map loadflowConvergence = new HashMap(); MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.getMapNames().contains(STORED_PC_LOADFLOW_STATES_MAP_NAME) ) { + if (wfMVStore.getMapNames().contains(STORED_PC_LOADFLOW_STATES_MAP_NAME)) { Map storedStatesMap = wfMVStore.openMap(STORED_PC_LOADFLOW_STATES_MAP_NAME, mapBuilder); - if ( storedStatesMap.containsKey(stateIdStr)) { + if (storedStatesMap.containsKey(stateIdStr)) { MVMap stateContingencyMap = wfMVStore.openMap(stateIdStr + STORED_PC_LOADFLOW_CONTINGENCIES_MAP_SUFFIX, mapBuilder); - for ( String contingencyId : stateContingencyMap.keySet() ) { + for (String contingencyId : stateContingencyMap.keySet()) { loadflowConvergence.put(contingencyId, Boolean.valueOf(stateContingencyMap.get(contingencyId))); } return loadflowConvergence; @@ -1664,14 +1638,14 @@ public Map getPostContingencyLoadflowConvergence(String workflo @Override public Map getPostContingencyLoadflowConvergence(String workflowId, String contingencyId) { - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { Map loadflowConvergence = new HashMap(); MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.getMapNames().contains(STORED_PC_LOADFLOW_CONTINGENCIES_MAP_NAME) ) { + if (wfMVStore.getMapNames().contains(STORED_PC_LOADFLOW_CONTINGENCIES_MAP_NAME)) { Map storedStatesMap = wfMVStore.openMap(STORED_PC_LOADFLOW_CONTINGENCIES_MAP_NAME, mapBuilder); - if ( storedStatesMap.containsKey(contingencyId)) { + if (storedStatesMap.containsKey(contingencyId)) { MVMap contingencyStateMap = wfMVStore.openMap(contingencyId + STORED_PC_LOADFLOW_STATES_MAP_SUFFIX, mapBuilder); - for ( String stateId : contingencyStateMap.keySet() ) { + for (String stateId : contingencyStateMap.keySet()) { loadflowConvergence.put(Integer.valueOf(stateId), Boolean.valueOf(contingencyStateMap.get(stateId))); } return loadflowConvergence; @@ -1691,15 +1665,15 @@ public Map getPostContingencyLoadflowConvergence(String workfl @Override public Map> getPostContingencyLoadflowConvergence(String workflowId) { - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { Map> loadflowConvergence = new HashMap>(); MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.getMapNames().contains(STORED_PC_LOADFLOW_STATES_MAP_NAME) ) { + if (wfMVStore.getMapNames().contains(STORED_PC_LOADFLOW_STATES_MAP_NAME)) { Map 
storedStatesMap = wfMVStore.openMap(STORED_PC_LOADFLOW_STATES_MAP_NAME, mapBuilder); - for ( String stateId : storedStatesMap.keySet() ) { + for (String stateId : storedStatesMap.keySet()) { MVMap stateContingencyMap = wfMVStore.openMap(stateId + STORED_PC_LOADFLOW_CONTINGENCIES_MAP_SUFFIX, mapBuilder); HashMap stateLoadflowConvergence = new HashMap(); - for ( String contingencyId : stateContingencyMap.keySet() ) { + for (String contingencyId : stateContingencyMap.keySet()) { stateLoadflowConvergence.put(contingencyId, Boolean.valueOf(stateContingencyMap.get(contingencyId))); } loadflowConvergence.put(Integer.valueOf(stateId), stateLoadflowConvergence); @@ -1723,13 +1697,13 @@ public void storeWcaRulesResults(String workflowId, OnlineWorkflowRulesResults r LOGGER.info("Storing results of wca rules for workflow {}", workflowId); MVStore wfMVStore = getStore(workflowId); // check if the results for this wf have already been stored - if ( wfMVStore.hasMap(STORED_WCA_RULES_RESULTS_MAP_NAME) ) + if (wfMVStore.hasMap(STORED_WCA_RULES_RESULTS_MAP_NAME)) removeWfWcaRulesResults(workflowId, wfMVStore); MVMap storedRulesResultsMap = wfMVStore.openMap(STORED_WCA_RULES_RESULTS_MAP_NAME, mapBuilder); // store time horizon storedRulesResultsMap.put(STORED_RESULTS_TIMEHORIZON_KEY, results.getTimeHorizon().getName()); // store contingencies with security rules results - storedRulesResultsMap.put(STORED_RULES_RESULTS_CONTINGENCIES_WITH_RULES_KEY, + storedRulesResultsMap.put(STORED_RULES_RESULTS_CONTINGENCIES_WITH_RULES_KEY, OnlineDbMVStoreUtils.contingenciesIdsToJson(results.getContingenciesWithSecurityRulesResults())); // store wca rules results for contingencies for (String contingencyId : results.getContingenciesWithSecurityRulesResults()) { @@ -1781,13 +1755,13 @@ private void removeWfWcaRulesResults(String workflowId, MVStore wfMVStore) { public OnlineWorkflowRulesResults getWcaRulesResults(String workflowId) { Objects.requireNonNull(workflowId, "workflow id is null"); LOGGER.info("Getting wca rules results of wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.hasMap(STORED_WCA_RULES_RESULTS_MAP_NAME) ) { + if (wfMVStore.hasMap(STORED_WCA_RULES_RESULTS_MAP_NAME)) { MVMap storedRulesResultsMap = wfMVStore.openMap(STORED_WCA_RULES_RESULTS_MAP_NAME, mapBuilder); // create workflow rules results OnlineWorkflowRulesResultsImpl wfRulesResults = new OnlineWorkflowRulesResultsImpl( - workflowId, + workflowId, TimeHorizon.valueOf(storedRulesResultsMap.get(STORED_RESULTS_TIMEHORIZON_KEY))); // add contingencies with rules results Collection contingenciesWithRules = OnlineDbMVStoreUtils.jsonToContingenciesIds( @@ -1801,10 +1775,10 @@ public OnlineWorkflowRulesResults getWcaRulesResults(String workflowId) { Map stateResults = OnlineDbMVStoreUtils.jsonToIndexesData(storedStateResultsMap.get(stateId)); StateStatus stateStatus = StateStatus.valueOf(storedStateStatusMap.get(stateId)); boolean rulesAvailable = true; - if ( storedStateAvailableRulesMap.containsKey(stateId) ) + if (storedStateAvailableRulesMap.containsKey(stateId)) rulesAvailable = Boolean.parseBoolean(storedStateAvailableRulesMap.get(stateId)); List invalidRules = new ArrayList(); - if ( storedStateInvalidRulesMap.containsKey(stateId) ) + if (storedStateInvalidRulesMap.containsKey(stateId)) invalidRules.addAll(OnlineDbMVStoreUtils.jsonToIndexesTypes(storedStateInvalidRulesMap.get(stateId))); 
wfRulesResults.addContingencyWithSecurityRulesResults(contingencyId, Integer.parseInt(stateId), stateStatus, stateResults, rulesAvailable, invalidRules); @@ -1839,11 +1813,11 @@ private boolean workflowStatesFolderExists(String workflowId) { private Path getWorkflowStatesFolder(String workflowId) { Path workflowStatesFolder = Paths.get(config.getOnlineDbDir().toString(), STORED_WORKFLOW_STATES_FOLDER_PREFIX + workflowId); - if ( !workflowStatesFolderExists(workflowId) ) + if (!workflowStatesFolderExists(workflowId)) try { Files.createDirectories(workflowStatesFolder); } catch (IOException e) { - String errorMessage = "online db: folder " + workflowStatesFolder + " for workflow " + workflowId + " cannot be created: " + e.getMessage(); + String errorMessage = "online db: folder " + workflowStatesFolder + " for workflow " + workflowId + " cannot be created: " + e.getMessage(); LOGGER.error(errorMessage); throw new RuntimeException(errorMessage); } @@ -1859,7 +1833,7 @@ private Path getWorkflowStatesFolder(String workflowId) { public List getStoredMaps(String workflowId) { Objects.requireNonNull(workflowId, "workflow id is null"); LOGGER.info("Getting stored maps of wf {}", workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); return new ArrayList(wfMVStore.getMapNames()); } else { @@ -1873,7 +1847,7 @@ public String exportStoredMapsList(String workflowId) { LOGGER.info("Exporting list of stored maps of wf {}", workflowId); StringBuffer storedMapList = new StringBuffer(); List storedMaps = getStoredMaps(workflowId); - if ( storedMaps != null ) { + if (storedMaps != null) { for (String map : storedMaps) { storedMapList.append(map + "\n"); } @@ -1885,9 +1859,9 @@ public String exportStoredMapContent(String workflowId, String mapName) { Objects.requireNonNull(workflowId, "workflow id is null"); Objects.requireNonNull(mapName, "map name id is null"); LOGGER.info("Exporting content of stored map {} of wf {}", mapName, workflowId); - if ( isWorkflowStored(workflowId) ) { + if (isWorkflowStored(workflowId)) { MVStore wfMVStore = getStore(workflowId); - if ( wfMVStore.hasMap(mapName) ) { + if (wfMVStore.hasMap(mapName)) { StringBuffer storedMapContent = new StringBuffer(); storedMapContent.append("Map " + mapName + "\n"); MVMap storedMap = wfMVStore.openMap(mapName, mapBuilder); @@ -1910,7 +1884,7 @@ public String exportStoredMapsContent(String workflowId) { LOGGER.info("Exporting content of stored maps of wf {}", workflowId); StringBuffer storedMapList = new StringBuffer(); List storedMaps = getStoredMaps(workflowId); - if ( storedMaps != null ) { + if (storedMaps != null) { for (String map : storedMaps) { storedMapList.append(exportStoredMapContent(workflowId, map) + "\n"); } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java index c8f49611..86ce3d7c 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/ListOnlineWorkflowsTool.java @@ -7,16 +7,14 @@ */ package eu.itesla_project.online.tools; -import java.io.FileWriter; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - +import com.google.auto.service.AutoService; +import 
eu.itesla_project.commons.tools.Command; +import eu.itesla_project.commons.tools.Tool; +import eu.itesla_project.modules.online.OnlineConfig; +import eu.itesla_project.modules.online.OnlineDb; +import eu.itesla_project.modules.online.OnlineWorkflowDetails; +import eu.itesla_project.modules.online.OnlineWorkflowParameters; import net.sf.json.JSONSerializer; - import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; @@ -28,17 +26,15 @@ import org.nocrala.tools.texttablefmt.CellStyle; import org.nocrala.tools.texttablefmt.Table; -import com.google.auto.service.AutoService; - -import eu.itesla_project.commons.tools.Command; -import eu.itesla_project.commons.tools.Tool; -import eu.itesla_project.modules.online.OnlineConfig; -import eu.itesla_project.modules.online.OnlineDb; -import eu.itesla_project.modules.online.OnlineWorkflowDetails; -import eu.itesla_project.modules.online.OnlineWorkflowParameters; +import java.io.FileWriter; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** - * * @author Quinary */ @AutoService(Tool.class) @@ -107,29 +103,29 @@ public void run(CommandLine line) throws Exception { OnlineConfig config = OnlineConfig.load(); OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create(); List workflows = null; - if ( line.hasOption("basecase") ) { + if (line.hasOption("basecase")) { DateTime basecaseDate = DateTime.parse(line.getOptionValue("basecase")); workflows = onlinedb.listWorkflows(basecaseDate); - } else if ( line.hasOption("basecases-interval") ) { + } else if (line.hasOption("basecases-interval")) { Interval basecasesInterval = Interval.parse(line.getOptionValue("basecases-interval")); workflows = onlinedb.listWorkflows(basecasesInterval); - } else if ( line.hasOption("workflow") ) { + } else if (line.hasOption("workflow")) { String workflowId = line.getOptionValue("workflow"); OnlineWorkflowDetails workflowDetails = onlinedb.getWorkflowDetails(workflowId); workflows = new ArrayList(); - if ( workflowDetails != null ) + if (workflowDetails != null) workflows.add(workflowDetails); } else - workflows = onlinedb.listWorkflows(); + workflows = onlinedb.listWorkflows(); boolean printParameters = line.hasOption("parameters"); DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"); Table table = new Table(2, BorderStyle.CLASSIC_WIDE); - if ( printParameters ) + if (printParameters) table = new Table(3, BorderStyle.CLASSIC_WIDE); - List> jsonData = new ArrayList>(); + List> jsonData = new ArrayList>(); table.addCell("ID", new CellStyle(CellStyle.HorizontalAlign.center)); table.addCell("Date", new CellStyle(CellStyle.HorizontalAlign.center)); - if ( printParameters ) + if (printParameters) table.addCell("Parameters", new CellStyle(CellStyle.HorizontalAlign.center)); for (OnlineWorkflowDetails workflow : workflows) { Map wfJsonData = new HashMap(); @@ -137,71 +133,71 @@ public void run(CommandLine line) throws Exception { wfJsonData.put("id", workflow.getWorkflowId()); table.addCell(formatter.print(workflow.getWorkflowDate())); wfJsonData.put("date", formatter.print(workflow.getWorkflowDate())); - if ( printParameters ) { + if (printParameters) { OnlineWorkflowParameters parameters = onlinedb.getWorkflowParameters(workflow.getWorkflowId()); - if ( parameters != null ) { - table.addCell("Basecase = "+parameters.getBaseCaseDate().toString()); + if (parameters != null) 
{ + table.addCell("Basecase = " + parameters.getBaseCaseDate().toString()); wfJsonData.put(OnlineWorkflowCommand.BASE_CASE, parameters.getBaseCaseDate().toString()); table.addCell(" "); table.addCell(" "); - table.addCell("Time Horizon = "+parameters.getTimeHorizon().getName()); + table.addCell("Time Horizon = " + parameters.getTimeHorizon().getName()); wfJsonData.put(OnlineWorkflowCommand.TIME_HORIZON, parameters.getTimeHorizon().getName()); table.addCell(" "); table.addCell(" "); - table.addCell("FE Analysis Id = "+parameters.getFeAnalysisId()); + table.addCell("FE Analysis Id = " + parameters.getFeAnalysisId()); wfJsonData.put(OnlineWorkflowCommand.FEANALYSIS_ID, parameters.getFeAnalysisId()); table.addCell(" "); table.addCell(" "); - table.addCell("Offline Workflow Id = "+parameters.getOfflineWorkflowId()); + table.addCell("Offline Workflow Id = " + parameters.getOfflineWorkflowId()); wfJsonData.put(OnlineWorkflowCommand.WORKFLOW_ID, parameters.getOfflineWorkflowId()); table.addCell(" "); table.addCell(" "); - table.addCell("Historical Interval = "+parameters.getHistoInterval().toString()); + table.addCell("Historical Interval = " + parameters.getHistoInterval().toString()); wfJsonData.put(OnlineWorkflowCommand.HISTODB_INTERVAL, parameters.getHistoInterval().toString()); table.addCell(" "); table.addCell(" "); - table.addCell("States = "+Integer.toString(parameters.getStates())); + table.addCell("States = " + Integer.toString(parameters.getStates())); wfJsonData.put(OnlineWorkflowCommand.STATES, Integer.toString(parameters.getStates())); table.addCell(" "); table.addCell(" "); - table.addCell("Rules Purity Threshold = "+Double.toString(parameters.getRulesPurityThreshold())); + table.addCell("Rules Purity Threshold = " + Double.toString(parameters.getRulesPurityThreshold())); wfJsonData.put(OnlineWorkflowCommand.RULES_PURITY, Double.toString(parameters.getRulesPurityThreshold())); table.addCell(" "); table.addCell(" "); - table.addCell("Store States = "+Boolean.toString(parameters.storeStates())); + table.addCell("Store States = " + Boolean.toString(parameters.storeStates())); wfJsonData.put(OnlineWorkflowCommand.STORE_STATES, Boolean.toString(parameters.storeStates())); table.addCell(" "); table.addCell(" "); - table.addCell("Analyse Basecase = "+Boolean.toString(parameters.analyseBasecase())); + table.addCell("Analyse Basecase = " + Boolean.toString(parameters.analyseBasecase())); wfJsonData.put(OnlineWorkflowCommand.ANALYSE_BASECASE, Boolean.toString(parameters.analyseBasecase())); table.addCell(" "); table.addCell(" "); - table.addCell("Validation = "+Boolean.toString(parameters.validation())); + table.addCell("Validation = " + Boolean.toString(parameters.validation())); wfJsonData.put(OnlineWorkflowCommand.VALIDATION, Boolean.toString(parameters.validation())); table.addCell(" "); table.addCell(" "); - String securityRulesString = parameters.getSecurityIndexes()==null ? "ALL" : parameters.getSecurityIndexes().toString(); - table.addCell("Security Rules = "+securityRulesString); + String securityRulesString = parameters.getSecurityIndexes() == null ? 
"ALL" : parameters.getSecurityIndexes().toString(); + table.addCell("Security Rules = " + securityRulesString); wfJsonData.put(OnlineWorkflowCommand.SECURITY_INDEXES, securityRulesString); table.addCell(" "); table.addCell(" "); - table.addCell("Case Type = "+parameters.getCaseType()); + table.addCell("Case Type = " + parameters.getCaseType()); wfJsonData.put(OnlineWorkflowCommand.CASE_TYPE, parameters.getCaseType().name()); table.addCell(" "); table.addCell(" "); - table.addCell("Countries = "+parameters.getCountries().toString()); + table.addCell("Countries = " + parameters.getCountries().toString()); wfJsonData.put(OnlineWorkflowCommand.COUNTRIES, parameters.getCountries().toString()); table.addCell(" "); table.addCell(" "); - table.addCell("Limits Reduction = "+Float.toString(parameters.getLimitReduction())); + table.addCell("Limits Reduction = " + Float.toString(parameters.getLimitReduction())); wfJsonData.put(OnlineWorkflowCommand.LIMIT_REDUCTION, Float.toString(parameters.getLimitReduction())); table.addCell(" "); table.addCell(" "); - table.addCell("Handle Violations in N = "+Boolean.toString(parameters.isHandleViolationsInN())); + table.addCell("Handle Violations in N = " + Boolean.toString(parameters.isHandleViolationsInN())); wfJsonData.put(OnlineWorkflowCommand.HANDLE_VIOLATION_IN_N, Boolean.toString(parameters.isHandleViolationsInN())); table.addCell(" "); table.addCell(" "); - table.addCell("Constrain Margin = "+Float.toString(parameters.getConstraintMargin())); + table.addCell("Constrain Margin = " + Float.toString(parameters.getConstraintMargin())); wfJsonData.put(OnlineWorkflowCommand.CONSTRAINT_MARGIN, Float.toString(parameters.getConstraintMargin())); if (parameters.getCaseFile() != null) { table.addCell(" "); @@ -215,7 +211,7 @@ public void run(CommandLine line) throws Exception { } jsonData.add(wfJsonData); } - if ( line.hasOption("json") ) { + if (line.hasOption("json")) { Path jsonFile = Paths.get(line.getOptionValue("json")); try (FileWriter jsonFileWriter = new FileWriter(jsonFile.toFile())) { //JSONSerializer.toJSON(jsonData).write(jsonFileWriter); diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java index b3e06715..3498843b 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java @@ -13,29 +13,28 @@ import org.apache.commons.cli.Options; /** - * * @author Quinary */ public class OnlineWorkflowCommand implements Command { - public static final OnlineWorkflowCommand INSTANCE =new OnlineWorkflowCommand(); + public static final OnlineWorkflowCommand INSTANCE = new OnlineWorkflowCommand(); - public static final String HOST="host"; - public static final String PORT="port"; - public static final String START_CMD="start-workflow"; - public static final String SHUTDOWN_CMD="shutdown"; - public static final String BASE_CASE="base-case"; + public static final String HOST = "host"; + public static final String PORT = "port"; + public static final String START_CMD = "start-workflow"; + public static final String SHUTDOWN_CMD = "shutdown"; + public static final String BASE_CASE = "base-case"; public static final String TIME_HORIZON = "time-horizon"; public static final String STATES = "states"; public static final String WORKFLOW_ID = "workflow"; public static final String HISTODB_INTERVAL = 
"histodb-interval"; public static final String THREADS = "threads"; - public static final String FEANALYSIS_ID ="fe-analysis-id"; - public static final String RULES_PURITY ="rules-purity"; - public static final String STORE_STATES ="store-states"; - public static final String ANALYSE_BASECASE ="analyse-basecase"; - public static final String VALIDATION ="validation"; - public static final String SECURITY_INDEXES ="security-indexes"; + public static final String FEANALYSIS_ID = "fe-analysis-id"; + public static final String RULES_PURITY = "rules-purity"; + public static final String STORE_STATES = "store-states"; + public static final String ANALYSE_BASECASE = "analyse-basecase"; + public static final String VALIDATION = "validation"; + public static final String SECURITY_INDEXES = "security-indexes"; public static final String BASECASES_INTERVAL = "basecases-interval"; public static final String CASE_TYPE = "case-type"; public static final String COUNTRIES = "countries"; @@ -43,7 +42,7 @@ public class OnlineWorkflowCommand implements Command { public static final String LIMIT_REDUCTION = "limits-reduction"; public static final String HANDLE_VIOLATION_IN_N = "handle-violations"; public static final String CONSTRAINT_MARGIN = "constraint-margin"; - public static final String CASE_FILE="case-file"; + public static final String CASE_FILE = "case-file"; @Override public String getName() { @@ -56,7 +55,7 @@ public String getTheme() { } @Override - public String getDescription() { + public String getDescription() { return "Online workflow application control"; } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java index ce06c87a..6717fc66 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java @@ -8,18 +8,18 @@ package eu.itesla_project.online.tools; import com.google.auto.service.AutoService; +import eu.itesla_project.cases.CaseRepository; +import eu.itesla_project.cases.CaseType; import eu.itesla_project.commons.tools.Command; import eu.itesla_project.commons.tools.Tool; import eu.itesla_project.computation.local.LocalComputationManager; import eu.itesla_project.iidm.network.Country; -import eu.itesla_project.cases.CaseRepository; -import eu.itesla_project.cases.CaseType; import eu.itesla_project.modules.online.OnlineConfig; import eu.itesla_project.modules.online.OnlineWorkflowParameters; import eu.itesla_project.modules.online.TimeHorizon; -import eu.itesla_project.simulation.securityindexes.SecurityIndexType; import eu.itesla_project.online.LocalOnlineApplicationMBean; import eu.itesla_project.online.OnlineWorkflowStartParameters; +import eu.itesla_project.simulation.securityindexes.SecurityIndexType; import org.apache.commons.cli.CommandLine; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -34,7 +34,6 @@ import java.util.stream.Collectors; /** - * * @author Quinary */ @AutoService(Tool.class) @@ -48,21 +47,21 @@ public Command getCommand() { @Override public void run(CommandLine line) throws Exception { - OnlineWorkflowStartParameters startconfig=OnlineWorkflowStartParameters.loadDefault(); + OnlineWorkflowStartParameters startconfig = OnlineWorkflowStartParameters.loadDefault(); - String host=line.getOptionValue(OnlineWorkflowCommand.HOST); - String port=line.getOptionValue(OnlineWorkflowCommand.PORT); - String 
threads=line.getOptionValue(OnlineWorkflowCommand.THREADS); - if(host!=null) + String host = line.getOptionValue(OnlineWorkflowCommand.HOST); + String port = line.getOptionValue(OnlineWorkflowCommand.PORT); + String threads = line.getOptionValue(OnlineWorkflowCommand.THREADS); + if (host != null) startconfig.setJmxHost(host); - if(port!=null) + if (port != null) startconfig.setJmxPort(Integer.valueOf(port)); - if(threads!=null) + if (threads != null) startconfig.setThreads(Integer.valueOf(threads)); - OnlineWorkflowParameters params=OnlineWorkflowParameters.loadDefault(); + OnlineWorkflowParameters params = OnlineWorkflowParameters.loadDefault(); - if(line.hasOption(OnlineWorkflowCommand.CASE_TYPE)) + if (line.hasOption(OnlineWorkflowCommand.CASE_TYPE)) params.setCaseType(CaseType.valueOf(line.getOptionValue(OnlineWorkflowCommand.CASE_TYPE))); Set countries = null; @@ -70,65 +69,65 @@ public void run(CommandLine line) throws Exception { countries = Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.COUNTRIES).split(",")) .map(Country::valueOf) .collect(Collectors.toSet()); - params.setCountries(countries);; + params.setCountries(countries); } - Set baseCasesSet=null; - if(line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)) { - Interval basecasesInterval=Interval.parse(line.getOptionValue(OnlineWorkflowCommand.BASECASES_INTERVAL)); - OnlineConfig oConfig=OnlineConfig.load(); - CaseRepository caseRepo=oConfig.getCaseRepositoryFactoryClass().newInstance().create(new LocalComputationManager()); - baseCasesSet=caseRepo.dataAvailable(params.getCaseType(), params.getCountries(), basecasesInterval); - System.out.println("Base cases available for interval " + basecasesInterval.toString() ); + Set baseCasesSet = null; + if (line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)) { + Interval basecasesInterval = Interval.parse(line.getOptionValue(OnlineWorkflowCommand.BASECASES_INTERVAL)); + OnlineConfig oConfig = OnlineConfig.load(); + CaseRepository caseRepo = oConfig.getCaseRepositoryFactoryClass().newInstance().create(new LocalComputationManager()); + baseCasesSet = caseRepo.dataAvailable(params.getCaseType(), params.getCountries(), basecasesInterval); + System.out.println("Base cases available for interval " + basecasesInterval.toString()); baseCasesSet.forEach(x -> { System.out.println(" " + x); }); } - if (baseCasesSet==null) { - baseCasesSet=new HashSet(); - String base=line.getOptionValue(OnlineWorkflowCommand.BASE_CASE); - if (base!=null) { + if (baseCasesSet == null) { + baseCasesSet = new HashSet(); + String base = line.getOptionValue(OnlineWorkflowCommand.BASE_CASE); + if (base != null) { baseCasesSet.add(DateTime.parse(base)); } else { baseCasesSet.add(params.getBaseCaseDate()); } } - if(line.hasOption(OnlineWorkflowCommand.CASE_FILE)) + if (line.hasOption(OnlineWorkflowCommand.CASE_FILE)) params.setCaseFile(line.getOptionValue(OnlineWorkflowCommand.CASE_FILE)); - String histo=line.getOptionValue(OnlineWorkflowCommand.HISTODB_INTERVAL); - if(histo!=null) + String histo = line.getOptionValue(OnlineWorkflowCommand.HISTODB_INTERVAL); + if (histo != null) params.setHistoInterval(Interval.parse(histo)); - String states=line.getOptionValue(OnlineWorkflowCommand.STATES); - if(states!=null) + String states = line.getOptionValue(OnlineWorkflowCommand.STATES); + if (states != null) params.setStates(Integer.parseInt(states)); - String timeHorizon=line.getOptionValue(OnlineWorkflowCommand.TIME_HORIZON); - if(timeHorizon!=null) + String timeHorizon = 
line.getOptionValue(OnlineWorkflowCommand.TIME_HORIZON); + if (timeHorizon != null) params.setTimeHorizon(TimeHorizon.fromName(timeHorizon)); String workflowid = line.getOptionValue(OnlineWorkflowCommand.WORKFLOW_ID); - if(workflowid!=null) + if (workflowid != null) params.setOfflineWorkflowId(workflowid); String feAnalysisId = line.getOptionValue(OnlineWorkflowCommand.FEANALYSIS_ID); - if(feAnalysisId!=null) + if (feAnalysisId != null) params.setFeAnalysisId(feAnalysisId); - String rulesPurity=line.getOptionValue(OnlineWorkflowCommand.RULES_PURITY); - if(rulesPurity!=null) + String rulesPurity = line.getOptionValue(OnlineWorkflowCommand.RULES_PURITY); + if (rulesPurity != null) params.setRulesPurityThreshold(Double.parseDouble(rulesPurity)); - if(line.hasOption(OnlineWorkflowCommand.STORE_STATES)) + if (line.hasOption(OnlineWorkflowCommand.STORE_STATES)) params.setStoreStates(true); - if(line.hasOption(OnlineWorkflowCommand.ANALYSE_BASECASE)) + if (line.hasOption(OnlineWorkflowCommand.ANALYSE_BASECASE)) params.setAnalyseBasecase(true); - if(line.hasOption(OnlineWorkflowCommand.VALIDATION)) { + if (line.hasOption(OnlineWorkflowCommand.VALIDATION)) { params.setValidation(true); params.setStoreStates(true); // if validation then store states params.setAnalyseBasecase(true); // if validation then analyze base case @@ -136,41 +135,40 @@ public void run(CommandLine line) throws Exception { Set securityIndexes = null; if (line.hasOption(OnlineWorkflowCommand.SECURITY_INDEXES)) { - if ( !"ALL".equals(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES)) ) + if (!"ALL".equals(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES))) securityIndexes = Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES).split(",")) - .map(SecurityIndexType::valueOf) - .collect(Collectors.toSet()); + .map(SecurityIndexType::valueOf) + .collect(Collectors.toSet()); params.setSecurityIndexes(securityIndexes); } - if(line.hasOption(OnlineWorkflowCommand.MERGE_OPTIMIZED)) + if (line.hasOption(OnlineWorkflowCommand.MERGE_OPTIMIZED)) params.setMergeOptimized(true); - String limitReduction=line.getOptionValue(OnlineWorkflowCommand.LIMIT_REDUCTION); - if(limitReduction!=null) + String limitReduction = line.getOptionValue(OnlineWorkflowCommand.LIMIT_REDUCTION); + if (limitReduction != null) params.setLimitReduction(Float.parseFloat(limitReduction)); - if(line.hasOption(OnlineWorkflowCommand.HANDLE_VIOLATION_IN_N)) { + if (line.hasOption(OnlineWorkflowCommand.HANDLE_VIOLATION_IN_N)) { params.setHandleViolationsInN(true); params.setAnalyseBasecase(true); // if I need to handle violations in N, I need to analyze base case } - String constraintMargin=line.getOptionValue(OnlineWorkflowCommand.CONSTRAINT_MARGIN); - if(constraintMargin!=null) + String constraintMargin = line.getOptionValue(OnlineWorkflowCommand.CONSTRAINT_MARGIN); + if (constraintMargin != null) params.setConstraintMargin(Float.parseFloat(constraintMargin)); - String urlString = "service:jmx:rmi:///jndi/rmi://"+startconfig.getJmxHost()+":"+startconfig.getJmxPort()+"/jmxrmi"; + String urlString = "service:jmx:rmi:///jndi/rmi://" + startconfig.getJmxHost() + ":" + startconfig.getJmxPort() + "/jmxrmi"; JMXServiceURL serviceURL = new JMXServiceURL(urlString); Map jmxEnv = new HashMap<>(); - JMXConnector connector = JMXConnectorFactory.connect(serviceURL, jmxEnv) ; + JMXConnector connector = JMXConnectorFactory.connect(serviceURL, jmxEnv); MBeanServerConnection mbsc = connector.getMBeanServerConnection(); ObjectName name = new 
ObjectName(LocalOnlineApplicationMBean.BEAN_NAME); LocalOnlineApplicationMBean application = MBeanServerInvocationHandler.newProxyInstance(mbsc, name, LocalOnlineApplicationMBean.class, false); - if( line.hasOption(OnlineWorkflowCommand.START_CMD)) - { + if (line.hasOption(OnlineWorkflowCommand.START_CMD)) { if (params.getCaseFile() != null) { System.out.println("starting Online Workflow, caseFile " + params.getCaseFile()); application.startWorkflow(startconfig, params); @@ -182,9 +180,7 @@ public void run(CommandLine line) throws Exception { application.startWorkflow(startconfig, params); } } - } - else if(line.hasOption(OnlineWorkflowCommand.SHUTDOWN_CMD)) - { + } else if (line.hasOption(OnlineWorkflowCommand.SHUTDOWN_CMD)) { application.shutdown(); } From edf0b81fd32241f9490caad1d939294de80a06d2 Mon Sep 17 00:00:00 2001 From: Christian Biasuzzi Date: Mon, 21 Nov 2016 20:19:18 +0100 Subject: [PATCH 3/7] exclusive choice between parameters case-file and case-type, countries, base-case --- .../online/OnlineWorkflowParameters.java | 101 +++++++++++++++--- .../online/db/OnlineDbMVStore.java | 14 ++- .../online/tools/OnlineWorkflowCommand.java | 3 +- .../online/tools/OnlineWorkflowTool.java | 91 ++++++++++------ 4 files changed, 151 insertions(+), 58 deletions(-) diff --git a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java index 17ced399..1562f33e 100644 --- a/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java +++ b/modules/src/main/java/eu/itesla_project/modules/online/OnlineWorkflowParameters.java @@ -51,7 +51,7 @@ public class OnlineWorkflowParameters implements Serializable { public static OnlineWorkflowParameters loadDefault() { ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("online-default-parameters"); - DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate")); + int states = config.getIntProperty("states"); String offlineWorkflowId = config.getStringProperty("offlineWorkflowId", null); TimeHorizon timeHorizon = TimeHorizon.fromName(config.getStringProperty("timeHorizon").trim()); @@ -62,14 +62,36 @@ public static OnlineWorkflowParameters loadDefault() { boolean analyseBasecase = config.getBooleanProperty("analyseBasecase", true); boolean validation = config.getBooleanProperty("validation", false); Set securityIndexes = config.getEnumSetProperty("securityIndexes", SecurityIndexType.class, null); - CaseType caseType = config.getEnumProperty("caseType", CaseType.class); - Set countries = config.getEnumSetProperty("countries", Country.class); boolean mergeOptimized = config.getBooleanProperty("mergeOptimized", DEFAULT_MERGE_OPTIMIZED); float limitReduction = config.getFloatProperty("limitReduction", DEFAULT_LIMIT_REDUCTION); boolean handleViolationsInN = config.getBooleanProperty("handleViolationsInN", DEFAULT_HANDLE_VIOLATIONS_IN_N); float constraintMargin = config.getFloatProperty("constraintMargin", DEFAULT_CONSTRAINT_MARGIN); - String caseFile = config.getStringProperty("caseFile", null); + String caseFile = config.getStringProperty("caseFile", null); + if (caseFile != null) { + if ((config.getStringProperty("baseCaseDate", null) != null) + || (config.getStringProperty("caseType", null) != null) + || (config.getStringProperty("countries", null) != null)) + throw new RuntimeException("caseFile and ( baseCaseDate, caseType, countries ) are mutually exclusive options"); + return new 
OnlineWorkflowParameters(states, + histoInterval, + offlineWorkflowId, + timeHorizon, + feAnalysisId, + rulesPurityThreshold, + storeStates, + analyseBasecase, + validation, + securityIndexes, + mergeOptimized, + limitReduction, + handleViolationsInN, + constraintMargin, + caseFile); + } + DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate")); + CaseType caseType = config.getEnumProperty("caseType", CaseType.class); + Set countries = config.getEnumSetProperty("countries", Country.class); return new OnlineWorkflowParameters(baseCaseDate, states, histoInterval, @@ -86,19 +108,13 @@ public static OnlineWorkflowParameters loadDefault() { mergeOptimized, limitReduction, handleViolationsInN, - constraintMargin, - caseFile - ); + constraintMargin); } - public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon, - String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation, - Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized, - float limitReduction, boolean handleViolationsInN, float constraintMargin, String caseFile) { - Objects.requireNonNull(baseCaseDate); - Objects.requireNonNull(histoInterval); - Objects.requireNonNull(countries); - Objects.requireNonNull(caseType); + private OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon, + String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation, + Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized, + float limitReduction, boolean handleViolationsInN, float constraintMargin, String caseFile) { this.baseCaseDate = baseCaseDate; this.states = states; this.histoInterval = histoInterval; @@ -119,6 +135,61 @@ public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval hist this.caseFile = caseFile; } + + public OnlineWorkflowParameters(DateTime baseCaseDate, int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon, + String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation, + Set securityIndexes, CaseType caseType, Set countries, boolean mergeOptimized, + float limitReduction, boolean handleViolationsInN, float constraintMargin) { + this(baseCaseDate, + states, + histoInterval, + offlineWorkflowId, + timeHorizon, + feAnalysisId, + rulesPurityThreshold, + storeStates, + analyseBasecase, + validation, + securityIndexes, + caseType, + countries, + mergeOptimized, + limitReduction, + handleViolationsInN, + constraintMargin, + null); + Objects.requireNonNull(this.baseCaseDate); + Objects.requireNonNull(this.countries); + Objects.requireNonNull(this.caseType); + Objects.requireNonNull(this.histoInterval); + } + + public OnlineWorkflowParameters(int states, Interval histoInterval, String offlineWorkflowId, TimeHorizon timeHorizon, + String feAnalysisId, double rulesPurityThreshold, boolean storeStates, boolean analyseBasecase, boolean validation, + Set securityIndexes, boolean mergeOptimized, + float limitReduction, boolean handleViolationsInN, float constraintMargin, String caseFile) { + this(null, + states, + histoInterval, + offlineWorkflowId, + timeHorizon, + feAnalysisId, + rulesPurityThreshold, + storeStates, + analyseBasecase, + validation, + securityIndexes, + null, + null, + mergeOptimized, + 
limitReduction, + handleViolationsInN, + constraintMargin, + caseFile); + Objects.requireNonNull(this.caseFile); + Objects.requireNonNull(this.histoInterval); + } + public DateTime getBaseCaseDate() { return baseCaseDate; } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java index cb1fd6c3..28349ca2 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/db/OnlineDbMVStore.java @@ -867,11 +867,7 @@ public OnlineWorkflowParameters getWorkflowParameters(String workflowId) { float constraintMargin = OnlineWorkflowParameters.DEFAULT_CONSTRAINT_MARGIN; if (storedParametersMap.containsKey(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)) constraintMargin = Float.parseFloat(storedParametersMap.get(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)); - String caseFile = null; - if (storedParametersMap.containsKey(STORED_PARAMETERS_CASE_FILE_KEY)) - caseFile = storedParametersMap.get(STORED_PARAMETERS_CASE_FILE_KEY); - - return new OnlineWorkflowParameters(baseCaseDate, + OnlineWorkflowParameters onlineWfPars = new OnlineWorkflowParameters(baseCaseDate, states, histoInterval, offlineWorkflowId, @@ -887,9 +883,11 @@ public OnlineWorkflowParameters getWorkflowParameters(String workflowId) { mergeOptimized, limitReduction, handleViolations, - constraintMargin, - caseFile - ); + constraintMargin); + if (storedParametersMap.containsKey(STORED_PARAMETERS_CASE_FILE_KEY)) { + onlineWfPars.setCaseFile(storedParametersMap.get(STORED_PARAMETERS_CASE_FILE_KEY)); + } + return onlineWfPars; } else { LOGGER.warn("No configuration parameters of wf {} stored in online db", workflowId); return null; diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java index 3498843b..7e99d30f 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowCommand.java @@ -197,7 +197,7 @@ public Options getOptions() { .build()); opts.addOption(Option.builder().longOpt(CASE_FILE) - .desc("case file - Note: override (base-case, case-type, countries) parameters") + .desc("case file: Note: parameter " + CASE_FILE + "cannot be used together with parameters " + BASE_CASE + ", " + CASE_TYPE + ", " + COUNTRIES + ", " + BASECASES_INTERVAL) .hasArg() .argName(CASE_FILE) .build()); @@ -207,7 +207,6 @@ public Options getOptions() { @Override public String getUsageFooter() { - return null; } diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java index 6717fc66..168d786d 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/OnlineWorkflowTool.java @@ -21,6 +21,7 @@ import eu.itesla_project.online.OnlineWorkflowStartParameters; import eu.itesla_project.simulation.securityindexes.SecurityIndexType; import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.HelpFormatter; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -44,6 +45,14 @@ public Command getCommand() { return OnlineWorkflowCommand.INSTANCE; } + private void showHelp(String 
message) { + System.err.println(message); + System.err.println(); + HelpFormatter formatter = new HelpFormatter(); + // it would be nice to have access to the private method eu.itesla_project.commons.tools.Main.printCommandUsage + formatter.printHelp(80, getCommand().getName(), "", getCommand().getOptions(), "\n" + Objects.toString(getCommand().getUsageFooter(), ""), true); + } + @Override public void run(CommandLine line) throws Exception { @@ -59,43 +68,57 @@ public void run(CommandLine line) throws Exception { if (threads != null) startconfig.setThreads(Integer.valueOf(threads)); - OnlineWorkflowParameters params = OnlineWorkflowParameters.loadDefault(); - - if (line.hasOption(OnlineWorkflowCommand.CASE_TYPE)) - params.setCaseType(CaseType.valueOf(line.getOptionValue(OnlineWorkflowCommand.CASE_TYPE))); - - Set countries = null; - if (line.hasOption(OnlineWorkflowCommand.COUNTRIES)) { - countries = Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.COUNTRIES).split(",")) - .map(Country::valueOf) - .collect(Collectors.toSet()); - params.setCountries(countries); - } - Set baseCasesSet = null; - if (line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)) { - Interval basecasesInterval = Interval.parse(line.getOptionValue(OnlineWorkflowCommand.BASECASES_INTERVAL)); - OnlineConfig oConfig = OnlineConfig.load(); - CaseRepository caseRepo = oConfig.getCaseRepositoryFactoryClass().newInstance().create(new LocalComputationManager()); - baseCasesSet = caseRepo.dataAvailable(params.getCaseType(), params.getCountries(), basecasesInterval); - System.out.println("Base cases available for interval " + basecasesInterval.toString()); - baseCasesSet.forEach(x -> { - System.out.println(" " + x); - }); - } - if (baseCasesSet == null) { - baseCasesSet = new HashSet(); - String base = line.getOptionValue(OnlineWorkflowCommand.BASE_CASE); - if (base != null) { - baseCasesSet.add(DateTime.parse(base)); - } else { - baseCasesSet.add(params.getBaseCaseDate()); + OnlineWorkflowParameters params = OnlineWorkflowParameters.loadDefault(); + boolean atLeastOneBaseCaseLineParam = line.hasOption(OnlineWorkflowCommand.CASE_TYPE) || line.hasOption(OnlineWorkflowCommand.COUNTRIES) + || line.hasOption(OnlineWorkflowCommand.BASE_CASE) || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL); + boolean allNeededBaseCaseLineParams = line.hasOption(OnlineWorkflowCommand.CASE_TYPE) && line.hasOption(OnlineWorkflowCommand.COUNTRIES) + && (line.hasOption(OnlineWorkflowCommand.BASE_CASE) || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)); + + if (line.hasOption(OnlineWorkflowCommand.CASE_FILE)) { + if (atLeastOneBaseCaseLineParam) { + showHelp("parameter " + OnlineWorkflowCommand.CASE_FILE + " cannot be used together with parameters: " + OnlineWorkflowCommand.CASE_TYPE + ", " + OnlineWorkflowCommand.COUNTRIES + ", " + OnlineWorkflowCommand.BASE_CASE + ", " + OnlineWorkflowCommand.BASECASES_INTERVAL); + return; } - } - - if (line.hasOption(OnlineWorkflowCommand.CASE_FILE)) params.setCaseFile(line.getOptionValue(OnlineWorkflowCommand.CASE_FILE)); + } else { + if (params.getCaseFile() != null) { + if (atLeastOneBaseCaseLineParam) { + if (!allNeededBaseCaseLineParams) { + showHelp("to override default parameter " + OnlineWorkflowCommand.CASE_FILE + ", all these parameters must be specified: " + OnlineWorkflowCommand.CASE_TYPE + ", " + OnlineWorkflowCommand.COUNTRIES + ", " + OnlineWorkflowCommand.BASE_CASE + " or " + OnlineWorkflowCommand.BASECASES_INTERVAL); + return; + } + params.setCaseFile(null); + } + } + if 
(line.hasOption(OnlineWorkflowCommand.CASE_TYPE)) + params.setCaseType(CaseType.valueOf(line.getOptionValue(OnlineWorkflowCommand.CASE_TYPE))); + if (line.hasOption(OnlineWorkflowCommand.COUNTRIES)) { + params.setCountries(Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.COUNTRIES).split(",")) + .map(Country::valueOf) + .collect(Collectors.toSet())); + } + if (line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)) { + Interval basecasesInterval = Interval.parse(line.getOptionValue(OnlineWorkflowCommand.BASECASES_INTERVAL)); + OnlineConfig oConfig = OnlineConfig.load(); + CaseRepository caseRepo = oConfig.getCaseRepositoryFactoryClass().newInstance().create(new LocalComputationManager()); + baseCasesSet = caseRepo.dataAvailable(params.getCaseType(), params.getCountries(), basecasesInterval); + System.out.println("Base cases available for interval " + basecasesInterval.toString()); + baseCasesSet.forEach(x -> { + System.out.println(" " + x); + }); + } + if (baseCasesSet == null) { + baseCasesSet = new HashSet<>(); + String base = line.getOptionValue(OnlineWorkflowCommand.BASE_CASE); + if (base != null) { + baseCasesSet.add(DateTime.parse(base)); + } else { + baseCasesSet.add(params.getBaseCaseDate()); + } + } + } String histo = line.getOptionValue(OnlineWorkflowCommand.HISTODB_INTERVAL); if (histo != null) @@ -182,6 +205,8 @@ public void run(CommandLine line) throws Exception { } } else if (line.hasOption(OnlineWorkflowCommand.SHUTDOWN_CMD)) { application.shutdown(); + } else { + showHelp(""); } } From fda602b85f837502ef02760c2f927e7b995e490c Mon Sep 17 00:00:00 2001 From: massimoferraro Date: Thu, 24 Nov 2016 17:07:39 +0100 Subject: [PATCH 4/7] Added itools command for exporting, in AMPL format, network data of sampled states of an online workflow --- .../AmplExportOnlineWorkflowStatesTool.java | 119 ++++++++++++++++++ 1 file changed, 119 insertions(+) create mode 100644 online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java new file mode 100644 index 00000000..0cec29b8 --- /dev/null +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java @@ -0,0 +1,119 @@ +/** + * Copyright (c) 2016, RTE (http://www.rte-france.com) + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ */ +package eu.itesla_project.online.tools; + +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; + +import com.google.auto.service.AutoService; + +import eu.itesla_project.commons.tools.Command; +import eu.itesla_project.commons.tools.Tool; +import eu.itesla_project.iidm.datasource.DataSource; +import eu.itesla_project.iidm.datasource.FileDataSource; +import eu.itesla_project.iidm.export.Exporters; +import eu.itesla_project.iidm.network.Network; +import eu.itesla_project.modules.online.OnlineConfig; +import eu.itesla_project.modules.online.OnlineDb; + +/** + * + * @author Massimo Ferraro + */ +@AutoService(Tool.class) +public class AmplExportOnlineWorkflowStatesTool implements Tool { + + private static Command COMMAND = new Command() { + + @Override + public String getName() { + return "ampl-export-online-workflow-states"; + } + + @Override + public String getTheme() { + return Themes.ONLINE_WORKFLOW; + } + + @Override + public String getDescription() { + return "Export network data of stored states of an online workflow, in AMPL format"; + } + + @Override + public Options getOptions() { + Options options = new Options(); + options.addOption(Option.builder().longOpt("workflow") + .desc("the workflow id") + .hasArg() + .required() + .argName("ID") + .build()); + options.addOption(Option.builder().longOpt("state") + .desc("the state id") + .hasArg() + .argName("STATE") + .build()); + options.addOption(Option.builder().longOpt("folder") + .desc("the folder where to export the network data") + .hasArg() + .required() + .argName("FOLDER") + .build()); + return options; + } + + @Override + public String getUsageFooter() { + return null; + } + + }; + + @Override + public Command getCommand() { + return COMMAND; + } + + @Override + public void run(CommandLine line) throws Exception { + OnlineConfig config = OnlineConfig.load(); + OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create(); + String workflowId = line.getOptionValue("workflow"); + List states = line.hasOption("state") ? 
Arrays.asList(Integer.valueOf(line.getOptionValue("state"))) : onlinedb.listStoredStates(workflowId); + Path folder = Paths.get(line.getOptionValue("folder")); + System.out.println("Exporting in AMPL format network data of workflow " + workflowId +", " + states.size() + " state(s), to folder " + folder); + states.stream().forEach(state -> exportState(onlinedb, workflowId, state, folder)); + onlinedb.close(); + } + + private void exportState(OnlineDb onlinedb, String workflowId, Integer stateId, Path folder) { + System.out.println("Exporting network data of workflow " + workflowId +", state " + stateId); + Network network = onlinedb.getState(workflowId, stateId); + if ( network == null ) { + System.out.println("Cannot export network data: no stored state " + stateId + " for workflow " + workflowId); + return; + } + Path stateFolder = Paths.get(folder.toString(), "wf_" + workflowId + "_state_" + stateId); + System.out.println("Exporting network data of workflow " + workflowId +", state " + stateId + " to folder "+ stateFolder); + if ( stateFolder.toFile().exists() ) { + System.out.println("Cannot export network data of workflow " + workflowId +", state " + stateId + ": folder "+ stateFolder + " already exists"); + return; + } else + stateFolder.toFile().mkdir(); + DataSource dataSource = new FileDataSource(stateFolder, "wf_" + workflowId + "_state_" + stateId); + Exporters.export("AMPL", network, new Properties(), dataSource); + } + +} From be6aa25045ed309e424956fb875f5102b1b780cb Mon Sep 17 00:00:00 2001 From: Christian Biasuzzi Date: Fri, 25 Nov 2016 20:07:54 +0100 Subject: [PATCH 5/7] more meaningful error message when there are no contingencies to simulate --- .../itesla_project/eurostag/EurostagScenario.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/eurostag-integration/src/main/java/eu/itesla_project/eurostag/EurostagScenario.java b/eurostag-integration/src/main/java/eu/itesla_project/eurostag/EurostagScenario.java index 130c6c14..396a0b72 100644 --- a/eurostag-integration/src/main/java/eu/itesla_project/eurostag/EurostagScenario.java +++ b/eurostag-integration/src/main/java/eu/itesla_project/eurostag/EurostagScenario.java @@ -7,10 +7,10 @@ package eu.itesla_project.eurostag; import com.google.common.base.Strings; -import eu.itesla_project.iidm.eurostag.export.EurostagDictionary; -import eu.itesla_project.iidm.network.*; import eu.itesla_project.contingency.Contingency; import eu.itesla_project.contingency.ContingencyElement; +import eu.itesla_project.iidm.eurostag.export.EurostagDictionary; +import eu.itesla_project.iidm.network.*; import eu.itesla_project.simulation.SimulationParameters; import org.jboss.shrinkwrap.api.Domain; import org.jboss.shrinkwrap.api.GenericArchive; @@ -136,8 +136,7 @@ public void writeFaultSeq(BufferedWriter writer, Contingency contingency, Networ writer.newLine(); for (ContingencyElement element : contingency.getElements()) { switch (element.getType()) { - case LINE: - { + case LINE: { Line l = network.getLine(element.getId()); if (l == null) { throw new RuntimeException("Line '" + element.getId() + "' not found"); @@ -194,8 +193,7 @@ public void writeFaultSeq(BufferedWriter writer, Contingency contingency, Networ } break; - case GENERATOR: - { + case GENERATOR: { Generator g = network.getGenerator(element.getId()); if (g == null) { throw new RuntimeException("Generator '" + element.getId() + "' not found"); @@ -252,6 +250,9 @@ public GenericArchive writeFaultSeqArchive(List contingencies, Netw } public GenericArchive 
writeFaultSeqArchive(Domain domain, List contingencies, Network network, EurostagDictionary dictionary, Function seqFileNameFct) throws IOException { + if ((contingencies == null) || (contingencies.isEmpty())) { + throw new RuntimeException("contingencies list is empty, cannot write .seq scenario files"); + } GenericArchive archive = domain.getArchiveFactory().create(GenericArchive.class); try (FileSystem fileSystem = ShrinkWrapFileSystems.newFileSystem(archive)) { Path rootDir = fileSystem.getPath("/"); From 1a190779984799d3908841896287117047f1c9f8 Mon Sep 17 00:00:00 2001 From: Mathieu BAGUE Date: Sat, 26 Nov 2016 13:41:18 +0100 Subject: [PATCH 6/7] Fix hdfgroup packs URL MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit HDFGroup package URLs have been permanently moved from HTTP to HTTPS. See the wget output below: ``` mathbagu@Debian:/tmp$ wget http://www.hdfgroup.org/ftp/HDF5//releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz --2016-11-26 13:38:26-- http://www.hdfgroup.org/ftp/HDF5//releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz Resolving www.hdfgroup.org (www.hdfgroup.org)... 161.47.20.161, 2001:4801:7905:100:aff8:a2d8:0:1019 Connecting to www.hdfgroup.org (www.hdfgroup.org)|161.47.20.161|:80... connected. HTTP request sent, awaiting response... 301 Moved Permanently Location: https://www.hdfgroup.org/ftp/HDF5//releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz [following] ``` --- thirdparty/CMakeLists.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/thirdparty/CMakeLists.txt b/thirdparty/CMakeLists.txt index 105c222c..2a440270 100644 --- a/thirdparty/CMakeLists.txt +++ b/thirdparty/CMakeLists.txt @@ -40,9 +40,9 @@ if (download) set(libarchive_url http://www.libarchive.org/downloads/libarchive-3.1.2.tar.gz) set(boost_url http://sourceforge.net/projects/boost/files/boost/1.57.0/boost_1_57_0.tar.gz/download) set(log4cpp_url http://sourceforge.net/projects/log4cpp/files/log4cpp-1.1.x%20%28new%29/log4cpp-1.1/log4cpp-1.1.1.tar.gz) - set(szip_url http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz) - set(zlib_url http://www.hdfgroup.org/ftp/lib-external/zlib/zlib-1.2.5.tar.gz) - set(hdf5_url http://www.hdfgroup.org/ftp/HDF5//releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz) + set(szip_url https://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz) + set(zlib_url https://www.hdfgroup.org/ftp/lib-external/zlib/zlib-1.2.5.tar.gz) + set(hdf5_url https://www.hdfgroup.org/ftp/HDF5//releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz) set(matio_url http://downloads.sourceforge.net/project/matio/matio/1.5.2/matio-1.5.2.tar.gz) # set(openmpi_url http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.3.tar.bz2) else() From bc9492631736dbd1de8432ac55e29fedb71a087c Mon Sep 17 00:00:00 2001 From: mathbagu Date: Sat, 26 Nov 2016 15:56:16 +0100 Subject: [PATCH 7/7] Change mkdir to mkdirs and check the return value.
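For context (this note and the snippet are illustrative only, not part of the diff below): java.io.File.mkdir() creates only the leaf directory, while mkdirs() also creates any missing parent directories. Both report failure solely through their boolean return value rather than an exception, and mkdirs() also returns false when the directory already exists, which the tool rules out beforehand with its exists() check. A minimal sketch of the pattern, using a hypothetical path:

```
import java.io.File;

public class MkdirsCheckExample {
    public static void main(String[] args) {
        // Hypothetical output path, used only for illustration.
        File stateFolder = new File("/tmp/wf_demo/state_0");
        // mkdirs() also creates missing parent directories; like mkdir(),
        // it signals failure only through its boolean return value, so the
        // result has to be checked explicitly.
        if (!stateFolder.mkdirs()) {
            System.out.println("unable to create " + stateFolder + " folder");
            return;
        }
        System.out.println("created " + stateFolder);
    }
}
```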
--- .../AmplExportOnlineWorkflowStatesTool.java | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java b/online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java index 0cec29b8..d37bed0b 100644 --- a/online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java +++ b/online-workflow/src/main/java/eu/itesla_project/online/tools/AmplExportOnlineWorkflowStatesTool.java @@ -93,25 +93,28 @@ public void run(CommandLine line) throws Exception { String workflowId = line.getOptionValue("workflow"); List states = line.hasOption("state") ? Arrays.asList(Integer.valueOf(line.getOptionValue("state"))) : onlinedb.listStoredStates(workflowId); Path folder = Paths.get(line.getOptionValue("folder")); - System.out.println("Exporting in AMPL format network data of workflow " + workflowId +", " + states.size() + " state(s), to folder " + folder); - states.stream().forEach(state -> exportState(onlinedb, workflowId, state, folder)); + System.out.println("Exporting in AMPL format network data of workflow " + workflowId + ", " + states.size() + " state(s), to folder " + folder); + states.forEach(state -> exportState(onlinedb, workflowId, state, folder)); onlinedb.close(); } private void exportState(OnlineDb onlinedb, String workflowId, Integer stateId, Path folder) { System.out.println("Exporting network data of workflow " + workflowId +", state " + stateId); Network network = onlinedb.getState(workflowId, stateId); - if ( network == null ) { + if (network == null) { System.out.println("Cannot export network data: no stored state " + stateId + " for workflow " + workflowId); return; } Path stateFolder = Paths.get(folder.toString(), "wf_" + workflowId + "_state_" + stateId); - System.out.println("Exporting network data of workflow " + workflowId +", state " + stateId + " to folder "+ stateFolder); - if ( stateFolder.toFile().exists() ) { - System.out.println("Cannot export network data of workflow " + workflowId +", state " + stateId + ": folder "+ stateFolder + " already exists"); + System.out.println("Exporting network data of workflow " + workflowId + ", state " + stateId + " to folder " + stateFolder); + if (stateFolder.toFile().exists()) { + System.out.println("Cannot export network data of workflow " + workflowId + ", state " + stateId + ": folder " + stateFolder + " already exists"); return; - } else - stateFolder.toFile().mkdir(); + } + if (! stateFolder.toFile().mkdirs()) { + System.out.println("Cannot export network data of workflow " + workflowId + ", state " + stateId + ": unable to create " + stateFolder + " folder"); + return; + } DataSource dataSource = new FileDataSource(stateFolder, "wf_" + workflowId + "_state_" + stateId); Exporters.export("AMPL", network, new Properties(), dataSource); }
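As a quick reference, the export path added by PATCH 4/7 and hardened by PATCH 7/7 can be condensed into the following self-contained sketch. It uses only calls that appear in AmplExportOnlineWorkflowStatesTool above; the workflow id, state id and output folder are hypothetical placeholders.

```
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;

import eu.itesla_project.iidm.datasource.DataSource;
import eu.itesla_project.iidm.datasource.FileDataSource;
import eu.itesla_project.iidm.export.Exporters;
import eu.itesla_project.iidm.network.Network;
import eu.itesla_project.modules.online.OnlineConfig;
import eu.itesla_project.modules.online.OnlineDb;

public class AmplExportSketch {

    public static void main(String[] args) throws Exception {
        String workflowId = "20161126_1330_demo";     // hypothetical workflow id
        Integer stateId = 0;                          // hypothetical sampled state
        Path folder = Paths.get("/tmp/ampl-export");  // hypothetical output folder

        OnlineConfig config = OnlineConfig.load();
        OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create();
        try {
            Network network = onlinedb.getState(workflowId, stateId);
            if (network == null) {
                System.out.println("no stored state " + stateId + " for workflow " + workflowId);
                return;
            }
            // one sub-folder per exported state, mirroring the naming used by the tool
            Path stateFolder = folder.resolve("wf_" + workflowId + "_state_" + stateId);
            if (!stateFolder.toFile().mkdirs()) {
                System.out.println("unable to create " + stateFolder + " folder");
                return;
            }
            DataSource dataSource = new FileDataSource(stateFolder, "wf_" + workflowId + "_state_" + stateId);
            Exporters.export("AMPL", network, new Properties(), dataSource);
        } finally {
            onlinedb.close();
        }
    }
}
```

From the command line, the same export is presumably driven through the itools launcher with the options declared above, e.g. `itools ampl-export-online-workflow-states --workflow <ID> --folder <FOLDER>`, optionally adding `--state <STATE>` to restrict the export to a single stored state.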