List of usage examples for java.util.stream.IntStream.range
public static IntStream range(int startInclusive, int endExclusive)
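Before the project-specific examples below, here is a minimal, self-contained sketch (class and variable names are illustrative, not taken from any of the source files) of what range(int startInclusive, int endExclusive) produces: an ordered stream of the ints from startInclusive up to, but not including, endExclusive.

import java.util.stream.IntStream;

public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Prints 0 1 2 3 4 -- the end bound 5 is exclusive.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // range(0, n) is a common way to iterate over indices in a stream pipeline.
        int sumOfSquares = IntStream.range(1, 4).map(i -> i * i).sum(); // 1 + 4 + 9 = 14
        System.out.println(sumOfSquares);
    }
}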
From source file:de.tum.bgu.msm.syntheticPopulationGenerator.capeTown.SyntheticPopCT.java
private void runIPUbyCityAndCounty() {
    //IPU process for independent municipalities (only household attributes)
    logger.info(" Starting to prepare the data for IPU");

    //Read the frequency matrix
    int[] microDataIds = frequencyMatrix.getColumnAsInt("ID");
    frequencyMatrix.buildIndex(frequencyMatrix.getColumnPosition("ID"));

    //Create the weights table (for all the municipalities)
    TableDataSet weightsMatrix = new TableDataSet();
    weightsMatrix.appendColumn(microDataIds, "ID");

    //Create errors by county
    TableDataSet errorsCounty = new TableDataSet();
    TableDataSet errorsMunicipality = new TableDataSet();
    TableDataSet errorsSummary = new TableDataSet();
    String[] labels = new String[] { "error", "iterations", "time" };
    errorsCounty = initializeErrors(errorsCounty, attributesCounty, countyID);
    errorsMunicipality = initializeErrors(errorsMunicipality, attributesMunicipality, cityID);
    errorsSummary = initializeErrors(errorsSummary, labels, countyID);

    //For each county---------------------------------------------
    for (int county : counties) {
        long startTime = System.nanoTime();
        municipalities = municipalitiesByCounty.get(county);

        //weights, values, control totals
        Map<Integer, double[]> weightsByMun = Collections.synchronizedMap(new HashMap<>());
        Map<Integer, double[]> minWeightsByMun = Collections.synchronizedMap(new HashMap<>());
        Map<String, int[]> valuesByHousehold = Collections.synchronizedMap(new HashMap<>());
        Map<String, Integer> totalCounty = Collections.synchronizedMap(new HashMap<>());
        Map<Integer, Map<String, Integer>> totalMunicipality = Collections.synchronizedMap(new HashMap<>());
        Map<Integer, Map<String, Double>> errorByMun = Collections.synchronizedMap(new HashMap<>());
        Map<String, Double> errorByRegion = Collections.synchronizedMap(new HashMap<>());
        double weightedSum0 = 0f;

        //parameters of the IPU
        int finish = 0;
        int iteration = 0;
        double maxError = 0.00001;
        int maxIterations = 500;
        double minError = 100000;
        double initialError = 10000;
        double improvementError = 0.001;
        double iterationError = 2;
        double increaseError = 1.05;

        //initialize errors, considering the first weight (equal to 1)
        for (String attribute : attributesCounty) {
            int[] values = new int[frequencyMatrix.getRowCount()];
            for (int i = 1; i <= frequencyMatrix.getRowCount(); i++) {
                values[i - 1] = (int) frequencyMatrix.getValueAt(i, attribute);
                if (attribute.equals(attributesCounty[0])) {
                    weightedSum0 = weightedSum0 + values[i - 1] * municipalities.size();
                }
            }
            valuesByHousehold.put(attribute, values);
            int total = (int) marginalsCounty.getIndexedValueAt(county, attribute);
            totalCounty.put(attribute, total);
            errorByRegion.put(attribute, 0.);
        }
        for (String attribute : attributesMunicipality) {
            int[] values = new int[frequencyMatrix.getRowCount()];
            for (int i = 1; i <= frequencyMatrix.getRowCount(); i++) {
                values[i - 1] = (int) frequencyMatrix.getValueAt(i, attribute);
            }
            valuesByHousehold.put(attribute, values);
            Iterator<Integer> iterator = municipalities.iterator();
            while (iterator.hasNext()) {
                Integer municipality = iterator.next();
                double[] dummy = SiloUtil.createArrayWithValue(frequencyMatrix.getRowCount(), 1.);
                weightsByMun.put(municipality, dummy);
                double[] dummy1 = SiloUtil.createArrayWithValue(frequencyMatrix.getRowCount(), 1.);
                minWeightsByMun.put(municipality, dummy1);
                if (totalMunicipality.containsKey(municipality)) {
                    Map<String, Integer> innerMap = totalMunicipality.get(municipality);
                    innerMap.put(attribute, (int) marginalsMunicipality.getIndexedValueAt(municipality, attribute));
                    totalMunicipality.put(municipality, innerMap);
                    Map<String, Double> inner1 = errorByMun.get(municipality);
                    inner1.put(attribute, 0.);
                    errorByMun.put(municipality, inner1);
                } else {
                    HashMap<String, Integer> inner = new HashMap<>();
                    inner.put(attribute, (int) marginalsMunicipality.getIndexedValueAt(municipality, attribute));
                    totalMunicipality.put(municipality, inner);
                    HashMap<String, Double> inner1 = new HashMap<>();
                    inner1.put(attribute, 0.);
                    errorByMun.put(municipality, inner1);
                }
            }
        }

        //for each iteration
        while (finish == 0 & iteration < maxIterations) {

            //For each municipality, obtain the weight matching each attribute
            ConcurrentExecutor executor = ConcurrentExecutor.cachedService();
            Iterator<Integer> iterator = municipalities.iterator();
            while (iterator.hasNext()) {
                Integer municipality = iterator.next();
                executor.addTaskToQueue(() -> {
                    for (String attribute : attributesMunicipality) {
                        double weightedSumMunicipality = SiloUtil.sumProduct(weightsByMun.get(municipality),
                                valuesByHousehold.get(attribute));
                        if (weightedSumMunicipality > 0.001) {
                            double updatingFactor = totalMunicipality.get(municipality).get(attribute)
                                    / weightedSumMunicipality;
                            double[] previousWeights = weightsByMun.get(municipality);
                            int[] values = valuesByHousehold.get(attribute);
                            double[] updatedWeights = new double[previousWeights.length];
                            IntStream.range(0, previousWeights.length).parallel()
                                    .forEach(id -> updatedWeights[id] = multiplyIfNotZero(previousWeights[id],
                                            values[id], updatingFactor));
                            weightsByMun.put(municipality, updatedWeights);
                        }
                    }
                    return null;
                });
            }
            executor.execute();

            //For each attribute at the region level (landkreise), we obtain the weights
            double weightedSumRegion = 0;
            for (String attribute : attributesCounty) {
                Iterator<Integer> iterator1 = municipalities.iterator();
                while (iterator1.hasNext()) {
                    Integer municipality = iterator1.next();
                    weightedSumRegion = weightedSumRegion
                            + SiloUtil.sumProduct(weightsByMun.get(municipality), valuesByHousehold.get(attribute));
                }
                if (weightedSumRegion > 0.001) {
                    double updatingFactor = totalCounty.get(attribute) / weightedSumRegion;
                    Iterator<Integer> iterator2 = municipalities.iterator();
                    while (iterator2.hasNext()) {
                        Integer municipality = iterator2.next();
                        double[] previousWeights = weightsByMun.get(municipality);
                        int[] values = valuesByHousehold.get(attribute);
                        double[] updatedWeights = new double[previousWeights.length];
                        IntStream.range(0, previousWeights.length).parallel()
                                .forEach(id -> updatedWeights[id] = multiplyIfNotZero(previousWeights[id],
                                        values[id], updatingFactor));
                        weightsByMun.put(municipality, updatedWeights);
                    }
                }
                //logger.info("Attribute " + attribute + ": sum is " + weightedSumRegion);
                weightedSumRegion = 0;
            }

            //obtain the errors by municipality
            double averageErrorIteration = 0.;
            int counter = 0;
            ConcurrentExecutor executor1 = ConcurrentExecutor.cachedService();
            Iterator<Integer> iterator1 = municipalities.iterator();
            while (iterator1.hasNext()) {
                Integer municipality = iterator1.next();
                Map<String, Double> errorsByMunicipality = Collections.synchronizedMap(new HashMap<>());
                executor1.addTaskToQueue(() -> {
                    for (String attribute : attributesMunicipality) {
                        double weightedSumMunicipality = SiloUtil.sumProduct(weightsByMun.get(municipality),
                                valuesByHousehold.get(attribute));
                        double errorByAttributeAndMunicipality = 0;
                        if (totalMunicipality.get(municipality).get(attribute) > 0) {
                            errorByAttributeAndMunicipality = Math.abs(
                                    (weightedSumMunicipality - totalMunicipality.get(municipality).get(attribute))
                                            / totalMunicipality.get(municipality).get(attribute));
                            errorsByMunicipality.put(attribute, errorByAttributeAndMunicipality);
                        }
                    }
                    return null;
                });
                errorByMun.put(municipality, errorsByMunicipality);
            }
            executor1.execute();
            for (int municipality : municipalities) {
                averageErrorIteration = averageErrorIteration + errorByMun.get(municipality).values().stream()
                        .mapToDouble(Number::doubleValue).sum();
                counter = counter + errorByMun.get(municipality).entrySet().size();
            }

            //obtain errors by county
            for (String attributeC : attributesCounty) {
                double errorByCounty = 0.;
                double weightedSumCounty = 0.;
                if (totalCounty.get(attributeC) > 0) {
                    Iterator<Integer> iterator3 = municipalities.iterator();
                    while (iterator3.hasNext()) {
                        Integer municipality = iterator3.next();
                        double weightedSum = SiloUtil.sumProduct(weightsByMun.get(municipality),
                                valuesByHousehold.get(attributeC));
                        weightedSumCounty += weightedSum;
                    }
                    errorByCounty = errorByCounty + Math
                            .abs((weightedSumCounty - totalCounty.get(attributeC)) / totalCounty.get(attributeC));
                    errorByRegion.put(attributeC, errorByCounty);
                    averageErrorIteration += errorByCounty;
                    counter++;
                }
            }
            averageErrorIteration = averageErrorIteration / counter;
            logger.info(" County " + county + ". Iteration " + iteration + ". Average error: "
                    + averageErrorIteration * 100 + " %.");

            //Stopping criteria: exceeds the maximum number of iterations or the maximum error is lower than the threshold
            if (averageErrorIteration < maxError) {
                finish = 1;
                logger.info(" IPU finished after :" + iteration + " iterations with a minimum average error of: "
                        + minError * 100 + " %.");
                iteration = maxIterations + 1;
            } else if ((iteration / iterationError) % 1 == 0) {
                if (Math.abs((initialError - averageErrorIteration) / initialError) < improvementError) {
                    finish = 1;
                    logger.info(" IPU finished after " + iteration
                            + " iterations because the error does not improve. The minimum average error is: "
                            + minError * 100 + " %.");
                } else if (averageErrorIteration == 0) {
                    finish = 1;
                    logger.info(" IPU finished after " + iteration
                            + " iterations because the error starts increasing. The minimum average error is: "
                            + minError * 100 + " %.");
                } else {
                    initialError = averageErrorIteration;
                    iteration = iteration + 1;
                }
            } else if (iteration == maxIterations) {
                finish = 1;
                logger.info(" IPU finished after the total number of iterations. The minimum average error is: "
                        + minError * 100 + " %.");
            } else {
                iteration = iteration + 1;
            }
            if (averageErrorIteration < minError) {
                for (int municipality : municipalities) {
                    double[] minW = weightsByMun.get(municipality);
                    minWeightsByMun.put(municipality, minW);
                }
                minError = averageErrorIteration;
            }
            long estimatedTime = (System.nanoTime() - startTime) / 1000000000;
            errorsSummary.setIndexedValueAt(county, "error", (float) minError);
            errorsSummary.setIndexedValueAt(county, "iterations", iteration);
            errorsSummary.setIndexedValueAt(county, "time", estimatedTime);
        }

        //Write the weights after finishing IPU for each municipality (saved each time over the previous version)
        for (int municipality : municipalities) {
            weightsMatrix.appendColumn(minWeightsByMun.get(municipality), Integer.toString(municipality));
        }
        SiloUtil.writeTableDataSet(weightsMatrix, "input/syntheticPopulation/weights1.csv");

        //Copy the errors per attribute
        for (String attribute : attributesCounty) {
            errorsCounty.setIndexedValueAt(county, attribute, errorByRegion.get(attribute).floatValue());
        }
        for (int municipality : municipalities) {
            for (String attribute : attributesMunicipality) {
                if (totalMunicipality.get(municipality).get(attribute) > 0) {
                    errorsMunicipality.setIndexedValueAt(municipality, attribute,
                            errorByMun.get(municipality).get(attribute).floatValue());
                }
            }
        }

        //Write the weights after finishing IPU for each county
        SiloUtil.writeTableDataSet(weightsMatrix, rb.getString(PROPERTIES_WEIGHTS_MATRIX));
        SiloUtil.writeTableDataSet(errorsMunicipality, "microData/interimFiles/errorsHouseholdIPU.csv");
        SiloUtil.writeTableDataSet(errorsCounty, "microData/interimFiles/errorsRegionIPU.csv");
        SiloUtil.writeTableDataSet(errorsSummary, "microData/interimFiles/summaryIPU.csv");
    }

    //Write the weights final table
    weightsTable = weightsMatrix;
    weightsTable.buildIndex(weightsTable.getColumnPosition("ID"));
    logger.info(" IPU finished");
}
From source file:org.hawkular.metrics.core.service.MetricsServiceITest.java
private static void assertNumericBucketsEquals(List<NumericBucketPoint> actual, List<NumericBucketPoint> expected) {
    String msg = String.format("%nExpected:%n%s%nActual:%n%s%n", expected, actual);
    assertEquals(actual.size(), expected.size(), msg);
    IntStream.range(0, actual.size()).forEach(i -> {
        NumericBucketPoint actualPoint = actual.get(i);
        NumericBucketPoint expectedPoint = expected.get(i);
        assertNumericBucketEquals(actualPoint, expectedPoint, msg);
    });
}
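The test helper above uses a common idiom: when two lists are expected to match element by element, IntStream.range(0, list.size()) supplies the indices so both lists can be read in step. A minimal standalone sketch of the same pattern (the class name and list contents are made up for illustration):

import java.util.List;
import java.util.stream.IntStream;

class PairwiseCompareDemo {
    public static void main(String[] args) {
        List<String> actual = List.of("a", "b", "c");
        List<String> expected = List.of("a", "b", "c");

        // Walk both lists by index and compare the elements at each position.
        IntStream.range(0, actual.size()).forEach(i -> {
            if (!actual.get(i).equals(expected.get(i))) {
                throw new AssertionError("Mismatch at index " + i);
            }
        });
        System.out.println("All " + actual.size() + " elements match");
    }
}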
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/**
 * Returns the replicated static set of variables
 * @param dynamicVariables a {@link DynamicVariables} object.
 * @param modelNumber a {@code int} that indicates the number of the model being constructed.
 * @return a {@link Variables} object.
 */
private Variables obtainReplicatedStaticVariables(DynamicVariables dynamicVariables, int modelNumber) {

    Variables variables = new Variables();

    // REPLICATIONS OF THE MAP VARIABLE (EACH 'nMergedClassVars' CONSECUTIVE ARE GROUPED)
    int replicationsMAPVariable = (modelNumber == 0 ? 0 : 1) + (nTimeSteps - modelNumber) / nMergedClassVars
            + ((nTimeSteps - modelNumber) % nMergedClassVars == 0 ? 0 : 1);

    IntStream.range(0, replicationsMAPVariable).forEach(mergedClassVarIndex -> {
        int nStatesMAPVariable = (int) Math.pow(MAPvariable.getNumberOfStates(), nMergedClassVars);

        // If it is the first merged variable and not the first model (not 'complete')
        if ((modelNumber != 0) && (mergedClassVarIndex == 0)) {
            nStatesMAPVariable = (int) Math.pow(MAPvariable.getNumberOfStates(), modelNumber);
        }

        // If it is the last merged variable and not 'complete'
        if (((nTimeSteps - modelNumber) % nMergedClassVars != 0)
                && (mergedClassVarIndex == replicationsMAPVariable - 1)) {
            nStatesMAPVariable = (int) Math.pow(MAPvariable.getNumberOfStates(),
                    (nTimeSteps - modelNumber) % nMergedClassVars);
        }

        variables.newMultinomialVariable(groupedClassName + "_t" + Integer.toString(mergedClassVarIndex),
                nStatesMAPVariable);
    });

    // REPLICATIONS OF THE REST OF VARIABLES (EACH ONE REPEATED 'nTimeSteps' TIMES)
    dynamicVariables.getListOfDynamicVariables().stream().filter(var -> !var.equals(MAPvariable))
            .forEach(dynVar -> IntStream.range(0, nTimeSteps).forEach(i -> {
                VariableBuilder aux = dynVar.getVariableBuilder();
                aux.setName(dynVar.getName() + "_t" + Integer.toString(i));
                variables.newVariable(aux);
            }));

    return variables;
}
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/**
 * Returns the static DAG structure.
 * @param dynamicDAG a {@link DynamicDAG} object.
 * @param variables a {@link Variables} object.
 * @param modelNumber an integer
 * @return a {@link DAG} object.
 */
private DAG obtainStaticDAG(DynamicDAG dynamicDAG, Variables variables, int modelNumber) {

    DAG dag = new DAG(variables);
    DynamicVariables dynamicVariables = dynamicDAG.getDynamicVariables();

    /*
     * PARENTS OF THE MAP VARIABLE (ONLY THE PREVIOUS TEMPORAL COPY OF ITSELF)
     */
    int replicationsMAPVariable = (modelNumber == 0 ? 0 : 1) + (nTimeSteps - modelNumber) / nMergedClassVars
            + ((nTimeSteps - modelNumber) % nMergedClassVars == 0 ? 0 : 1);

    IntStream.range(1, replicationsMAPVariable).forEach(mergedClassVarIndex -> {
        Variable staticVar = variables
                .getVariableByName(groupedClassName + "_t" + Integer.toString(mergedClassVarIndex));
        dag.getParentSet(staticVar).addParent(variables
                .getVariableByName(groupedClassName + "_t" + Integer.toString(mergedClassVarIndex - 1)));
    });

    /*
     * PARENTS OF THE REST OF VARIABLES
     */
    dynamicVariables.getListOfDynamicVariables().stream().filter(var -> !var.equals(MAPvariable))
            .forEach(dynVar -> {

                // ADD PARENTS AT TIME T=0
                Variable staticVar0 = variables.getVariableByName(dynVar.getName() + "_t0");
                List<Variable> parents0 = dynamicDAG.getParentSetTime0(dynVar).getParents();

                parents0.stream().filter(parent -> parent.equals(MAPvariable))
                        .forEach(parentaux2 -> dag.getParentSet(staticVar0)
                                .addParent(variables.getVariableByName(groupedClassName + "_t0")));
                parents0.stream().filter(parent -> !parent.equals(MAPvariable))
                        .forEach(parentaux2 -> dag.getParentSet(staticVar0)
                                .addParent(variables.getVariableByName(parentaux2.getName() + "_t0")));

                // ADD PARENTS AT TIMES T>0
                IntStream.range(1, nTimeSteps).forEach(timeStep -> {

                    Variable staticVar = variables
                            .getVariableByName(dynVar.getName() + "_t" + Integer.toString(timeStep));
                    List<Variable> parents = dynamicDAG.getParentSetTimeT(dynVar).getParents();

                    int indexMAPReplication = (timeStep >= modelNumber)
                            ? (modelNumber == 0 ? 0 : 1) + (timeStep - modelNumber) / nMergedClassVars
                            : (timeStep - modelNumber) / nMergedClassVars;
                    if (indexMAPReplication >= replicationsMAPVariable) {
                        System.out.println("Error in obtainStaticDAG: Bad MAP variable index");
                        System.exit(-50);
                    }

                    // PARENTS WHICH ARE INTERFACE VARIABLES
                    List<Variable> parentsInterface = parents.stream()
                            .filter(parentVar -> parentVar.isInterfaceVariable()).collect(Collectors.toList());

                    parentsInterface.stream().filter(parent -> parent.equals(MAPvariable)).forEach(
                            parentVar -> dag.getParentSet(staticVar).addParent(variables.getVariableByName(
                                    groupedClassName + "_t" + Integer.toString(indexMAPReplication - 1))));
                    parentsInterface.stream().filter(parent -> !parent.equals(MAPvariable))
                            .forEach(parentVar -> dag.getParentSet(staticVar)
                                    .addParent(variables.getVariableByName(parentVar.getName()
                                            .replace("_Interface", "_t" + Integer.toString(timeStep - 1)))));

                    // PARENTS WHICH ARE NOT INTERFACE VARIABLES
                    List<Variable> parentsNotInterface = parents.stream()
                            .filter(parentVar -> !parentVar.isInterfaceVariable()).collect(Collectors.toList());

                    parentsNotInterface.stream().filter(parent -> parent.equals(MAPvariable)).forEach(
                            parentVar -> dag.getParentSet(staticVar).addParent(variables.getVariableByName(
                                    groupedClassName + "_t" + Integer.toString(indexMAPReplication))));
                    parentsNotInterface.stream().filter(parent -> !parent.equals(MAPvariable)).forEach(
                            parentVar -> dag.getParentSet(staticVar).addParent(variables.getVariableByName(
                                    parentVar.getName() + "_t" + Integer.toString(timeStep))));
                });
            });

    return dag;
}
From source file:com.orange.ngsi2.server.Ngsi2BaseControllerTest.java
@Test
public void checkPattern() {
    assertTrue(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", "Bcn_Welt"));
    assertTrue(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", "Bcn-Welt"));
    assertFalse(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", "Bcn Welt"));
    assertFalse(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", "Bcn&Welt"));
    assertFalse(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", "Bcn?Welt"));
    assertFalse(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", "Bcn/Welt"));
    assertFalse(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", "Bcn#Welt"));
    String p257times = IntStream.range(0, 257).mapToObj(x -> "p").collect(Collectors.joining());
    assertTrue(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", p257times));
    String invalid256times = IntStream.range(0, 256).mapToObj(x -> "?").collect(Collectors.joining());
    assertFalse(Pattern.matches("[\\x21\\x22\\x24\\x25\\x27-\\x2E\\x30-\\x3E\\x40-\\x7E]*", invalid256times));
}
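The two IntStream.range calls in the test above only build test strings by repeating a character a fixed number of times. A brief standalone sketch of that idiom (class name is illustrative; on Java 11 and later the same result is available via String.repeat):

import java.util.stream.Collectors;
import java.util.stream.IntStream;

class RepeatDemo {
    public static void main(String[] args) {
        // Repeat "p" 257 times by mapping each index to the same string and joining.
        String p257times = IntStream.range(0, 257).mapToObj(x -> "p").collect(Collectors.joining());
        System.out.println(p257times.length()); // 257

        // Equivalent on Java 11+.
        String same = "p".repeat(257);
        System.out.println(same.equals(p257times)); // true
    }
}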
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/**
 * Returns the {@link BayesianNetwork} related to the static grouped class.
 * @param dag a {@link DAG} object.
 * @param variables a {@link Variables} object.
 * @param modelNumber an integer
 * @return a {@link BayesianNetwork} object.
 */
private BayesianNetwork obtainStaticMergedClassVarNetwork(DAG dag, Variables variables, int modelNumber) {

    DynamicDAG dynamicDAG = model.getDynamicDAG();
    BayesianNetwork bn = new BayesianNetwork(dag);

    Variable staticVar, dynVar;
    ConditionalDistribution conDist0, conDistT;

    int replicationsMAPVariable = (modelNumber == 0 ? 0 : 1) + (nTimeSteps - modelNumber) / nMergedClassVars
            + ((nTimeSteps - modelNumber) % nMergedClassVars == 0 ? 0 : 1);

    /*
     * OBTAIN AND SET THE CONDITIONAL (UNIVARIATE) DISTRIBUTION FOR THE GROUPED MAP/CLASS VARIABLE AT TIME T=0
     */
    staticVar = variables.getVariableByName(groupedClassName + "_t0");
    dynVar = model.getDynamicVariables().getVariableByName(MAPvarName);

    conDist0 = Serialization.deepCopy(model.getConditionalDistributionsTime0().get(dynVar.getVarID()));
    conDistT = Serialization.deepCopy(model.getConditionalDistributionsTimeT().get(dynVar.getVarID()));

    Multinomial multinomial = groupedDistributionMAPVariableTime0(dynVar, staticVar, conDist0, conDistT,
            modelNumber);

    multinomial.setVar(staticVar);
    bn.setConditionalDistribution(staticVar, multinomial);

    /*
     * CREATE THE GENERAL (TIME T) CONDITIONAL DISTRIBUTION OF THE GROUPED MAP/CLASS VARIABLE, IF NEEDED
     */
    Multinomial_MultinomialParents generalConditionalDistTimeT;

    // ToDo: Review Condition: nTimeSteps>= 4,5
    if (modelNumber == 0 && (replicationsMAPVariable > 2 || (replicationsMAPVariable == 2 && nTimeSteps >= 4))) {
        Variable staticVar_current = variables.getVariableByName(groupedClassName + "_t1");
        Variable staticVar_interface = variables.getVariableByName(groupedClassName + "_t0");

        List<Variable> parents = bn.getDAG().getParentSet(staticVar_current).getParents();
        ConditionalDistribution conDist_dynamic = Serialization.deepCopy(conDistT);

        generalConditionalDistTimeT = groupedDistributionMAPVariableTimeT(dynVar, staticVar_current,
                staticVar_interface.getNumberOfStates(), parents, conDist_dynamic, modelNumber);
    } else if (modelNumber > 0
            && (replicationsMAPVariable > 3 || replicationsMAPVariable == 3 && nTimeSteps >= 5)) {
        Variable staticVar_current = variables.getVariableByName(groupedClassName + "_t2");
        Variable staticVar_interface = variables.getVariableByName(groupedClassName + "_t1");

        List<Variable> parents = bn.getDAG().getParentSet(staticVar_current).getParents();
        ConditionalDistribution conDist_dynamic = Serialization.deepCopy(conDistT);

        generalConditionalDistTimeT = groupedDistributionMAPVariableTimeT(dynVar, staticVar_current,
                staticVar_interface.getNumberOfStates(), parents, conDist_dynamic, modelNumber);
    } else {
        // In this case, 'generalConditionalDistTimeT' will never be used.
        generalConditionalDistTimeT = new Multinomial_MultinomialParents(staticVar,
                bn.getDAG().getParentSet(staticVar).getParents());
    }

    /*
     * ADD CONDITIONAL DISTRIBUTIONS FOR THE REPLICATIONS OF THE GROUPED MAP/CLASS VARIABLE
     */
    // FIRST CONDITIONAL DISTRIBUTION, t_1 | t_0, IF IT'S NOT THE GENERAL ONE
    if (modelNumber != 0) {
        Variable staticVar0 = variables.getVariableByName(groupedClassName + "_t1");
        Variable staticVar0_interface = variables.getVariableByName(groupedClassName + "_t0");

        List<Variable> parents = bn.getDAG().getParentSet(staticVar0).getParents();
        ConditionalDistribution conDist_dynamic = Serialization
                .deepCopy(model.getConditionalDistributionsTimeT().get(dynVar.getVarID()));

        ConditionalDistribution conditionalDistTime1 = groupedDistributionMAPVariableTimeT(dynVar, staticVar0,
                staticVar0_interface.getNumberOfStates(), parents, conDist_dynamic, modelNumber);

        conditionalDistTime1.setVar(staticVar0);
        bn.setConditionalDistribution(staticVar0, conditionalDistTime1);
    }

    // INTERMEDIATE COMPLETE CONDITIONAL DISTRIBUTIONS, t_i | t_{i-1}, FOLLOWING THE GENERAL CONDITIONAL DISTRIBUTION
    int initialTimeStep = 2;
    int finalTimeStep = replicationsMAPVariable - 1;

    if (modelNumber == 0)
        initialTimeStep = 1;
    if ((nTimeSteps - modelNumber) % nMergedClassVars == 0)
        finalTimeStep = replicationsMAPVariable;

    IntStream.range(initialTimeStep, finalTimeStep).forEach(timeStep -> {
        Variable staticVar1 = variables.getVariableByName(groupedClassName + "_t" + Integer.toString(timeStep));

        ConditionalDistribution conditionalDistribution = Serialization.deepCopy(generalConditionalDistTimeT);
        conditionalDistribution.setConditioningVariables(dag.getParentSet(staticVar1).getParents());
        conditionalDistribution.setVar(staticVar1);

        bn.setConditionalDistribution(staticVar1, conditionalDistribution);
    });

    // LAST CONDITIONAL DISTRIBUTION, t_{nTimeSteps} | t_{nTimeSteps-1}, IF IT'S NOT THE GENERAL ONE
    if ((nTimeSteps - modelNumber) % nMergedClassVars != 0) {
        Variable staticVar1 = variables
                .getVariableByName(groupedClassName + "_t" + Integer.toString(replicationsMAPVariable - 1));
        Variable staticVar1_interface = variables
                .getVariableByName(groupedClassName + "_t" + Integer.toString(replicationsMAPVariable - 2));

        List<Variable> parents1 = bn.getDAG().getParentSet(staticVar1).getParents();

        Multinomial_MultinomialParents lastConDist = groupedDistributionMAPVariableTimeT(dynVar, staticVar1,
                staticVar1_interface.getNumberOfStates(), parents1, conDistT, modelNumber);

        bn.setConditionalDistribution(staticVar1, lastConDist);
    }

    /*
     * ADD CONDITIONAL DISTRIBUTIONS FOR VARIABLES HAVING AS A PARENT THE GROUPED MAP/CLASS VARIABLE, AT TIME T=0
     */
    List<Variable> dynVariables = model.getDynamicVariables().getListOfDynamicVariables();
    List<Variable> dynVariablesWithClassParent = dynVariables.stream().filter(var -> !var.equals(MAPvariable))
            .filter(var -> dynamicDAG.getParentSetTime0(var).contains(MAPvariable))
            .collect(Collectors.toList());
    List<Variable> dynVariablesNoClassParent = dynVariables.stream().filter(var -> !var.equals(MAPvariable))
            .filter(var -> !dynamicDAG.getParentSetTime0(var).contains(MAPvariable))
            .collect(Collectors.toList());

    //        dynVariablesWithClassParent.stream().forEach(dynVariable -> {
    //            ConditionalDistribution conditionalDistribution = Serialization.deepCopy(model.getConditionalDistributionTime0(dynVariable));
    //
    //            Variable staticMAPVar1 = variables.getVariableByName(groupedClassName + "_t0");
    //            Variable staticVar1 = variables.getVariableByName(dynVariable.getName() + "_t0");
    //            List<Variable> thisVarParents = conditionalDistribution.getConditioningVariables();
    //            List<Variable> parentList0 = bn.getDAG().getParentSet(staticVar1).getParents();
    //            int indexMAPvariable = thisVarParents.indexOf(MAPvariable);
    //
    //            thisVarParents.set(indexMAPvariable, staticMAPVar1);
    //
    //            //            conditionalDistribution.setConditioningVariables(parentList0);
    //            conditionalDistribution.setVar(staticVar1);
    //
    ////            if(modelNumber==1)
    ////                bn.setConditionalDistribution(staticVar1, conditionalDistribution);
    ////            else {
    //                BaseDistribution_MultinomialParents staticVar2Distribution = obtainDistributionOfMAPChildren(staticVar1, conditionalDistribution, parentList0, modelNumber, 0);
    //                bn.setConditionalDistribution(staticVar1, staticVar2Distribution);
    ////            }
    //
    //        });

    /*
     * ADD CONDITIONAL DISTRIBUTIONS FOR VARIABLES HAVING AS A PARENT THE GROUPED MAP/CLASS VARIABLE, AT ANY TIME T
     */
    dynVariablesWithClassParent.stream().forEach(dynVariable -> {
        IntStream.range(0, nTimeSteps).forEachOrdered(timeStep -> {

            ConditionalDistribution dynamicConDist;
            dynamicConDist = Serialization
                    .deepCopy(timeStep == 0 ? model.getConditionalDistributionTime0(dynVariable)
                            : model.getConditionalDistributionTimeT(dynVariable));
            //                )if(timeStep==0) {
            //                    = Serialization.deepCopy(model.getConditionalDistributionTime0(dynVariable));
            //                }
            //                else {
            //                    dynamicConDist = Serialization.deepCopy(model.getConditionalDistributionTimeT(dynVariable));
            //                }

            Variable staticVar2 = variables
                    .getVariableByName(dynVariable.getName() + "_t" + Integer.toString(timeStep));
            List<Variable> parentList = bn.getDAG().getParentSet(staticVar2).getParents();

            ConditionalDistribution staticVar2Distribution = obtainDistributionOfMAPChildren(staticVar2,
                    dynamicConDist, parentList, modelNumber, timeStep);
            bn.setConditionalDistribution(staticVar2, staticVar2Distribution);
        });
    });

    /*
     * ADD CONDITIONAL DISTRIBUTIONS FOR VARIABLES NOT HAVING AS A PARENT THE GROUPED MAP/CLASS VARIABLE, AT ANY TIME T
     */
    dynVariablesNoClassParent.stream().forEach(dynVariable -> {

        // TIME T=0
        ConditionalDistribution conditionalDistribution = Serialization
                .deepCopy(model.getConditionalDistributionTime0(dynVariable));
        Variable staticVar1 = variables.getVariableByName(dynVariable.getName() + "_t0");

        List<Variable> thisVarParents = conditionalDistribution.getConditioningVariables();
        thisVarParents = thisVarParents.stream()
                .map(parent -> variables.getVariableByName(parent.getName() + "_t0"))
                .collect(Collectors.toList());

        conditionalDistribution.setConditioningVariables(thisVarParents);
        conditionalDistribution.setVar(staticVar1);
        bn.setConditionalDistribution(staticVar1, conditionalDistribution);

        // TIMES T>0
        IntStream.range(1, nTimeSteps).forEach(i -> {
            ConditionalDistribution conditionalDistribution1 = Serialization
                    .deepCopy(model.getConditionalDistributionTimeT(dynVariable));
            Variable staticVar2 = variables
                    .getVariableByName(dynVariable.getName() + "_t" + Integer.toString(i));

            List<Variable> thisVarParents1 = conditionalDistribution1.getConditioningVariables();
            thisVarParents1 = thisVarParents1.stream().map(parent -> {
                if (parent.getName().contains("_Interface")) {
                    return variables.getVariableByName(
                            parent.getName().replace("_Interface", "_t" + Integer.toString(i - 1)));
                } else {
                    return variables.getVariableByName(parent.getName() + "_t" + Integer.toString(i));
                }
            }).collect(Collectors.toList());

            conditionalDistribution1.setConditioningVariables(thisVarParents1);
            conditionalDistribution1.setVar(staticVar2);
            bn.setConditionalDistribution(staticVar2, conditionalDistribution1);
        });
    });

    return bn;
}
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/**
 * Returns the distribution of MAP Children at time T.
 * @param staticVariable the static {@link Variable} object.
 * @param dynamicConditionalDistribution the dynamic {@link ConditionalDistribution} at time T.
 * @param parentList the {@code List} of parent {@link Variable}s.
 * @param modelNumber an integer
 * @param time_step an integer with the time step.
 * @return a {@link BaseDistribution_MultinomialParents} distribution.
 */
private ConditionalDistribution obtainDistributionOfMAPChildren(Variable staticVariable,
        ConditionalDistribution dynamicConditionalDistribution, List<Variable> parentList, int modelNumber,
        int time_step) {

    boolean allParentsMultinomial = parentList.stream().allMatch(parent -> parent.isMultinomial());
    List<Variable> multinomialParents = parentList.stream().filter(parent -> parent.isMultinomial())
            .collect(Collectors.toList());
    List<Variable> continuousParents = parentList.stream().filter(parent -> !parent.isMultinomial())
            .collect(Collectors.toList());

    //BaseDistribution_MultinomialParents staticVarConDist = new BaseDistribution_MultinomialParents(staticVariable, parentList);
    ConditionalDistribution staticVarConDist;

    // In this method, all variables have at least one parent (either discrete or continuous)
    int distributionType = -1;
    if (staticVariable.isMultinomial()) {
        distributionType = 0;
        staticVarConDist = new Multinomial_MultinomialParents(staticVariable, parentList);
    } else if (staticVariable.isNormal()) {

        int nMultinomialParents = multinomialParents.size();
        int nNormalParents = continuousParents.size();

        if (nNormalParents > 0 && nMultinomialParents == 0) {
            distributionType = 1;
            staticVarConDist = new ConditionalLinearGaussian(staticVariable, parentList);
        } else if (nNormalParents == 0 && nMultinomialParents > 0) {
            distributionType = 2;
            staticVarConDist = new Normal_MultinomialParents(staticVariable, parentList);
        } else if (nNormalParents > 0 && nMultinomialParents > 0) {
            distributionType = 3;
            staticVarConDist = new Normal_MultinomialNormalParents(staticVariable, parentList);
        } else {
            throw new IllegalArgumentException("Unrecognized DistributionType. ");
        }
    } else {
        throw new IllegalArgumentException("Unrecognized DistributionType. ");
    }

    int nStatesMultinomialParents = (int) Math.round(Math.exp(
            multinomialParents.stream().mapToDouble(parent -> Math.log(parent.getNumberOfStates())).sum()));
    int nStatesMAPVariable = MAPvariable.getNumberOfStates();

    for (int m = 0; m < nStatesMultinomialParents; m++) {
        Assignment staticParentsConfiguration = MultinomialIndex
                .getVariableAssignmentFromIndex(multinomialParents, m);
        Assignment dynamicParentsConfiguration = new HashMapAssignment(multinomialParents.size());

        IntStream.range(0, multinomialParents.size()).forEach(k -> {
            Variable currentParent = multinomialParents.get(k);
            int parentValue = (int) staticParentsConfiguration.getValue(currentParent);
            String parentName;

            if (currentParent.getName().contains(groupedClassName)) {
                parentName = currentParent.getName().replace(groupedClassName, MAPvarName).replaceAll("_t\\d+", "");
                Variable dynCurrentParent = model.getDynamicVariables().getVariableByName(parentName);

                int dynParentValue;

                int nMergedStates = currentParent.getNumberOfStates();
                int repetitionsConDistT = (int) Math.round(Math.log(nMergedStates) / Math.log(nStatesMAPVariable));

                int indexCurrentParentState;
                if (time_step >= modelNumber)
                    indexCurrentParentState = (time_step - modelNumber) % nMergedClassVars;
                else
                    indexCurrentParentState = time_step;

                String m_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(parentValue), 10),
                        nStatesMAPVariable);
                m_base_nStates = StringUtils.leftPad(m_base_nStates, repetitionsConDistT, '0');

                int dynamicParentState = Integer.parseInt(
                        m_base_nStates.substring(indexCurrentParentState, indexCurrentParentState + 1));

                dynParentValue = dynamicParentState;
                dynamicParentsConfiguration.setValue(dynCurrentParent, dynParentValue);

                //                    System.out.println("Variable: " + staticVariable.getName() + " with " + staticVariable.getNumberOfStates() + " states and " + parentList.size() + " parents");
                //                    System.out.println("Parent " + parentName + " with " + nMergedStates + " states");
                //                    System.out.println("Time step " + time_step + " and model number " + modelNumber);
                //                    System.out.println("Parent state number " + parentValue + " which is " + m_base_nStates);
                //                    System.out.println("Index parent state " + indexCurrentParentState);
                //                    System.out.println("Dynamic parent state number " + dynamicParentState);
                //                    System.out.println();

                //                    if (time_step==0) { // Variable at time t=0
                //                        if(modelNumber!=1) {
                //                            //dynParentValue = parentValue / (int) Math.pow(nStatesMAPVariable, nMergedClassVars - 1);
                ////                            System.out.println(currentParent.getNumberOfStates());
                ////                            System.out.println(nStatesMAPVariable);
                ////                            System.out.println(parentValue);
                ////                            System.out.println(parentValue / (currentParent.getNumberOfStates()/nStatesMAPVariable));
                //                            dynParentValue = parentValue / (int) Math.pow(nStatesMAPVariable, nMergedClassVars - 1);
                //                        }
                //                        else {
                //                            dynParentValue = parentValue;
                //                        }
                //                    }
                //                    // Variable at time t=nTimeSteps-1 (last copy) and not complete
                //                    else {
                //
                //                        if ((time_step - modelNumber) % nMergedClassVars != 0 && (time_step == nTimeSteps - 1)) {
                //                            dynParentValue = parentValue % nStatesMAPVariable;
                //                        } else {
                //                            if ((time_step - modelNumber) % nMergedClassVars == 0) {
                //                                dynParentValue = parentValue / (currentParent.getNumberOfStates() / nStatesMAPVariable);
                //                                ;
                //                            } else {
                //                                dynParentValue = parentValue % nStatesMAPVariable;
                //                            }
                //                        }
                //                    }

                //                    if ((!even_partition && nTimeSteps % 2 == 0 && (time_step == nTimeSteps - 1)) || (even_partition && nTimeSteps % 2 == 1 && (time_step == nTimeSteps - 1))) {
                //                        dynParentValue = parentValue;
                //                    } else {
                //                        if ((!even_partition && (time_step % 2 == 1)) || (even_partition && (time_step % 2 == 0))) {
                //                            dynParentValue = parentValue / MAPvariable.getNumberOfStates();
                //                        } else {
                //                            dynParentValue = parentValue % MAPvariable.getNumberOfStates();
                //                        }
                //                    }
            } else {
                if (multinomialParents.get(k).getName().endsWith("_t" + Integer.toString(time_step - 1))) {
                    parentName = multinomialParents.get(k).getName().replaceFirst("_t\\d+", "");
                    Variable dynParent = model.getDynamicVariables().getVariableByName(parentName);
                    dynamicParentsConfiguration.setValue(dynParent.getInterfaceVariable(), parentValue);
                } else {
                    parentName = multinomialParents.get(k).getName().replaceFirst("_t\\d+", "");
                    Variable dynParent = model.getDynamicVariables().getVariableByName(parentName);
                    dynamicParentsConfiguration.setValue(dynParent, parentValue);
                }
            }
        });

        //            System.out.println(dynamicParentsConfiguration.outputString());

        //            if (allParentsMultinomial && staticVariable.isMultinomial()) {
        ////                try {
        //
        //                    Multinomial_MultinomialParents multinomial_multinomialParents = (Multinomial_MultinomialParents) dynamicConditionalDistribution;
        //                    Multinomial multinomial1 = (Multinomial) multinomial_multinomialParents.getMultinomial(dynamicParentsConfiguration);
        //
        //                    multinomial1.setVar(staticVariable);
        //                    multinomial1.setConditioningVariables(multinomialParents);
        //
        ////                    System.out.println(multinomial1.toString()+"\n\n");
        //                    staticVarConDist.setBaseDistribution(m, multinomial1);
        //                    staticVarConDist.set
        ////                }
        ////                catch(Exception e) {
        ////                    System.out.println("Exception");
        ////                    System.out.println(e.getMessage());
        ////                    System.out.println(staticVariable.getName());
        ////                    System.out.println(dynamicParentsConfiguration.outputString());
        ////                }
        //            }
        //            else if (allParentsMultinomial && staticVariable.isNormal() ){
        //                Normal_MultinomialParents normal_multinomialParents = (Normal_MultinomialParents) dynamicConditionalDistribution;
        //                Normal clg = normal_multinomialParents.getNormal(dynamicParentsConfiguration);
        //                clg.setConditioningVariables(multinomialParents);
        //                //clg.setConditioningVariables(continuousParents);
        //                clg.setVar(staticVariable);
        //
        //                staticVarConDist.setBaseDistribution(m, clg);
        //            }
        //            else {
        //                Normal_MultinomialNormalParents normal_multinomialNormalParents = (Normal_MultinomialNormalParents) dynamicConditionalDistribution;
        //                ConditionalLinearGaussian clg = normal_multinomialNormalParents.getNormal_NormalParentsDistribution(dynamicParentsConfiguration);
        //                clg.setConditioningVariables(continuousParents);
        //                clg.setVar(staticVariable);
        //
        //                staticVarConDist.setBaseDistribution(m, clg);
        //            }

        if (distributionType == 0) { // Multinomial_Multinomial
            Multinomial_MultinomialParents multinomial_multinomialParents = (Multinomial_MultinomialParents) dynamicConditionalDistribution;
            Multinomial multinomial1 = (Multinomial) multinomial_multinomialParents
                    .getMultinomial(dynamicParentsConfiguration);

            multinomial1.setVar(staticVariable);
            multinomial1.setConditioningVariables(multinomialParents);

            ((Multinomial_MultinomialParents) staticVarConDist).setMultinomial(m, multinomial1);
        } else if (distributionType == 2) { // Normal_Multinomial
            Normal_MultinomialParents normal_multinomialParents = (Normal_MultinomialParents) dynamicConditionalDistribution;
            Normal normal1 = normal_multinomialParents.getNormal(dynamicParentsConfiguration);
            normal1.setConditioningVariables(multinomialParents);
            //clg.setConditioningVariables(continuousParents);
            normal1.setVar(staticVariable);

            ((Normal_MultinomialParents) staticVarConDist).setNormal(m, normal1);
        } else if (distributionType == 3) { // Normal_MultinomialNormal
            Normal_MultinomialNormalParents normal_multinomialNormalParents = (Normal_MultinomialNormalParents) dynamicConditionalDistribution;
            ConditionalLinearGaussian clg = normal_multinomialNormalParents
                    .getNormal_NormalParentsDistribution(dynamicParentsConfiguration);
            clg.setConditioningVariables(continuousParents);
            clg.setVar(staticVariable);

            ((Normal_MultinomialNormalParents) staticVarConDist).setNormal_NormalParentsDistribution(m, clg);
        } else { // ConditionalLinearGaussian, distributionType==1
            ConditionalLinearGaussian clg = (ConditionalLinearGaussian) dynamicConditionalDistribution;
            //((ConditionalLinearGaussian)staticVarConDist)
            staticVarConDist = clg;
        }
    }

    //        if (allParentsMultinomial && staticVariable.isNormal())
    //            return (Normal_MultinomialParents)staticVarConDist;
    //        else {
    //            return staticVarConDist;
    //        }

    return staticVarConDist;
}
From source file:com.gemstone.gemfire.internal.cache.OplogJUnitTest.java
@Test
public void testMagicSeqPresence() throws Exception {
    final int MAX_OPLOG_SIZE = 200;
    diskProps.setMaxOplogSize(MAX_OPLOG_SIZE);
    diskProps.setPersistBackup(true);
    diskProps.setRolling(true);
    diskProps.setSynchronous(true);
    diskProps.setOverflow(false);
    diskProps.setDiskDirsAndSizes(new File[] { dirs[0] }, new int[] { 4000 });
    region = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, diskProps, Scope.LOCAL);

    // 3 types of oplog files will be verified
    verifyOplogHeader(dirs[0], ".if", ".crf", ".drf");

    try {
        LocalRegion.ISSUE_CALLBACKS_TO_CACHE_OBSERVER = true;
        IntStream.range(0, 20).forEach(i -> region.put("key-" + i, "value-" + i));

        // krf is created, so 4 types of oplog files will be verified
        verifyOplogHeader(dirs[0], ".if", ".crf", ".drf", ".krf");

        region.close();
        region = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, diskProps, Scope.LOCAL);
        verifyOplogHeader(dirs[0], ".if", ".crf", ".drf", ".krf");
        region.close();
    } finally {
        LocalRegion.ISSUE_CALLBACKS_TO_CACHE_OBSERVER = false;
    }
}
From source file:io.prestosql.tests.AbstractTestQueries.java
@Test
public void testAssignUniqueId() {
    String unionLineitem25Times = IntStream.range(0, 25).mapToObj(i -> "SELECT * FROM lineitem")
            .collect(joining(" UNION ALL "));

    assertQuery(
            "SELECT count(*) FROM (" +
                    "SELECT * FROM (" +
                    "   SELECT (SELECT count(*) WHERE c = 1) " +
                    "   FROM (SELECT CASE orderkey WHEN 1 THEN orderkey ELSE 1 END " +
                    "       FROM (" + unionLineitem25Times + ")) o(c)) result(a) " +
                    "WHERE a = 1)",
            "VALUES 1504375");
}