List of usage examples for org.apache.commons.lang3 StringUtils leftPad
public static String leftPad(final String str, final int size, String padStr)
Left pad a String with a specified String.
Pads the String to a length of size characters; a null or empty padStr is treated as a single space.
StringUtils.leftPad(null, *, *)      = null
StringUtils.leftPad("", 3, "z")      = "zzz"
StringUtils.leftPad("bat", 3, "yz")  = "bat"
StringUtils.leftPad("bat", 5, "yz")  = "yzbat"
StringUtils.leftPad("bat", 8, "yz")  = "yzyzybat"
StringUtils.leftPad("bat", 1, "yz")  = "bat"
StringUtils.leftPad("bat", -1, "yz") = "bat"
StringUtils.leftPad("bat", 5, null)  = "  bat"
StringUtils.leftPad("bat", 5, "")    = "  bat"
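A minimal, self-contained sketch of the documented behaviour (the class name is illustrative):

import org.apache.commons.lang3.StringUtils;

public class LeftPadDemo {
    public static void main(String[] args) {
        // pad "bat" on the left with "yz" until the result is 8 characters long
        System.out.println(StringUtils.leftPad("bat", 8, "yz")); // yzyzybat
        // an empty (or null) pad String falls back to padding with spaces
        System.out.println(StringUtils.leftPad("bat", 5, ""));   // "  bat" (two leading spaces)
        // if the String is already at least size characters, it is returned unchanged
        System.out.println(StringUtils.leftPad("bat", 1, "yz")); // bat
    }
}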
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
private List<double[]> obtainMAPVariableConditionalDistributions( List<List<UnivariateDistribution>> posteriorMAPVariableDistributions) { List<double[]> listCondDistributions = new ArrayList<>(nTimeSteps); int nStates = MAPvariable.getNumberOfStates(); // Univariate distribution Y_0 // UnivariateDistribution dist0_1 = posteriorMAPDistributionsEvenModel.get(0); // This variable Z_0 groups Y_0 and Y_1 // UnivariateDistribution dist0 = posteriorMAPDistributionsOddModel.get(0); // This variable is just Y_0 (not a group) // double[] dist0_probs = new double[nStates]; // System.out.println("\n\n\n\n\n\n\n\n\n\n\n\n\n"); IntStream.range(0, nTimeSteps).forEachOrdered(timeStep -> { // System.out.println("\n\nTime step " + timeStep); double[] combinedConditionalDistributionProbabilities, baseDistributionProbabilities; int baseModelIndex = (timeStep + 1) % nMergedClassVars; int baseDistributionIndex = (timeStep >= baseModelIndex) ? (baseModelIndex == 0 ? 0 : 1) + (timeStep - baseModelIndex) / nMergedClassVars : (timeStep - baseModelIndex) / nMergedClassVars; baseDistributionProbabilities = posteriorMAPVariableDistributions.get(baseModelIndex) .get(baseDistributionIndex).getParameters(); int nStatesBaseDistribution = baseDistributionProbabilities.length; int baseDistrib_nMergedVars = (int) Math.round(Math.log(nStatesBaseDistribution) / Math.log(nStates)); combinedConditionalDistributionProbabilities = IntStream.range(0, nMergedClassVars).mapToObj(modelNumber -> { if (modelNumber == baseModelIndex) { // System.out.println("\nModel number " + modelNumber); //System.out.println(Arrays.toString(baseDistributionProbabilities)); return baseDistributionProbabilities; }/*from w w w . j a va 2s .co m*/ // System.out.println("\nModel number " + modelNumber); int distributionIndex = (timeStep >= modelNumber) ? (modelNumber == 0 ? 0 : 1) + (timeStep - modelNumber) / nMergedClassVars : (timeStep - modelNumber) / nMergedClassVars; int currentVarIndex = (timeStep >= modelNumber) ? (timeStep - modelNumber) % nMergedClassVars : timeStep; // System.out.println("CurrentVarIndex " + currentVarIndex); UnivariateDistribution currentDistrib = posteriorMAPVariableDistributions.get(modelNumber) .get(distributionIndex); //System.out.println(currentDistrib.toString()); double[] probabilities = new double[nStatesBaseDistribution]; int currentDistrib_nMergedVars = (int) Math.round( Math.log(currentDistrib.getVariable().getNumberOfStates()) / Math.log(nStates)); int current_nMergedVarsBaseDist = (int) Math .round(Math.log(baseDistributionProbabilities.length) / Math.log(nStates)); if (distributionIndex == 0) { // System.out.println("Current nMergedVars " + currentDistrib_nMergedVars + ", current nMergedVarsBaseDist " + current_nMergedVarsBaseDist); for (int m = 0; m < Math.pow(nStates, currentDistrib_nMergedVars); m++) { String m_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(m), 10), nStates); m_base_nStates = StringUtils.leftPad(m_base_nStates, currentDistrib_nMergedVars, '0'); // int index_init = currentVarIndex - ((timeStep >= nMergedClassVars) ? 
nMergedClassVars : timeStep); int index_init = currentVarIndex + 1 - baseDistrib_nMergedVars; int index_end = currentVarIndex + 1; // String statesSequence = m_base_nStates.substring(currentVarIndex, currentVarIndex + current_nMergedVarsBaseDist); String statesSequence = m_base_nStates.substring(index_init, index_end); int currentState = Integer.parseInt(statesSequence, nStates); // System.out.println("Current state " + currentState); probabilities[currentState] += currentDistrib.getParameters()[m]; } } else { UnivariateDistribution previousDistrib = posteriorMAPVariableDistributions .get(modelNumber).get(distributionIndex - 1); int previousDistrib_nMergedVars = (int) Math .round(Math.log(previousDistrib.getVariable().getNumberOfStates()) / Math.log(nStates)); // System.out.println("Current nMergedVars " + currentDistrib_nMergedVars + ", previous nMergedVars " + previousDistrib_nMergedVars + ", current nMergedVarsBaseDist " + current_nMergedVarsBaseDist); for (int n = 0; n < Math.pow(nStates, previousDistrib_nMergedVars); n++) { String n_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(n), 10), nStates); n_base_nStates = StringUtils.leftPad(n_base_nStates, previousDistrib_nMergedVars, '0'); for (int m = 0; m < Math.pow(nStates, currentDistrib_nMergedVars); m++) { String m_base_nStates = Integer .toString(Integer.parseInt(Integer.toString(m), 10), nStates); m_base_nStates = StringUtils.leftPad(m_base_nStates, currentDistrib_nMergedVars, '0'); // String statesSequence = m_base_nStates.substring(currentVarIndex, currentVarIndex + current_nMergedVarsBaseDist); // int currentState = Integer.parseInt(statesSequence, nStates); String n_concat_m_base_nStates = n_base_nStates.concat(m_base_nStates); int index_init = previousDistrib_nMergedVars + currentVarIndex + 1 - baseDistrib_nMergedVars; int index_end = previousDistrib_nMergedVars + currentVarIndex + 1; String statesSequence = n_concat_m_base_nStates.substring(index_init, index_end); // System.out.println("Complete sequence: " + n_concat_m_base_nStates + ", statesSequence:" + statesSequence); // int subIndices_m = currentVarIndex; // int subIndices_n = 1 + ((timeStep >= nMergedClassVars) ? 
(previousDistrib_nMergedVars - nMergedClassVars + currentVarIndex) : (previousDistrib_nMergedVars - (nMergedClassVars-timeStep) + currentVarIndex)); // // System.out.println("n_base_nStates: " + n_base_nStates + "m_base_nStates: " + m_base_nStates ); // // System.out.println("subIndices_m: " + Integer.toString(subIndices_m)); // System.out.println("subIndices_n: " + Integer.toString(subIndices_n)); // // String statesSequence_m = m_base_nStates.substring(0, subIndices_m); // String statesSequence_n = n_base_nStates.substring(subIndices_n, previousDistrib_nMergedVars); // // // System.out.println("statesSequence n: " + statesSequence_n + ", statesSequence m: " + statesSequence_m ); // // String statesSequence = statesSequence_n.concat(statesSequence_m); // System.out.println("States sequence length: " + statesSequence.length() + ", sequence: " + statesSequence); int currentState = Integer.parseInt(statesSequence, nStates); // System.out.println("Current state " + currentState); probabilities[currentState] += previousDistrib.getParameters()[n] * currentDistrib.getParameters()[m]; } } } // System.out.println("Model distribution: " + Arrays.toString(probabilities)); return probabilities; }).reduce(new double[baseDistributionProbabilities.length], (doubleArray1, doubleArray2) -> { if (doubleArray1.length != doubleArray2.length) { // System.out.println("Problem with lengths"); System.exit(-40); } for (int i = 0; i < doubleArray1.length; i++) doubleArray1[i] += ((double) 1 / nMergedClassVars) * doubleArray2[i]; return doubleArray1; }); //System.out.println("Combined distribution " + Arrays.toString(combinedConditionalDistributionProbabilities)); listCondDistributions.add(combinedConditionalDistributionProbabilities); }); // System.out.println("\n\n\n\n\n\n\n\n\n\n\n\n\n"); return listCondDistributions; }
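The method above repeatedly combines base conversion with leftPad to decode a joint-state index into one digit per merged variable. A minimal sketch of that idiom, with illustrative values for the number of states and merged variables (and using Integer.toString(m, nStates), which is equivalent to the longer Integer.toString(Integer.parseInt(Integer.toString(m), 10), nStates) form used in the source):

import org.apache.commons.lang3.StringUtils;

public class MergedStateIndexDemo {
    public static void main(String[] args) {
        int nStates = 3;     // states per MAP variable (illustrative value)
        int nMergedVars = 4; // number of merged copies of the variable
        int m = 7;           // joint-state index to decode

        // render the index in base nStates and zero-pad it to one digit per variable
        String mBaseNStates = Integer.toString(m, nStates);                 // "21"
        mBaseNStates = StringUtils.leftPad(mBaseNStates, nMergedVars, '0'); // "0021"

        // each digit of the padded string is the state of one merged variable
        int stateOfThirdVar = Character.getNumericValue(mBaseNStates.charAt(2));
        System.out.println(mBaseNStates + " -> third variable in state " + stateOfThirdVar); // 0021 -> 2
    }
}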
From source file:ca.phon.ipamap.IpaMap.java
private JButton getMapButton(Cell cell) {
    PhonUIAction action = new PhonUIAction(this, "onCellClicked", cell);
    action.putValue(Action.NAME, cell.getText());
    action.putValue(Action.SHORT_DESCRIPTION, cell.getText());

    JButton retVal = new CellButton(cell);
    retVal.setAction(action);

    final Cell cellData = cell;
    retVal.addMouseListener(new MouseInputAdapter() {

        @Override
        public void mouseEntered(MouseEvent me) {
            String txt = cellData.getText();
            txt = txt.replaceAll("\u25cc", "");

            final IPATokens tokens = IPATokens.getSharedInstance();
            String uniVal = "";
            String name = "";
            for (Character c : txt.toCharArray()) {
                String cText = "0x" + StringUtils.leftPad(Integer.toHexString((int) c), 4, '0');
                uniVal += (uniVal.length() > 0 ? " + " : "") + cText;

                String cName = tokens.getCharacterName(c);
                name += (name.length() > 0 ? " + " : "") + cName;
            }
            String infoTxt = "[" + uniVal + "] " + name;
            infoLabel.setText(infoTxt);
        }

        @Override
        public void mouseExited(MouseEvent me) {
            infoLabel.setText("[]");
        }

    });
    retVal.addMouseListener(new ContextMouseHandler());

    // set tooltip delay to 10 minutes for the buttons
    retVal.addMouseListener(new MouseAdapter() {

        final int defaultDismissTimeout = ToolTipManager.sharedInstance().getDismissDelay();
        final int dismissDelayMinutes = (int) TimeUnit.MINUTES.toMillis(10); // 10 minutes

        @Override
        public void mouseEntered(MouseEvent me) {
            ToolTipManager.sharedInstance().setDismissDelay(dismissDelayMinutes);
        }

        @Override
        public void mouseExited(MouseEvent me) {
            ToolTipManager.sharedInstance().setDismissDelay(defaultDismissTimeout);
        }

    });

    return retVal;
}
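Here leftPad turns each character's code point into a fixed-width hexadecimal label. A small sketch of just that formatting step (class and variable names are illustrative):

import org.apache.commons.lang3.StringUtils;

public class UnicodeLabelDemo {
    public static void main(String[] args) {
        char c = '\u014B'; // LATIN SMALL LETTER ENG, an IPA character used as an example
        // Integer.toHexString gives "14b"; left-padding with '0' to width 4 gives "014b"
        String cText = "0x" + StringUtils.leftPad(Integer.toHexString((int) c), 4, '0');
        System.out.println(cText); // 0x014b
    }
}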
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/** * Computes the Most Probable Sequence given the posterior distributions of the MAP variable. * @param posteriorDistributionsMAPvariable a {@code List} of conditional distribution values. *//*from w ww.j a v a 2 s . com*/ private void computeMostProbableSequence(List<double[]> posteriorDistributionsMAPvariable) { int[] MAPsequence = new int[nTimeSteps]; int nStates = MAPvariable.getNumberOfStates(); int[] argMaxValues = new int[nTimeSteps - 1]; double MAPsequenceProbability = -1; double[] current_probs; double[] current_max_probs = new double[(int) Math.pow(nStates, nMergedClassVars - 1)]; double[] previous_max_probs = new double[(int) Math.pow(nStates, nMergedClassVars - 1)]; for (int t = nTimeSteps - 1; t >= 1; t--) { // System.out.println("Time:" + t); double[] currentDistProbabilities = posteriorDistributionsMAPvariable.get(t); if (Arrays.stream(currentDistProbabilities).anyMatch(Double::isNaN)) { MAPsequence = new int[nTimeSteps]; for (int i = 0; i < MAPsequence.length; i++) { MAPsequence[i] = -1; return; } } int currentDistrib_nMergedVars = (int) Math .round(Math.log(currentDistProbabilities.length) / Math.log(nStates)); // System.out.println("Current Probabilities:" + Arrays.toString(currentDistProbabilities)); current_max_probs = new double[(int) Math.pow(nStates, currentDistrib_nMergedVars - 1)]; if (t == nTimeSteps - 1) { previous_max_probs = Arrays.stream(previous_max_probs).map(d -> 1).toArray(); } // System.out.println("Current Max Probs Length: " + current_max_probs.length); // System.out.println("Previous Max Probs: " + Arrays.toString(previous_max_probs)); for (int m = 0; m < Math.pow(nStates, currentDistrib_nMergedVars); m++) { // System.out.println("State: " + m ); String m_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(m), 10), nStates); m_base_nStates = StringUtils.leftPad(m_base_nStates, currentDistrib_nMergedVars, '0'); int currentStateFirstVars; int currentStateLastVars; if (t > 0) { String stateFirstVars = m_base_nStates.substring(0, currentDistrib_nMergedVars - 1); currentStateFirstVars = Integer.parseInt(stateFirstVars, nStates); String stateLastVars = m_base_nStates.substring(1, currentDistrib_nMergedVars); currentStateLastVars = Integer.parseInt(stateLastVars, nStates); } else { currentStateFirstVars = 0; currentStateLastVars = Integer.parseInt(m_base_nStates, nStates); } // System.out.println("FirstVars:" + currentStateFirstVars + ", LastVars:" + currentStateLastVars); double currentProb = currentDistProbabilities[m] * previous_max_probs[currentStateLastVars]; double maxProb = current_max_probs[currentStateFirstVars]; if (currentProb > maxProb) { current_max_probs[currentStateFirstVars] = currentProb; } } // System.out.println("Current Max Probabilities:" + Arrays.toString(current_max_probs)); argMaxValues[t - 1] = (int) argMax(current_max_probs)[1]; previous_max_probs = current_max_probs; if (t == 1) { MAPsequenceProbability = argMax(current_max_probs)[0]; } // System.out.println("MAP Sequence Prob:" + MAPsequenceProbability); // System.out.println("Arg Max Value: " + argMaxValues[t-1]+ "\n\n\n"); } // for (int t = nTimeSteps-1; t >= 0; t--) { // // current_probs = posteriorDistributionsMAPvariable.get(t); // double maxProb=-1; // // current_max_probs = new double[MAPvarNStates]; // // if (t==(nTimeSteps-1)) { // There are no previous_max_probs // for (int j = 0; j < MAPvarNStates; j++) { // To go over all values of Y_{t-1} // maxProb=-1; // for (int k = 0; k < MAPvarNStates; k++) { // To go over all values of Y_t // if (current_probs[j * 
MAPvarNStates + k] > maxProb) { // maxProb = current_probs[j * MAPvarNStates + k]; // // } // } // current_max_probs[j]=maxProb; // } // argMaxValues[t] = (int)argMax(current_max_probs)[1]; // previous_max_probs = current_max_probs; // } // else if (t>0 && t<(nTimeSteps-1)) { // for (int j = 0; j < MAPvarNStates; j++) { // To go over all values of Y_{t-1} // maxProb=-1; // for (int k = 0; k < MAPvarNStates; k++) { // To go over all values of Y_t // if (current_probs[j * MAPvarNStates + k]*previous_max_probs[j] > maxProb) { // maxProb = current_probs[j * MAPvarNStates + k]*previous_max_probs[k]; // } // } // current_max_probs[j]=maxProb; // } // argMaxValues[t] = (int)argMax(current_max_probs)[1]; // previous_max_probs = current_max_probs; // } // else { // Here, t=0 // for (int j = 0; j < MAPvarNStates; j++) { // To go over all values of Y_0 // maxProb=-1; // for (int k = 0; k < MAPvarNStates; k++) { // To go over all values of Y_1 // if (current_probs[j]*previous_max_probs[j] > maxProb) { // maxProb = current_probs[j]*previous_max_probs[j]; // } // } // current_max_probs[j]=maxProb; // } // MAPsequenceProbability = argMax(current_max_probs)[0]; // argMaxValues[t] = (int)argMax(current_max_probs)[1]; // previous_max_probs = current_max_probs; // } // } //System.out.println(Arrays.toString(argMaxValues)); // System.out.println("\n\n TRACEBACK \n\n"); //int previousVarMAPState = argMaxValues[0]; MAPsequence[0] = argMaxValues[0]; int thisVarMAPState = 0; for (int t = 1; t < nTimeSteps; t++) { // System.out.println("Time Step: " + t); current_probs = posteriorDistributionsMAPvariable.get(t); StringBuilder prevVarsStateBuilder = new StringBuilder(); int j_max = Math.min(nMergedClassVars - 1, t); for (int j = 0; j < Math.min(nMergedClassVars - 1, t); j++) { // System.out.println("Append: " + Integer.toString(MAPsequence[t-j_max+j]) ); prevVarsStateBuilder.append(Integer.toString(MAPsequence[t - j_max + j])); } //previousVarMAPState = argMaxValues[t-1]; // System.out.println("PrevVarsState: " + prevVarsStateBuilder.toString()); // String prevVarsState = Integer.toString(Integer.parseInt(prevVarsStateBuilder.toString()), nStates); String prevVarsState = prevVarsStateBuilder.toString(); // System.out.println("Prev Vars State:" + prevVarsState); // String m_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(previousVarMAPState), 10), nStates); // m_base_nStates = StringUtils.leftPad(m_base_nStates, currentDistrib_nMergedVars, '0'); // // String stateConditioningVars = m_base_nStates.substring(0, currentDistrib_nMergedVars-1); // int currentStateConditioningVars = Integer.parseInt(stateConditioningVars, nStates); // // String stateLastVar = m_base_nStates.substring(currentDistrib_nMergedVars-1, currentDistrib_nMergedVars); // int currentStateLastVar = Integer.parseInt(stateLastVar, nStates); // // double currentProb=currentDistProbabilities[m] * previous_max_probs[currentStateLastVar]; // double maxProb=current_max_probs[currentStateConditioningVars]; // // if (currentProb>maxProb) { // current_max_probs[currentStateConditioningVars] = currentProb ; // } double maxProb = -1; for (int j = 0; j < nStates; j++) { // To go over all values of Y_t int currentState = Integer.parseInt(prevVarsState.concat(Integer.toString(j)), nStates); // System.out.println("Current state:" + currentState); if (current_probs[currentState] > maxProb) { maxProb = current_probs[currentState]; thisVarMAPState = j; } } MAPsequence[t] = thisVarMAPState; // System.out.println("Currente Sequence Value: " + 
MAPsequence[t] + "\n\n"); } // int previousVarMAPState = argMaxValues[0]; // MAPsequence[0] = argMaxValues[0]; // // int thisVarMAPState = 0; // for (int t = 1; t < nTimeSteps; t++) { // current_probs = posteriorDistributionsMAPvariable.get(t); // previousVarMAPState = argMaxValues[t-1]; // // double maxProb = -1; // for (int j = 0; j < nStates; j++) { // To go over all values of Y_t // // if (current_probs[previousVarMAPState * nStates + j] >= maxProb) { // maxProb = current_probs[previousVarMAPState * nStates + j]; // thisVarMAPState = j; // } // } // MAPsequence[t]=thisVarMAPState; // } if (Arrays.stream(MAPsequence).anyMatch(value -> value < 0)) { MAPestimateLogProbability = Double.NaN; } else { MAPestimateLogProbability = Math.log(MAPsequenceProbability); } this.MAPsequence = MAPsequence; // System.out.println("FINAL SEQUENCE: " + Arrays.toString(MAPsequence)); }
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/**
 * Returns the grouped Distribution of the MAP Variable at Time 0.
 * @param dynVar the dynamic {@link Variable} object.
 * @param staticVar the static {@link Variable} object.
 * @param conDist0 the {@link ConditionalDistribution} at time 0.
 * @param conDistT the {@link ConditionalDistribution} at time T.
 * @param modelNumber the model number; if it equals 1, the time-0 distribution is returned unchanged.
 * @return a {@link Multinomial} distribution.
 */
private Multinomial groupedDistributionMAPVariableTime0(Variable dynVar, Variable staticVar,
        ConditionalDistribution conDist0, ConditionalDistribution conDistT, int modelNumber) {

    if (modelNumber == 1) {
        return (Multinomial) conDist0;
    }

    Assignment assignment0;
    assignment0 = new HashMapAssignment(1);

    int nStates = dynVar.getNumberOfStates();
    int nMergedStates = staticVar.getNumberOfStates();

    Multinomial multinomial = new Multinomial(staticVar);
    double[] probs = new double[nMergedStates];
    int probs_index = 0;

    int repetitionsConDistT = (int) Math.round(Math.log(nMergedStates) / Math.log(nStates) - 1);

    Assignment assignment1;

    for (int k = 0; k < nStates; k++) {
        // Probabilities at t=0
        assignment0.setValue(dynVar, k);
        double prob0 = conDist0.getConditionalProbability(assignment0);

        for (int m = 0; m < Math.pow(nStates, repetitionsConDistT); m++) {
            String m_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(m), 10), nStates);
            m_base_nStates = StringUtils.leftPad(m_base_nStates, repetitionsConDistT, '0');
            //System.out.println(m_base_nStates);

            double probT = 1;
            for (int n = 0; n < m_base_nStates.length(); n++) {
                int currentState = Integer.parseInt(m_base_nStates.substring(n, n + 1));
                int previousState;
                if (n >= 1)
                    previousState = Integer.parseInt(m_base_nStates.substring(n - 1, n));
                else
                    previousState = k;

                assignment1 = new HashMapAssignment(2);
                assignment1.setValue(dynVar.getInterfaceVariable(), previousState);
                assignment1.setValue(dynVar, currentState);

                probT = probT * conDistT.getConditionalProbability(assignment1);
            }
            probs[probs_index] = prob0 * probT;
            probs_index++;
        }
    }
    multinomial.setProbabilities(probs);
    return multinomial;
}
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/**
 * Returns the grouped Distribution of the MAP Variable at Time T.
 * @param dynVar the dynamic {@link Variable} object.
 * @param staticVar the static {@link Variable} object.
 * @param nStatesStaticVarParent the number of states of the static variable's parent.
 * @param parents the {@code List} of parent {@link Variable}s.
 * @param conDistT the {@link ConditionalDistribution} at time T.
 * @param modelNumber the model number.
 * @return a {@link Multinomial_MultinomialParents} distribution.
 */
private Multinomial_MultinomialParents groupedDistributionMAPVariableTimeT(Variable dynVar,
        Variable staticVar, int nStatesStaticVarParent, List<Variable> parents,
        ConditionalDistribution conDistT, int modelNumber) {

    Multinomial_MultinomialParents multinomial_multinomialParents =
            new Multinomial_MultinomialParents(staticVar, parents);
    Assignment assignment1;
    Multinomial multinomial;

    int nStates = dynVar.getNumberOfStates();
    int nMergedStates = staticVar.getNumberOfStates();
    int repetitionsConDistT = (int) Math.round(Math.log(nMergedStates) / Math.log(nStates));

    for (int s = 0; s < nStatesStaticVarParent; s++) {

        int parentState = s % nStates;

        double[] probs1 = new double[nMergedStates];
        int probs_index1 = 0;

        for (int m = 0; m < Math.pow(nStates, repetitionsConDistT); m++) {
            String m_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(m), 10), nStates);
            m_base_nStates = StringUtils.leftPad(m_base_nStates, repetitionsConDistT, '0');

            double probT = 1;
            for (int n = 0; n < m_base_nStates.length(); n++) {
                int currentState = Integer.parseInt(m_base_nStates.substring(n, n + 1));
                int previousState;
                if (n >= 1)
                    previousState = Integer.parseInt(m_base_nStates.substring(n - 1, n));
                else
                    previousState = parentState;

                assignment1 = new HashMapAssignment(2);
                assignment1.setValue(dynVar.getInterfaceVariable(), previousState);
                assignment1.setValue(dynVar, currentState);

                probT = probT * conDistT.getConditionalProbability(assignment1);
            }
            probs1[probs_index1] = probT;
            probs_index1++;
        }
        multinomial = new Multinomial(staticVar);
        multinomial.setProbabilities(probs1);

        multinomial_multinomialParents.setMultinomial(s, multinomial);
    }
    return multinomial_multinomialParents;
}
From source file:au.org.theark.core.service.ArkCommonServiceImpl.java
public String generateNaturalUID(String UID) {
    StringBuilder natBuilder = new StringBuilder();
    Matcher matcher = Pattern.compile("\\d+").matcher(UID);
    int last_end = 0;
    while (matcher.find()) {
        if (matcher.start() > last_end) {
            natBuilder.append(UID.substring(last_end, matcher.start()));
        }
        String subjectUIDNumber = StringUtils.leftPad(UID.substring(matcher.start(), matcher.end()), 20, '0');
        natBuilder.append(subjectUIDNumber);
        last_end = matcher.end();
    }
    return natBuilder.toString();
}
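The point of the 20-character zero padding is that the resulting UIDs sort numerically under plain string comparison (note that this implementation does not copy characters after the final digit run). A minimal sketch of the effect using leftPad directly, with illustrative values:

import org.apache.commons.lang3.StringUtils;

public class NaturalUidDemo {
    public static void main(String[] args) {
        // zero-padding each numeric run to a fixed width (20, as above) makes
        // lexicographic comparison agree with numeric order
        String a = "A" + StringUtils.leftPad("2", 20, '0');
        String b = "A" + StringUtils.leftPad("10", 20, '0');
        System.out.println(a.compareTo(b) < 0); // true: "A2" sorts before "A10"
    }
}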
From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java
/** * Returns the distribution of MAP Children at time T. * @param staticVariable the static {@link Variable} object. * @param dynamicConditionalDistribution the dynamic {@link ConditionalDistribution} at time T. * @param parentList the {@code List} of parent {@link Variable}s. * @param modelNumber an integer/*from w w w.j ava2 s.c om*/ * @param time_step an integer with the time step. * @return a {@link BaseDistribution_MultinomialParents} distribution. */ private ConditionalDistribution obtainDistributionOfMAPChildren(Variable staticVariable, ConditionalDistribution dynamicConditionalDistribution, List<Variable> parentList, int modelNumber, int time_step) { boolean allParentsMultinomial = parentList.stream().allMatch(parent -> parent.isMultinomial()); List<Variable> multinomialParents = parentList.stream().filter(parent -> parent.isMultinomial()) .collect(Collectors.toList()); List<Variable> continuousParents = parentList.stream().filter(parent -> !parent.isMultinomial()) .collect(Collectors.toList()); //BaseDistribution_MultinomialParents staticVarConDist = new BaseDistribution_MultinomialParents(staticVariable, parentList); ConditionalDistribution staticVarConDist; // In this method, all variables have at least one parent (either discrete or continuous) int distributionType = -1; if (staticVariable.isMultinomial()) { distributionType = 0; staticVarConDist = new Multinomial_MultinomialParents(staticVariable, parentList); } else if (staticVariable.isNormal()) { int nMultinomialParents = multinomialParents.size(); int nNormalParents = continuousParents.size(); if (nNormalParents > 0 && nMultinomialParents == 0) { distributionType = 1; staticVarConDist = new ConditionalLinearGaussian(staticVariable, parentList); } else if (nNormalParents == 0 && nMultinomialParents > 0) { distributionType = 2; staticVarConDist = new Normal_MultinomialParents(staticVariable, parentList); } else if (nNormalParents > 0 && nMultinomialParents > 0) { distributionType = 3; staticVarConDist = new Normal_MultinomialNormalParents(staticVariable, parentList); } else { throw new IllegalArgumentException("Unrecognized DistributionType. "); } } else { throw new IllegalArgumentException("Unrecognized DistributionType. 
"); } int nStatesMultinomialParents = (int) Math.round(Math.exp( multinomialParents.stream().mapToDouble(parent -> Math.log(parent.getNumberOfStates())).sum())); int nStatesMAPVariable = MAPvariable.getNumberOfStates(); for (int m = 0; m < nStatesMultinomialParents; m++) { Assignment staticParentsConfiguration = MultinomialIndex .getVariableAssignmentFromIndex(multinomialParents, m); Assignment dynamicParentsConfiguration = new HashMapAssignment(multinomialParents.size()); IntStream.range(0, multinomialParents.size()).forEach(k -> { Variable currentParent = multinomialParents.get(k); int parentValue = (int) staticParentsConfiguration.getValue(currentParent); String parentName; if (currentParent.getName().contains(groupedClassName)) { parentName = currentParent.getName().replace(groupedClassName, MAPvarName).replaceAll("_t\\d+", ""); Variable dynCurrentParent = model.getDynamicVariables().getVariableByName(parentName); int dynParentValue; int nMergedStates = currentParent.getNumberOfStates(); int repetitionsConDistT = (int) Math .round(Math.log(nMergedStates) / Math.log(nStatesMAPVariable)); int indexCurrentParentState; if (time_step >= modelNumber) indexCurrentParentState = (time_step - modelNumber) % nMergedClassVars; else indexCurrentParentState = time_step; String m_base_nStates = Integer.toString(Integer.parseInt(Integer.toString(parentValue), 10), nStatesMAPVariable); m_base_nStates = StringUtils.leftPad(m_base_nStates, repetitionsConDistT, '0'); int dynamicParentState = Integer.parseInt( m_base_nStates.substring(indexCurrentParentState, indexCurrentParentState + 1)); dynParentValue = dynamicParentState; dynamicParentsConfiguration.setValue(dynCurrentParent, dynParentValue); // System.out.println("Variable: " + staticVariable.getName() + " with " + staticVariable.getNumberOfStates() + " states and " + parentList.size() + " parents"); // System.out.println("Parent " + parentName + " with " + nMergedStates + " states"); // System.out.println("Time step " + time_step + " and model number " + modelNumber); // System.out.println("Parent state number " + parentValue + " which is " + m_base_nStates); // System.out.println("Index parent state " + indexCurrentParentState); // System.out.println("Dynamic parent state number " + dynamicParentState); // System.out.println(); // if (time_step==0) { // Variable at time t=0 // if(modelNumber!=1) { // //dynParentValue = parentValue / (int) Math.pow(nStatesMAPVariable, nMergedClassVars - 1); //// System.out.println(currentParent.getNumberOfStates()); //// System.out.println(nStatesMAPVariable); //// System.out.println(parentValue); //// System.out.println(parentValue / (currentParent.getNumberOfStates()/nStatesMAPVariable)); // dynParentValue = parentValue / (int) Math.pow(nStatesMAPVariable, nMergedClassVars - 1); // } // else { // dynParentValue = parentValue; // } // } // Variable at time t=nTimeSteps-1 (last copy) and not complete // else { // // // if ((time_step - modelNumber) % nMergedClassVars != 0 && (time_step == nTimeSteps - 1)) { // dynParentValue = parentValue % nStatesMAPVariable; // } else { // if ((time_step - modelNumber) % nMergedClassVars == 0) { // dynParentValue = parentValue / (currentParent.getNumberOfStates() / nStatesMAPVariable); // ; // } else { // dynParentValue = parentValue % nStatesMAPVariable; // } // } // } // if ((!even_partition && nTimeSteps % 2 == 0 && (time_step == nTimeSteps - 1)) || (even_partition && nTimeSteps % 2 == 1 && (time_step == nTimeSteps - 1))) { // dynParentValue = parentValue; // } else { // if 
((!even_partition && (time_step % 2 == 1)) || (even_partition && (time_step % 2 == 0))) { // dynParentValue = parentValue / MAPvariable.getNumberOfStates(); // } else { // dynParentValue = parentValue % MAPvariable.getNumberOfStates(); // } // } } else { if (multinomialParents.get(k).getName().endsWith("_t" + Integer.toString(time_step - 1))) { parentName = multinomialParents.get(k).getName().replaceFirst("_t\\d+", ""); Variable dynParent = model.getDynamicVariables().getVariableByName(parentName); dynamicParentsConfiguration.setValue(dynParent.getInterfaceVariable(), parentValue); } else { parentName = multinomialParents.get(k).getName().replaceFirst("_t\\d+", ""); Variable dynParent = model.getDynamicVariables().getVariableByName(parentName); dynamicParentsConfiguration.setValue(dynParent, parentValue); } } }); // System.out.println(dynamicParentsConfiguration.outputString()); // if (allParentsMultinomial && staticVariable.isMultinomial()) { //// try { // // Multinomial_MultinomialParents multinomial_multinomialParents = (Multinomial_MultinomialParents) dynamicConditionalDistribution; // Multinomial multinomial1 = (Multinomial) multinomial_multinomialParents.getMultinomial(dynamicParentsConfiguration); // // multinomial1.setVar(staticVariable); // multinomial1.setConditioningVariables(multinomialParents); // //// System.out.println(multinomial1.toString()+"\n\n"); // staticVarConDist.setBaseDistribution(m, multinomial1); // staticVarConDist.set //// } //// catch(Exception e) { //// System.out.println("Exception"); //// System.out.println(e.getMessage()); //// System.out.println(staticVariable.getName()); //// System.out.println(dynamicParentsConfiguration.outputString()); //// } // } // else if (allParentsMultinomial && staticVariable.isNormal() ){ // Normal_MultinomialParents normal_multinomialParents = (Normal_MultinomialParents) dynamicConditionalDistribution; // Normal clg = normal_multinomialParents.getNormal(dynamicParentsConfiguration); // clg.setConditioningVariables(multinomialParents); // //clg.setConditioningVariables(continuousParents); // clg.setVar(staticVariable); // // staticVarConDist.setBaseDistribution(m, clg); // } // else { // Normal_MultinomialNormalParents normal_multinomialNormalParents = (Normal_MultinomialNormalParents) dynamicConditionalDistribution; // ConditionalLinearGaussian clg = normal_multinomialNormalParents.getNormal_NormalParentsDistribution(dynamicParentsConfiguration); // clg.setConditioningVariables(continuousParents); // clg.setVar(staticVariable); // // staticVarConDist.setBaseDistribution(m, clg); // } if (distributionType == 0) { // Multinomial_Multinomial Multinomial_MultinomialParents multinomial_multinomialParents = (Multinomial_MultinomialParents) dynamicConditionalDistribution; Multinomial multinomial1 = (Multinomial) multinomial_multinomialParents .getMultinomial(dynamicParentsConfiguration); multinomial1.setVar(staticVariable); multinomial1.setConditioningVariables(multinomialParents); ((Multinomial_MultinomialParents) staticVarConDist).setMultinomial(m, multinomial1); } else if (distributionType == 2) { // Normal_Multinomial Normal_MultinomialParents normal_multinomialParents = (Normal_MultinomialParents) dynamicConditionalDistribution; Normal normal1 = normal_multinomialParents.getNormal(dynamicParentsConfiguration); normal1.setConditioningVariables(multinomialParents); //clg.setConditioningVariables(continuousParents); normal1.setVar(staticVariable); ((Normal_MultinomialParents) staticVarConDist).setNormal(m, normal1); } else if 
(distributionType == 3) { // Normal_MultinomialNormal Normal_MultinomialNormalParents normal_multinomialNormalParents = (Normal_MultinomialNormalParents) dynamicConditionalDistribution; ConditionalLinearGaussian clg = normal_multinomialNormalParents .getNormal_NormalParentsDistribution(dynamicParentsConfiguration); clg.setConditioningVariables(continuousParents); clg.setVar(staticVariable); ((Normal_MultinomialNormalParents) staticVarConDist).setNormal_NormalParentsDistribution(m, clg); } else { // ConditionalLinearGaussian, distributionType==1 ConditionalLinearGaussian clg = (ConditionalLinearGaussian) dynamicConditionalDistribution; //((ConditionalLinearGaussian)staticVarConDist) staticVarConDist = clg; } } // if (allParentsMultinomial && staticVariable.isNormal()) // return (Normal_MultinomialParents)staticVarConDist; // else { // return staticVarConDist; // } return staticVarConDist; }
From source file:nl.sidn.dnslib.util.IPUtil.java
public static String expandIPv6(String ip) {
    String[] sections = StringUtils.splitByWholeSeparatorPreserveAllTokens(ip, ":");
    StringBuilder sb = new StringBuilder();
    for (String section : sections) {
        if (section.length() == 0) {
            int missing = (8 - sections.length) + 1;
            for (int i = 0; i < missing; i++) {
                sb.append("0000:");
            }
        } else if (section.length() < 4) {
            String paddedSection = StringUtils.leftPad(section, 4, "0");
            sb.append(paddedSection + ":");
        } else {
            sb.append(section + ":");
        }
    }
    String expanded = sb.toString();
    return StringUtils.removeEnd(expanded, ":");
}
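An illustrative call, assuming nl.sidn.dnslib.util.IPUtil is on the classpath: each group shorter than four hex digits is left-padded with '0' and the "::" shorthand is replaced by the missing all-zero groups.

import nl.sidn.dnslib.util.IPUtil;

public class ExpandIPv6Demo {
    public static void main(String[] args) {
        // "db8" becomes "0db8" via leftPad, and "::" expands to five "0000" groups here
        System.out.println(IPUtil.expandIPv6("2001:db8::1"));
        // expected: 2001:0db8:0000:0000:0000:0000:0000:0001
    }
}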
From source file:nl.vpro.jcr.criteria.query.xpath.utils.XPathTextUtils.java
/**
 * @param path to encode eg //my//path/2009//*
 * @return String encoded path eg //my//path/_x0032_009//*
 */
public static String encodeDigitsInPath(String path) {
    LOG.debug("path to encode is {}", path);
    if (StringUtils.isBlank(path)) {
        String msg = "path cannot be a null or empty string";
        LOG.error(msg);
        throw new IllegalArgumentException(msg);
    }
    StringBuilder encodedPath = new StringBuilder(path.length());

    int inXpathCondition = 0;
    boolean xpathWithFunction = false;
    // TODO maybe a more robust check is needed
    for (int i = 0; i < path.length(); ++i) {
        char ch = path.charAt(i);
        if (i > 0 && path.charAt(i - 1) == '/' && Character.isDigit(ch)) {
            encodedPath.append("_x").append(StringUtils.leftPad(Integer.toHexString(ch), 4, '0')).append("_");
        } else if (i > 0 && path.charAt(i - 1) == '/' && ch == '-') {
            encodedPath.append("_x002d_");
        } else if (inXpathCondition <= 0 && ch == ' ') {
            encodedPath.append("_x0020_");
        } else if (inXpathCondition <= 0 && ch == ',') {
            encodedPath.append("_x002c_");
        } else if (inXpathCondition <= 0 && ch == '\u00b0') {
            // CRIT-53
            encodedPath.append("_x00b0_");
        } else if (inXpathCondition <= 0 && ch == '$') {
            // CRIT-54
            encodedPath.append("_x0024_");
        } else {
            if (ch == '[') {
                inXpathCondition++;
            } else if (ch == '(') {
                // "(" is the beginning of an expression only when used with the element() function
                if (StringUtils.endsWith(StringUtils.substring(path, 0, i), "element")) {
                    inXpathCondition++;
                    xpathWithFunction = true;
                } else if (inXpathCondition == 0) {
                    encodedPath.append("_x0028_");
                    continue;
                }
            } else if (inXpathCondition > 0 && ch == ']') {
                inXpathCondition--;
            } else if (ch == ')') {
                if (inXpathCondition > 0 && xpathWithFunction) {
                    inXpathCondition--;
                    xpathWithFunction = false;
                } else if (inXpathCondition == 0) {
                    encodedPath.append("_x0029_");
                    continue;
                }
            }
            encodedPath.append(ch);
        }
    }
    LOG.debug("returning encoded path {}", encodedPath);
    return encodedPath.toString();
}
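An illustrative call, assuming nl.vpro.jcr.criteria.query.xpath.utils.XPathTextUtils is on the classpath: a digit that starts a path step is replaced by its _xHHHH_ escape, with leftPad supplying the fixed four-digit hex width.

import nl.vpro.jcr.criteria.query.xpath.utils.XPathTextUtils;

public class EncodeDigitsDemo {
    public static void main(String[] args) {
        // '2' is U+0032, so the leading digit of "2009" becomes "_x0032_"
        System.out.println(XPathTextUtils.encodeDigitsInPath("//my//path/2009//*"));
        // expected: //my//path/_x0032_009//*
    }
}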
From source file:ontopoly.conversion.ConversionUtils.java
private static void inferAndCreateSchema(TopicMapIF tm, TopicIF reifier) throws InvalidQueryException, MalformedURLException { // create tracker instance SchemaTracker tracker = new SchemaTracker(); // track topics and associations tracker.trackTopics(tm.getTopics()); tracker.trackAssociations(tm.getAssociations()); TopicMapBuilderIF tmbuilder = tm.getBuilder(); QueryProcessorIF queryProcessor = QueryUtils.getQueryProcessor(tm); QueryResultIF result = null;/*from w ww . ja v a 2 s . com*/ // declaration context DeclarationContextIF dc = QueryUtils.parseDeclarations(tm, "using on for i\"http://psi.ontopia.net/ontology/\" " + "using xtm for i\"http://www.topicmaps.org/xtm/1.0/core.xtm#\" " + "supertype-of($SUB, $SUP) :-" + " { xtm:superclass-subclass($SUB : xtm:subclass, $SUP : xtm:superclass) |" + " xtm:superclass-subclass($SUB : xtm:subclass, $X : xtm:superclass)," + " supertype-of($X, $SUP) }. " + "descendant-of($ANC, $DES) :- " + " { xtm:superclass-subclass($ANC : xtm:superclass, $DES : xtm:subclass) |" + " xtm:superclass-subclass($ANC : xtm:superclass, $MID : xtm:subclass)," + " descendant-of($MID, $DES) }."); // load superclass-subclass hierarchy Map<TopicIF, Collection<TopicIF>> subsup = new HashMap<TopicIF, Collection<TopicIF>>(); Map<TopicIF, Collection<TopicIF>> supsub = new HashMap<TopicIF, Collection<TopicIF>>(); result = queryProcessor.execute( "select $SUP, $SUB from xtm:superclass-subclass($SUB : xtm:subclass, $SUP : xtm:superclass)?", dc); try { while (result.next()) { TopicIF sup = (TopicIF) result.getValue(0); TopicIF sub = (TopicIF) result.getValue(1); // subtype to supertype mapping Collection<TopicIF> x = subsup.get(sub); if (x == null) { x = new HashSet<TopicIF>(); subsup.put(sub, x); } x.add(sup); // supertype to subtype mapping Collection<TopicIF> y = supsub.get(sup); if (y == null) { y = new HashSet<TopicIF>(); supsub.put(sup, y); } y.add(sub); } } finally { result.close(); } // aggregate all ontology types Collection<TopicIF> onto_types = tracker.getOntologyTypes(); // translate name scopes into name types ScopeIndexIF sindex = (ScopeIndexIF) tm.getIndex("net.ontopia.topicmaps.core.index.ScopeIndexIF"); Collection<TopicIF> nstypes = new HashSet<TopicIF>(); Iterator<TopicIF> nsiter = tracker.getSuspectNameScopes().iterator(); while (nsiter.hasNext()) { TopicIF ntheme = nsiter.next(); if (onto_types.contains(ntheme) || isTEDTopic(ntheme) || ntheme.equals(reifier)) continue; nstypes.add(ntheme); // translate name scope into name type Iterator<TopicNameIF> tniter = sindex.getTopicNames(ntheme).iterator(); while (tniter.hasNext()) { TopicNameIF tn = tniter.next(); tn.setType(ntheme); tn.removeTheme(ntheme); // WARN: what if basename have other themes in its name scope? } // register name field on topic type Iterator<TopicIF> nstiter = tracker.getNameScopeTopicTypes(ntheme).iterator(); while (nstiter.hasNext()) { TopicIF ttype = nstiter.next(); if (ttype == null) continue; // HACK: don't know what to do here. 
TopicIF nfield = registerNameType(ntheme, tm); registerNameField(ttype, nfield, topicByPSI(psibase.resolveAbsolute("cardinality-0-M"), tm), tm); } } // untyped topics Iterator<TopicIF> utyped = tracker.getUntypedTopics().iterator(); while (utyped.hasNext()) { TopicIF untyped = utyped.next(); if (untyped == null || onto_types.contains(untyped) || isTEDTopic(untyped) || untyped == reifier || nstypes.contains(untyped) || supsub.containsKey(untyped) || subsup.containsKey(untyped)) continue; registerUntypedTopic(untyped, tm); } // add super types to list of topic types Collection<TopicIF> all_topic_types = new HashSet<TopicIF>(tracker.getTopicTypes()); all_topic_types.addAll(supsub.keySet()); // get topmost super types Collection<TopicIF> topmost_types = getTopMostTypes(all_topic_types, subsup); // create schema Iterator<TopicIF> ttypes = all_topic_types.iterator(); while (ttypes.hasNext()) { TopicIF ttype = ttypes.next(); if (ttype == null || isTEDTopic(ttype)) continue; // topic type registerTopicType(ttype, tm); // register default name if (topmost_types.contains(ttype)) registerDefaultNameField(ttype, tm); // subject locator if (!isSubjectLocatorDeclaredOnSuperType(ttype, tracker, subsup)) { int maxcard = getBroadestSubjectLocatorMaxCardinality(ttype, tracker, supsub); if (maxcard > 0) { TopicIF subloc_card = getCardinalityTopic( getBroadestSubjectLocatorMinCardinality(ttype, tracker, supsub), maxcard, tm); registerSubjectLocatorField(ttype, subloc_card, tm); } } // subject indicator if (!isSubjectIndicatorDeclaredOnSuperType(ttype, tracker, subsup)) { int maxcard = getBroadestSubjectIndicatorMaxCardinality(ttype, tracker, supsub); if (maxcard > 0) { TopicIF subind_card = getCardinalityTopic( getBroadestSubjectIndicatorMinCardinality(ttype, tracker, supsub), maxcard, tm); registerSubjectIndicatorField(ttype, subind_card, tm); } } // name types Collection<TopicIF> n_decl_on_supertype = getNamesDeclaredOnSuperType(ttype, tracker, subsup); Iterator<TopicIF> ntypes = tracker.getNameTypes(ttype).iterator(); while (ntypes.hasNext()) { TopicIF ntype = ntypes.next(); if (ntype == null || isTEDTopic(ntype) || n_decl_on_supertype.contains(ntype)) continue; TopicIF cardinality = getCardinalityTopic( getBroadestNameTypeMinCardinality(ttype, ntype, tracker, supsub), getBroadestNameTypeMaxCardinality(ttype, ntype, tracker, supsub), tm); TopicIF nfield = registerNameType(ntype, tm); registerNameField(ttype, nfield, cardinality, tm); } // external occurrence types Collection<TopicIF> oe_decl_on_supertype = getExternalOccurrencesDeclaredOnSuperType(ttype, tracker, subsup); TopicIF datatype_uri = topicByPSI(xsdbase.resolveAbsolute("#anyURI"), tm); Iterator<TopicIF> oetypes = tracker.getExternalOccurrenceTypes(ttype).iterator(); while (oetypes.hasNext()) { TopicIF oetype = oetypes.next(); if (oetype == null || isTEDTopic(oetype) || oe_decl_on_supertype.contains(oetype)) continue; TopicIF cardinality = getCardinalityTopic( getBroadestExternalOccurrenceTypeMinCardinality(ttype, oetype, tracker, supsub), getBroadestExternalOccurrenceTypeMaxCardinality(ttype, oetype, tracker, supsub), tm); oetype = registerOccurrenceType(oetype, datatype_uri, tm); registerOccurrenceField(ttype, oetype, cardinality, tm); } // internal occurrence types Collection<TopicIF> oi_decl_on_supertype = getInternalOccurrencesDeclaredOnSuperType(ttype, tracker, subsup); TopicIF datatype_string = topicByPSI(xsdbase.resolveAbsolute("#string"), tm); Iterator<TopicIF> oitypes = tracker.getInternalOccurrenceTypes(ttype).iterator(); while 
(oitypes.hasNext()) { TopicIF oitype = oitypes.next(); if (oitype == null || isTEDTopic(oitype) || oi_decl_on_supertype.contains(oitype)) continue; TopicIF cardinality = getCardinalityTopic( getBroadestInternalOccurrenceTypeMinCardinality(ttype, oitype, tracker, supsub), getBroadestInternalOccurrenceTypeMaxCardinality(ttype, oitype, tracker, supsub), tm); oitype = registerOccurrenceType(oitype, datatype_string, tm); registerOccurrenceField(ttype, oitype, cardinality, tm); } } // association types Collection<TopicIF> excluded_atypes = new HashSet<TopicIF>(); excluded_atypes.add(topicByPSI(xtmbase.resolveAbsolute("#superclass-subclass"), tm)); excluded_atypes.add(topicByPSI(teqbase.resolveAbsolute("#hierarchical-relation-type"), tm)); Iterator<TopicIF> atypes = tracker.getAssociationTypes().iterator(); while (atypes.hasNext()) { TopicIF atype = atypes.next(); if (atype == null || isTEDTopic(atype) || excluded_atypes.contains(atype)) continue; // association type atype = registerAssociationType(atype, tm); // symmetric association if (tracker.isSymmetricAssociationType(atype)) addAssociation1(psibase.resolveAbsolute("is-symmetric"), atype, psibase.resolveAbsolute("association-type"), tm); // role types Iterator<TopicIF> rtypes = tracker.getRoleTypes(atype).iterator(); while (rtypes.hasNext()) { TopicIF rtype = rtypes.next(); if (rtype == null || isTEDTopic(rtype)) continue; // role type rtype = registerRoleType(rtype, tm); // count player type instances Collection<TopicIF> ptypes = tracker.getPlayerTypes(atype, rtype); int ptypes_count = 0; Iterator<TopicIF> ptiter = ptypes.iterator(); while (ptiter.hasNext()) { ptypes_count += tracker.getTopicTypeInstances(ptiter.next()); } // use search-dialog if drop-down list too long (> 50 elements) TopicIF interfaceControl; if (ptypes_count > 50) interfaceControl = topicByPSI(psibase.resolveAbsolute("search-dialog"), tm); else interfaceControl = topicByPSI(psibase.resolveAbsolute("drop-down-list"), tm); TopicIF rfield = registerRoleField(atype, rtype, interfaceControl, tm); ptiter = ptypes.iterator(); while (ptiter.hasNext()) { TopicIF ptype = ptiter.next(); if (ptype == null) ptype = getUntypedTopic(psibase, tm); else if (isTEDTopic(ptype)) continue; if (!isRoleDeclaredOnSuperType(ptype, ptypes, subsup)) { TopicIF cardinality = getCardinalityTopic( getBroadestPlayerTypeMinCardinality(atype, rtype, ptype, tracker, supsub), getBroadestPlayerTypeMaxCardinality(atype, rtype, ptype, tracker, supsub), tm); registerPlayerTypeField(rfield, ptype, cardinality, tm); } } } } // remove identity fields duplicated on subtypes // remove name fields duplicated on subtypes // remove occurrence fields duplicated on subtypes // remove fields duplicated on subtypes result = queryProcessor.execute( "select $A1 from direct-instance-of($TTYPE, on:topic-type), descendant-of($XTYPE, $TTYPE), " + "role-player($R1, $TTYPE), type($R1, on:field-owner), " + "association-role($A1, $R1), type($A1, on:has-field), " + "association-role($A1, $R2), $R1 /= $R2, type($R2, on:field-definition), role-player($R2, $FIELD), " + "role-player($R3, $XTYPE), type($R3, on:field-owner), " + "association-role($A2, $R3), type($A2, on:has-field), " + "association-role($A2, $R4), $R3 /= $R4, type($R4, on:field-definition), role-player($R4, $FIELD)?", dc); try { while (result.next()) { AssociationIF assoc = (AssociationIF) result.getValue(0); assoc.remove(); } } finally { result.close(); } // generate field order TopicIF ted_field_order = topicByPSI(psibase.resolveAbsolute("field-order"), tm); 
List<Object[]> fields = new ArrayList<Object[]>(); result = queryProcessor.execute("/* #OPTION: optimizer.reorder=false */ " + "select $topic, $owner, $field from " + "direct-instance-of($topic, on:topic-type), { descendant-of($owner, $topic) | $owner = $topic }, " + "role-player($R1, $owner), type($R1, on:field-owner), " + "association-role($A, $R1), type($A, on:has-field), " + "association-role($A, $R2), $R1 /= $R2, type($R2, on:field-definition), role-player($R2, $field), " + "not(instance-of($topic, on:system-topic))" + "order by $topic ?", dc); try { while (result.next()) { fields.add(result.getValues()); } } finally { result.close(); } // sort the fields Collections.sort(fields, new FieldsComparator(tm, psibase)); // update/create field order values int fOrder = 1000; TopicIF prevTopic = null; Iterator<Object[]> fiter = fields.iterator(); while (fiter.hasNext()) { Object[] f = fiter.next(); TopicIF curTopic = (TopicIF) f[0]; if (prevTopic != curTopic) { fOrder = 1000; prevTopic = curTopic; } // create new field order OccurrenceIF occ = tmbuilder.makeOccurrence(curTopic, ted_field_order, StringUtils.leftPad(Integer.toString(fOrder), 9, '0')); occ.addTheme((TopicIF) f[2]); fOrder = fOrder + 1000; } }