Usage examples for java.util.HashSet#size(), collected from real-world source files.
public int size()
From source file:exm.stc.frontend.ASTWalker.java
private void waitStmt(Context context, SwiftAST tree) throws UserException { Wait wait = Wait.fromAST(context, tree); ArrayList<Var> waitEvaled = new ArrayList<Var>(); for (SwiftAST expr : wait.getWaitExprs()) { Type waitExprType = TypeChecker.findExprType(context, expr); if (Types.isUnion(waitExprType)) { // Choose first alternative type for (Type alt : UnionType.getAlternatives(waitExprType)) { if (Types.canWaitForFinalize(alt)) { waitExprType = alt;// ww w . j av a 2 s . c om break; } } } if (!Types.canWaitForFinalize(waitExprType)) { throw new TypeMismatchException(context, "Waiting for type " + waitExprType.typeName() + " is not supported"); } Var res = exprWalker.eval(context, expr, waitExprType, false, null); waitEvaled.add(res); } ArrayList<Var> keepOpenVars = new ArrayList<Var>(); summariseBranchVariableUsage(context, Arrays.asList(wait.getBlock().getVariableUsage()), keepOpenVars); // Quick sanity check to see we're not directly blocking // on any arrays written inside HashSet<String> waitVarSet = new HashSet<String>(Var.nameList(waitEvaled)); waitVarSet.retainAll(Var.nameList(keepOpenVars)); if (waitVarSet.size() > 0) { throw new UserException(context, "Deadlock in wait statement. The following arrays are written " + "inside the body of the wait: " + waitVarSet.toString()); } backend.startWaitStatement(context.getFunctionContext().constructName("explicit-wait"), VarRepr.backendVars(waitEvaled), WaitMode.WAIT_ONLY, true, wait.isDeepWait(), ExecTarget.nonDispatchedControl()); block(LocalContext.fnSubcontext(context), wait.getBlock()); backend.endWaitStatement(); }
From source file:europarl.PhraseTranslation.java
/**
 * Post-processes the training data in place: prunes rare translation classes
 * (keeping roughly the top-N classes by occurrence count) and then removes
 * words that are too rare or too frequent to be useful attributes. Both
 * {@code translations} and {@code words_list} are kept coherent — a removal
 * from one is mirrored in the other.
 *
 * @param translations phrase translations; instances may be removed
 * @param words_list   bag of word occurrence counts; entries may be removed
 */
private void postProcessData(ArrayList<PhraseTranslation> translations, Bag<String> words_list) {
    log.debug("Start preprocessing");
    HashSet<String> to_remove = new HashSet<String>();

    // BEGIN removing too many classes: count occurrences per translated word.
    Bag<String> classes = new Bag<String>();
    for (PhraseTranslation phraseTranslation : translations) {
        classes.add(phraseTranslation.getTranslatedWord());
    }
    if (log.isDebugEnabled())
        for (String translation : classes)
            if (classes.getCount(translation) > 2)
                System.out.println("Class " + translation + " : " + classes.getCount(translation));

    // Sort class occurrence counts ascending to find the cutoff threshold.
    ArrayList<Integer> class_occurrencies = new ArrayList<Integer>(classes.values());
    java.util.Collections.sort(class_occurrencies);
    System.out.println("CLASS OCC " + class_occurrencies);

    // Mark for removal every translation whose class falls below the
    // occurrence count of the N-th most frequent class ("target_classes",
    // default 4).
    // NOTE(review): if there are fewer classes than "target_classes", the
    // index below goes negative and get() throws — confirm callers guarantee
    // enough classes.
    ArrayList<PhraseTranslation> tr_to_remove = new ArrayList<PhraseTranslation>();
    for (String cl : classes) {
        if (classes.getCount(cl) < class_occurrencies
                .get(class_occurrencies.size() - Cfg.cfg.getInt("target_classes", 4))) {
            for (PhraseTranslation phraseTranslation : translations) {
                if (phraseTranslation.getTranslatedWord().equals(cl))
                    tr_to_remove.add(phraseTranslation);
            }
        }
    }
    // Remove the marked translations, decrementing word counts coherently.
    for (PhraseTranslation phraseTranslation : tr_to_remove) {
        for (String word : phraseTranslation.getPhraseWords()) {
            words_list.countdown(word);
        }
        translations.remove(phraseTranslation);
    }
    System.out.println(translations.size());
    // END removing too many classes

    // BEGIN removing "useless" words: words with fewer than K occurrences, or
    // occurring in at least half of the remaining translations.
    for (String word : words_list) {
        // Sanity assertion tying the code to the expected configured minimum.
        assert 2 == Cfg.cfg.getInt("minimum_word_occurrencies");
        if (words_list.getCount(word) <= Cfg.cfg.getInt("minimum_word_occurrencies")
                || words_list.getCount(word) >= translations.size() * 50 / 100) {
            log.debug(word + "occurs only" + words_list.getCount(word) + " times");
            to_remove.add(word);
        }
    }
    for (String word : to_remove) {
        words_list.remove(word);
        for (PhraseTranslation trans : translations)
            trans.removeWord(word);
    }
    log.info("Useless words: " + to_remove.size() + ". Now: " + words_list.size());
    to_remove.clear();
    // END removing "useless" words
}
From source file:org.ourbeehive.mbp.builder.SelectBuilder.java
private void computeSelectClause(MapperSql mapperSql, MapperProfile mapperProfile, OneToOneIdx oneToOneIndex, ResultMapConfig resultMapConfig, OrmTable ormTable) throws AppException { // Get table name and table alias. String tableName = null;//from w w w . j a v a 2s. co m String tableAlias = null; if (oneToOneIndex != null) { tableName = oneToOneIndex.getSonTableName(); tableAlias = oneToOneIndex.getSonTableAlias(); } else { tableName = resultMapConfig.getTableName(); tableAlias = resultMapConfig.getTableAlias(); } // Compute the select clause according to the the OrmClass definition. HashSet<String> includedAttr = ProfileHelper.getIncludedAttrName(resultMapConfig); HashSet<String> excludedAttr = ProfileHelper.getExcludedAttrName(resultMapConfig); List<OrmColumn> ormColumnList = ormTable.getColumnList(); OrmColumn ormColumn = null; String columnName = null; String attrName = null; StringBuffer selectClause = mapperSql.getSelectClause(); StringBuffer columnNameClause = mapperSql.getColumnNameClause(); StringBuffer columnAliasClause = mapperSql.getColumnAliasClause(); for (int i = 0; i < ormColumnList.size(); i++) { ormColumn = ormColumnList.get(i); columnName = ormColumn.getName(); attrName = JavaFormatter.getJavaStyle(columnName, false); // Check inclusion and exclusion, inclusion take higher preference. if (includedAttr.size() != 0) { if (includedAttr.contains(attrName) == false) { logger.debug( "INCLUDE PROPERTY: Property '" + attrName + "' is NOT in the inclusion list, skipped."); continue; } } else if (excludedAttr.size() != 0) { if (excludedAttr.contains(attrName) == true) { logger.debug( "EXCLUDE PROPERTY: Property '" + attrName + "' is in the exclusion list, skipped."); continue; } } // Handle the select clause, considering the case of the first column. 
if (StringUtils.isBlank(selectClause)) { selectClause.append(MapperFormatter.getSelectStmtWithPrefix(resultMapConfig)); } else { selectClause.append(MapperElm.COMMA + MapperElm.WHITE_SPACE); } selectClause .append(MapperFormatter.getColumnNamePair(mapperProfile, tableName, tableAlias, columnName)); MapperFormatter.checkWidth(selectClause); // Handle the column name clause if (StringUtils.isNotBlank(columnNameClause)) { columnNameClause.append(MapperElm.COMMA + MapperElm.WHITE_SPACE); } columnNameClause .append(MapperFormatter.getColumnNamePair(mapperProfile, tableName, tableAlias, columnName)); MapperFormatter.checkWidth(columnNameClause); // Handle the column alias clause. if (StringUtils.isNotBlank(columnAliasClause)) { columnAliasClause.append(MapperElm.COMMA + MapperElm.WHITE_SPACE); } columnAliasClause .append(MapperFormatter.getColumnAlias(mapperProfile, tableName, tableAlias, columnName)); MapperFormatter.checkWidth(columnAliasClause); } }
From source file:com.microsoft.azure.keyvault.test.CertificateOperationsTest.java
/**
 * Lists versions of a certificate in a vault: imports the same PKCS#12
 * certificate {@code MAX_CERTS} times (creating that many versions), verifies
 * the paged listing covers every created version, then deletes the
 * certificate.
 */
@Test
public void listCertificateVersions() throws Exception {
    String certificateName = "listCertificateVersions";
    // Base64-encoded PKCS#12 bundle used as the content for every imported version.
    String certificateContent = "MIIJOwIBAzCCCPcGCSqGSIb3DQEHAaCCCOgEggjkMIII4DCCBgkGCSqGSIb3DQEHAaCCBfoEggX2MIIF8jCCBe4GCyqGSIb3DQEMCgECoIIE/jCCBPowHAYKKoZIhvcNAQwBAzAOBAj15YH9pOE58AICB9AEggTYLrI+SAru2dBZRQRlJY7XQ3LeLkah2FcRR3dATDshZ2h0IA2oBrkQIdsLyAAWZ32qYR1qkWxLHn9AqXgu27AEbOk35+pITZaiy63YYBkkpR+pDdngZt19Z0PWrGwHEq5z6BHS2GLyyN8SSOCbdzCz7blj3+7IZYoMj4WOPgOm/tQ6U44SFWek46QwN2zeA4i97v7ftNNns27ms52jqfhOvTA9c/wyfZKAY4aKJfYYUmycKjnnRl012ldS2lOkASFt+lu4QCa72IY6ePtRudPCvmzRv2pkLYS6z3cI7omT8nHP3DymNOqLbFqr5O2M1ZYaLC63Q3xt3eVvbcPh3N08D1hHkhz/KDTvkRAQpvrW8ISKmgDdmzN55Pe55xHfSWGB7gPw8sZea57IxFzWHTK2yvTslooWoosmGxanYY2IG/no3EbPOWDKjPZ4ilYJe5JJ2immlxPz+2e2EOCKpDI+7fzQcRz3PTd3BK+budZ8aXX8aW/lOgKS8WmxZoKnOJBNWeTNWQFugmktXfdPHAdxMhjUXqeGQd8wTvZ4EzQNNafovwkI7IV/ZYoa++RGofVR3ZbRSiBNF6TDj/qXFt0wN/CQnsGAmQAGNiN+D4mY7i25dtTu/Jc7OxLdhAUFpHyJpyrYWLfvOiS5WYBeEDHkiPUa/8eZSPA3MXWZR1RiuDvuNqMjct1SSwdXADTtF68l/US1ksU657+XSC+6ly1A/upz+X71+C4Ho6W0751j5ZMT6xKjGh5pee7MVuduxIzXjWIy3YSd0fIT3U0A5NLEvJ9rfkx6JiHjRLx6V1tqsrtT6BsGtmCQR1UCJPLqsKVDvAINx3cPA/CGqr5OX2BGZlAihGmN6n7gv8w4O0k0LPTAe5YefgXN3m9pE867N31GtHVZaJ/UVgDNYS2jused4rw76ZWN41akx2QN0JSeMJqHXqVz6AKfz8ICS/dFnEGyBNpXiMRxrY/QPKi/wONwqsbDxRW7vZRVKs78pBkE0ksaShlZk5GkeayDWC/7Hi/NqUFtIloK9XB3paLxo1DGu5qqaF34jZdktzkXp0uZqpp+FfKZaiovMjt8F7yHCPk+LYpRsU2Cyc9DVoDA6rIgf+uEP4jppgehsxyT0lJHax2t869R2jYdsXwYUXjgwHIV0voj7bJYPGFlFjXOp6ZW86scsHM5xfsGQoK2Fp838VT34SHE1ZXU/puM7rviREHYW72pfpgGZUILQMohuTPnd8tFtAkbrmjLDo+k9xx7HUvgoFTiNNWuq/cRjr70FKNguMMTIrid+HwfmbRoaxENWdLcOTNeascER2a+37UQolKD5ksrPJG6RdNA7O2pzp3micDYRs/+s28cCIxO//J/d4nsgHp6RTuCu4+Jm9k0YTw2Xg75b2cWKrxGnDUgyIlvNPaZTB5QbMid4x44/lE0LLi9kcPQhRgrK07OnnrMgZvVGjt1CLGhKUv7KFc3xV1r1rwKkosxnoG99oCoTQtregcX5rIMjHgkc1IdflGJkZzaWMkYVFOJ4Weynz008i4ddkske5vabZs37Lb8iggUYNBYZyGzalruBgnQyK4fz38Fae4nWYjyildVfgyo/fCePR2ovOfphx9OQJi+M9BoFmPrAg+8ARDZ+R+5yzYuEc9ZoVX7nkp7LTGB3DANBgkrBgEEAYI3EQIxADATBgkqhkiG9w0BCRUxBgQEAQAAADBXBgkqhkiG9w0BCRQxSh5IAGEAOAAwAGQAZgBmADgANgAtAGUAOQA2AGUALQA0ADIAMgA0AC0AYQBhADEAMQAtAGIAZAAxADkANABkADUAYQA2AGIANwA3MF0GCSsGAQQBgjcRATFQHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAHQAcgBvAG4AZwAgAEMAcgB5AHAAdABvAGcAcgBhAHAAaABpAGMAIABQAHIAbwB2AGkAZABlAHIwggLPBgkqhkiG9w0BBwagggLAMIICvAIBADCCArUGCSqGSIb3DQEHATAcBgoqhkiG9w0BDAEGMA4ECNX+VL2MxzzWAgIH0ICCAojmRBO+CPfVNUO0s+BVuwhOzikAGNBmQHNChmJ/pyzPbMUbx7tO63eIVSc67iERda2WCEmVwPigaVQkPaumsfp8+L6iV/BMf5RKlyRXcwh0vUdu2Qa7qadD+gFQ2kngf4Dk6vYo2/2HxayuIf6jpwe8vql4ca3ZtWXfuRix2fwgltM0bMz1g59d7x/glTfNqxNlsty0A/rWrPJjNbOPRU2XykLuc3AtlTtYsQ32Zsmu67A7UNBw6tVtkEXlFDqhavEhUEO3dvYqMY+QLxzpZhA0q44ZZ9/ex0X6QAFNK5wuWxCbupHWsgxRwKftrxyszMHsAvNoNcTlqcctee+ecNwTJQa1/MDbnhO6/qHA7cfG1qYDq8Th635vGNMW1w3sVS7l0uEvdayAsBHWTcOC2tlMa5bfHrhY8OEIqj5bN5H9RdFy8G/W239tjDu1OYjBDydiBqzBn8HG1DSj1Pjc0kd/82d4ZU0308KFTC3yGcRad0GnEH0Oi3iEJ9HbriUbfVMbXNHOF+MktWiDVqzndGMKmuJSdfTBKvGFvejAWVO5E4mgLvoaMmbchc3BO7sLeraHnJN5hvMBaLcQI38N86mUfTR8AP6AJ9c2k514KaDLclm4z6J8dMz60nUeo5D3YD09G6BavFHxSvJ8MF0Lu5zOFzEePDRFm9mH8W0N/sFlIaYfD/GWU/w44mQucjaBk95YtqOGRIj58tGDWr8iUdHwaYKGqU24zGeRae9DhFXPzZshV1ZGsBQFRaoYkyLAwdJWIXTi+c37YaC8FRSEnnNmS79Dou1Kc3BvK4EYKAD2KxjtUebrV174gD0Q+9YuJ0GXOTspBvCFd5VT2Rw5zDNrA/J3F5fMCk4wOzAfMAcGBSsOAwIaBBSxgh2xyF+88V4vAffBmZXv8Txt4AQU4O/NX4MjxSodbE7ApNAMIvrtREwCAgfQ";
    String certificatePassword = "123";

    // Set content type to indicate the certificate is PKCS12 format.
    SecretProperties secretProperties = new SecretProperties();
    secretProperties.withContentType(MIME_PKCS12);
    CertificatePolicy certificatePolicy = new CertificatePolicy();
    certificatePolicy.withSecretProperties(secretProperties);

    // IDs of every version created, to be checked off against the listing.
    HashSet<String> certificates = new HashSet<String>();
    for (int i = 0; i < MAX_CERTS; ++i) {
        int failureCount = 0;
        // Retry loop: back off linearly on service throttling; any other
        // service error fails the test immediately.
        // NOTE(review): retries on "Throttled" are unbounded — confirm this
        // cannot hang the test run indefinitely.
        for (;;) {
            try {
                CertificateBundle certificateBundle = keyVaultClient.importCertificate(
                        new ImportCertificateRequest.Builder(getVaultUri(), certificateName, certificateContent)
                                .withPassword(certificatePassword).withPolicy(certificatePolicy).build());
                CertificateIdentifier id = certificateBundle.certificateIdentifier();
                certificates.add(id.identifier());
                break;
            } catch (KeyVaultErrorException e) {
                ++failureCount;
                if (e.body().error().code().equals("Throttled")) {
                    System.out.println("Waiting to avoid throttling");
                    Thread.sleep(failureCount * 1500);
                    continue;
                }
                throw e;
            }
        }
    }

    // First page must not exceed the requested page size.
    PagedList<CertificateItem> listResult = keyVaultClient.listCertificateVersions(getVaultUri(),
            certificateName, PAGELIST_MAX_CERTS);
    Assert.assertTrue(PAGELIST_MAX_CERTS >= listResult.currentPage().items().size());

    // Full listing must contain every version we created: check each off.
    listResult = keyVaultClient.listCertificateVersions(getVaultUri(), certificateName);
    for (CertificateItem item : listResult) {
        if (item != null) {
            certificates.remove(item.id());
        }
    }
    Assert.assertEquals(0, certificates.size());

    keyVaultClient.deleteCertificate(getVaultUri(), certificateName);
}
From source file:com.clust4j.algo.AbstractClusterer.java
/**
 * Validates and copies the input matrix in a single pass, computing per-column
 * summary statistics (variance, std. dev, mean, max, min) on the fly and
 * detecting the degenerate all-identical-values case.
 *
 * @param data input matrix; must contain no NaN entries
 * @return a new {@link Array2DRowRealMatrix} wrapping the copied data (no
 *         second copy is made)
 */
final private Array2DRowRealMatrix initData(final RealMatrix data) {
    final int m = data.getRowDimension(), n = data.getColumnDimension();
    final double[][] ref = new double[m][n];

    // Tracks distinct entry values; a single distinct value across the whole
    // matrix marks the input as singular (see flag set at the end).
    final HashSet<Double> unique = new HashSet<>();

    // Used to compute variance on the fly for summaries later...
    double[] sum = new double[n];
    double[] sumSq = new double[n];
    double[] maxes = VecUtils.rep(Double.NEGATIVE_INFINITY, n);
    double[] mins = VecUtils.rep(Double.POSITIVE_INFINITY, n);

    // This will store summaries for each column + a header
    ModelSummary summaries = new ModelSummary(
            new Object[] { "Feature #", "Variance", "Std. Dev", "Mean", "Max", "Min" });

    // Internally performs the copy while accumulating statistics.
    double entry;
    for (int i = 0; i < m; i++) {
        for (int j = 0; j < n; j++) {
            entry = data.getEntry(i, j);
            if (Double.isNaN(entry)) {
                error(new NaNException("NaN in input data. " + "Select a matrix imputation method for "
                        + "incomplete records"));
            } else {
                // copy the entry
                ref[i][j] = entry;
                unique.add(entry);

                // capture stats...
                sumSq[j] += entry * entry;
                sum[j] += entry;
                maxes[j] = FastMath.max(entry, maxes[j]);
                mins[j] = FastMath.min(entry, mins[j]);

                // On the last row all accumulators are complete, so the
                // column summary can be emitted.
                // NOTE(review): variance via sum-of-squares is numerically
                // unstable for large magnitudes — confirm acceptable here.
                if (i == m - 1) {
                    double var = (sumSq[j] - (sum[j] * sum[j]) / (double) m) / ((double) m - 1.0);
                    if (var == 0) {
                        warn("zero variance in feature " + j);
                    }
                    summaries.add(new Object[] { j, // feature num
                            var, // var
                            m < 2 ? Double.NaN : FastMath.sqrt(var), // std dev
                            sum[j] / (double) m, // mean
                            maxes[j], // max
                            mins[j] // min
                    });
                }
            }
        }
    }

    // Log the summaries
    summaryLogger(formatter.format(summaries));

    // One distinct value in the entire matrix => singular input.
    if (unique.size() == 1)
        this.singular_value = true;

    // Don't need to copy again, because already internally copied...
    return new Array2DRowRealMatrix(ref, false);
}
From source file:net.sourceforge.mipa.predicatedetection.lattice.sequence.SequenceWindowedLatticeChecker.java
/**
 * Propagates reachable automaton states through the lattice starting from
 * {@code minCGS}, re-seeding the minimum CGS from its satisfied predicates and
 * then breadth-first re-computing successors whose reachable-state sets
 * changed. Successors in direction {@code id} are only expanded when a node's
 * state set changed, or (unchanged case) when the node still sits at the start
 * of process {@code id}'s window.
 *
 * Side effects: updates {@code updateNumber} (nodes visited) and
 * {@code updateTime} (elapsed millis).
 *
 * @param minCGS starting (minimum) consistent global state; no-op when null
 * @param maxCGS maximum CGS — NOTE(review): currently unused by the active
 *               code below; confirm whether it can be dropped
 * @param id     index of the process whose local state advanced
 */
private void updateReachableState(SequenceLatticeIDNode minCGS, AbstractLatticeIDNode maxCGS, int id) {
    if (minCGS == null) {
        return;
    }
    // (A large block of superseded, commented-out code — an earlier version of
    // this traversal — was removed here for readability; see VCS history.)

    long time = (new Date()).getTime();
    // Worklist BFS over the lattice; setID records visited node IDs.
    ArrayList<SequenceLatticeIDNode> set = new ArrayList<SequenceLatticeIDNode>();
    ArrayList<String> setID = new ArrayList<String>();
    set.add(minCGS);
    while (!set.isEmpty()) {
        SequenceLatticeIDNode node = set.remove(0);
        if (!setID.contains(StringUtils.join(node.getID(), ' '))) {
            setID.add(StringUtils.join(node.getID(), ' '));
            // Snapshot the node's reachable states before recomputation so we
            // can detect whether the set changed.
            HashSet<State> oriState = new HashSet<State>();
            Iterator<State> iterator = node.getReachedStates().iterator();
            while (iterator.hasNext()) {
                oriState.add(iterator.next());
            }
            if (node.equals(minCGS)) {
                // Seed the start node: one automaton step per satisfied predicate.
                node.getReachedStates().clear();
                String[] string = node.getSatisfiedPredicates().split(" ");
                for (int i = 0; i < string.length; i++) {
                    State state = automaton.getInitialState().step(string[i].charAt(0));
                    node.addReachedStates(state);
                }
                if (DEBUG) {
                    // Debug dump; its cost is excluded via wastedTime.
                    long time_t = (new Date()).getTime();
                    out.print("[ ");
                    for (int i = 0; i < node.getID().length; i++) {
                        out.print(node.getID()[i] + " ");
                    }
                    out.print("]: satisfied predicates: " + node.getSatisfiedPredicates());
                    out.print(" reachable states: ");
                    Iterator<State> it = node.getReachedStates().iterator();
                    while (it.hasNext()) {
                        State state = it.next();
                        out.print(state.getName() + " ");
                    }
                    out.println();
                    out.flush();
                    wastedTime += (new Date()).getTime() - time_t;
                }
            } else {
                // Interior node: recompute reachable states from predecessors.
                node.getReachedStates().clear();
                computeReachableStates(node);
            }
            // flag == true iff the recomputed state set equals the snapshot
            // (compared by state name).
            boolean flag = true;
            if (oriState.size() == node.getReachedStates().size()) {
                String ori = "";
                iterator = oriState.iterator();
                while (iterator.hasNext()) {
                    State state = iterator.next();
                    ori += state.getName() + " ";
                }
                String news = "";
                iterator = node.getReachedStates().iterator();
                while (iterator.hasNext()) {
                    State state = iterator.next();
                    news += state.getName() + " ";
                }
                String[] oriStates = ori.trim().split(" ");
                String[] newStates = news.trim().split(" ");
                for (int j = 0; j < oriStates.length; j++) {
                    String s = oriStates[j];
                    boolean f = false;
                    for (int k = 0; k < newStates.length; k++) {
                        if (s.equals(newStates[k])) {
                            f = true;
                            break;
                        }
                    }
                    if (f == false) {
                        flag = false;
                        break;
                    }
                }
            } else {
                flag = false;
            }
            if (flag == false) {
                // State set changed: enqueue every lattice successor
                // (increment each coordinate by one).
                for (int i = 0; i < children.length; i++) {
                    String[] index = new String[children.length];
                    for (int j = 0; j < children.length; j++) {
                        index[j] = node.getID()[j];
                    }
                    index[i] = Integer.toString(Integer.valueOf(index[i]) + 1);
                    String ID = StringUtils.join(index, ' ');
                    if (getMappedLattice().get(ID) != null) {
                        set.add((SequenceLatticeIDNode) getMappedLattice().get(ID));
                    }
                }
            } else {
                // [id] not change: only expand in the other directions, and
                // only while this node is at the start of process id's window.
                if (node.getID()[id].equals(windowedLocalStateSet.get(id).get(0).getID())) {
                    for (int i = 0; i < children.length; i++) {
                        if (i == id) {
                            continue;
                        }
                        String[] index = new String[children.length];
                        for (int j = 0; j < children.length; j++) {
                            index[j] = node.getID()[j];
                        }
                        index[i] = Integer.toString(Integer.valueOf(index[i]) + 1);
                        String ID = StringUtils.join(index, ' ');
                        if (getMappedLattice().get(ID) != null) {
                            set.add((SequenceLatticeIDNode) getMappedLattice().get(ID));
                        }
                    }
                }
            }
        }
    }
    updateNumber = setID.size();
    updateTime = (new Date()).getTime() - time;
}
From source file:es.caib.seycon.ng.servei.XarxaServiceImpl.java
private boolean sonAliasIguales(String alias1, String alias2) { if ((alias1 == null && alias2 != null) || (alias1 != null && alias2 == null)) return false; // solo 1 nulo if (alias1 == null && alias2 == null) return true; // ambos nulos HashSet h_alias1 = new HashSet(); HashSet h_alias2 = new HashSet(); // alias1 y alias2 NO son nulos String[] v_alias1 = alias1.split(" "); //$NON-NLS-1$ String[] v_alias2 = alias2.split(" "); //$NON-NLS-1$ // Los guardamos en los sets if (v_alias1 != null) for (int i = 0; i < v_alias1.length; i++) { String act = v_alias1[i]; if (act != null && !"".equals(act.trim())) //$NON-NLS-1$ h_alias1.add(act);//from www . ja va 2 s.c o m } if (v_alias2 != null) for (int i = 0; i < v_alias2.length; i++) { String act = v_alias2[i]; if (act != null && !"".equals(act.trim())) //$NON-NLS-1$ h_alias2.add(act); } if (h_alias1.size() != h_alias2.size()) return false; // No tienen el mismo tamao // Los comparamos buscando todos los del primero en el segundo: for (Iterator it = h_alias1.iterator(); it.hasNext();) { String elem = (String) it.next(); if (!h_alias2.contains(elem)) return false; } return true; }
From source file:dao.PblogTopicDaoDb.java
/** * Get blogs within a specific month/* ww w . j a v a 2 s . c om*/ * @param pBlogId - the personal blogid * @param accessFlag - the access Flag, which indicates the datasource from which topics are retrieved. * READ_FROM_MASTER (1), READ_FROM_SLAVE(0) * @param getLoginInfo - get the blogger's information * @param entrydate - the date * @return HashSet - the dates for each topic * @throws BaseDaoException - when error occurs */ public HashSet getMonthlyTopicDates(String pBlogId, int accessFlag, boolean getLoginInfo, String entrydate) throws BaseDaoException { if (RegexStrUtil.isNull(pBlogId)) { throw new BaseDaoException("params are null"); } /** * Get scalability ds for topics, topics partitioned on pBlogId */ String sourceName = scalabilityManager.getReadScalability(pBlogId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getTopicDates() " + sourceName + " pBlogId = " + pBlogId); } HashSet result = null; Connection conn = null; try { conn = ds.getConnection(); result = getMonthlyQuery.run(conn, pBlogId, MyUtils.getMonthNum(entrydate), MyUtils.getYear(entrydate)); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error occured while listing pblogtopics getMonthlyQuery()" + pBlogId, e1); } throw new BaseDaoException("error in getMonthlyQuery(), pBlogId =" + pBlogId, e); } try { if (conn != null) { conn.close(); } } catch (Exception e2) { throw new BaseDaoException("conn.close() error in getMonthlyQuery(), pBlogId =" + pBlogId, e2); } HashSet dateSet = null; if ((result != null) && (result.size() > 0)) { Iterator it = result.iterator(); dateSet = new HashSet(); while (it.hasNext()) { Blog blog = (Blog) it.next(); dateSet.add(blog.getValue(DbConstants.ENTRY_DATE)); } } return dateSet; /* if ((result != null) && (result.size() > 0) ) { Iterator it = result.iterator(); while (it.hasNext() ) { Blog blog = (Blog)it.next(); String date = 
blog.getValue(DbConstants.ENTRY_DATE); if (date.equals(entrydate)) { //treeCache.put(fqn, pBlogId, result); return result; } } } */ }
From source file:com.linkedin.databus2.relay.TestGoldenGateEventProducer.java
private void testAddEventToBufferRateControl(long throttleDurationInSecs) throws InvalidConfigException, UnsupportedKeyException, DatabusException, NoSuchFieldException, IllegalAccessException { // 1 event per second required. Send 5 events. Must have 4 sleeps. long rate = 1; int numEvents = 5; PhysicalSourceStaticConfig pssc = buildPssc(rate, throttleDurationInSecs); long scn = 10; DbusEventBufferAppendable mb = createBufMult(pssc); GoldenGateEventProducer gg = new GoldenGateEventProducer(pssc, null, mb, null, null); // enable if want to run with mocked timer // run_with_mock_timer(gg); int sourceId = 505; HashSet<DBUpdateImage> db = new HashSet<DBUpdateImage>(); // name1 is the only key ColumnsState.KeyPair kp1 = new ColumnsState.KeyPair(new String("name1"), Schema.Type.RECORD); ArrayList<ColumnsState.KeyPair> keyPairs = new ArrayList<ColumnsState.KeyPair>(numEvents); keyPairs.add(kp1);//ww w. j a va 2 s.c om Schema s = Schema.parse(avroSchema2); GenericRecord gr1 = new GenericData.Record(s); gr1.put("name1", "phani1"); gr1.put("name2", "boris1"); GenericRecord gr2 = new GenericData.Record(s); gr2.put("name1", "phani2"); gr2.put("name2", "boris2"); GenericRecord gr3 = new GenericData.Record(s); gr3.put("name1", "phani3"); gr3.put("name2", "boris3"); GenericRecord gr4 = new GenericData.Record(s); gr4.put("name1", "phani4"); gr4.put("name2", "boris4"); GenericRecord gr5 = new GenericData.Record(s); gr5.put("name1", "phani5"); gr5.put("name2", "boris5"); DBUpdateImage dbi1 = new DBUpdateImage(keyPairs, scn, gr1, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false); DBUpdateImage dbi2 = new DBUpdateImage(keyPairs, scn, gr2, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false); DBUpdateImage dbi3 = new DBUpdateImage(keyPairs, scn, gr3, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false); DBUpdateImage dbi4 = new DBUpdateImage(keyPairs, scn, gr4, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false); DBUpdateImage dbi5 = new DBUpdateImage(keyPairs, scn, gr5, s, 
DbUpdateState.DBUpdateImage.OpType.INSERT, false); db.add(dbi1); db.add(dbi2); db.add(dbi3); db.add(dbi4); db.add(dbi5); // For a given transaction, and logical source : only 1 update ( the last one succeeds ) Assert.assertEquals(1, db.size()); // Generate 5 transactions with the same update for (int i = 0; i < numEvents; i++) { List<TransactionState.PerSourceTransactionalUpdate> dbUpdates = new ArrayList<TransactionState.PerSourceTransactionalUpdate>( 10); TransactionState.PerSourceTransactionalUpdate dbUpdate = new TransactionState.PerSourceTransactionalUpdate( sourceId, db); dbUpdates.add(dbUpdate); long timestamp = 60; gg.addEventToBuffer(dbUpdates, new TransactionInfo(0, 0, timestamp, scn)); scn++; } // It may not sleep the very first time as 1 second may have elapsed from when the rate control got started to when event in // getting inserted. Subsequently, expect rate control to kick in long numSleeps = Math.min(numEvents, throttleDurationInSecs); Assert.assertEquals(gg.getRateControl().getNumSleeps(), numSleeps); gg.getRateControl().resetNumSleeps(); return; }
From source file:dao.PblogTopicDaoDb.java
/** * Allow users to view it, with session or without session * topics are retrieved from datasource that is based on accessFlag * Retrieves topics based on pBlogId and date * @param pBlogId - the personal blogid * @param accessFlag - the access Flag, which indicates the datasource from which topics are retrieved. * READ_FROM_MASTER (1), READ_FROM_SLAVE(0) * @param getLoginInfo - get the blogger's information * @param entrydate - the date/*from ww w. j a va2 s . c om*/ * @return HashSet - the dates for each topic * @throws BaseDaoException - when error occurs */ public HashSet getTopicDates(String pBlogId, int accessFlag, boolean getLoginInfo, String entrydate) throws BaseDaoException { if (RegexStrUtil.isNull(pBlogId)) { throw new BaseDaoException("params are null"); } /** * Get scalability ds for topics, topics partitioned on pBlogId */ String sourceName = scalabilityManager.getReadScalability(pBlogId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getTopicDates() " + sourceName + " pBlogId = " + pBlogId); } HashSet result = null; Connection conn = null; try { conn = ds.getConnection(); result = getDateQuery.run(conn, pBlogId, MyUtils.getMonthNum(entrydate), MyUtils.getYear(entrydate), MyUtils.getDay(entrydate)); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error occured while listing pblogtopics getDateQuery()" + pBlogId, e1); } throw new BaseDaoException("error in getDateQuery(), pBlogId =" + pBlogId, e); } try { if (conn != null) { conn.close(); } } catch (Exception e2) { throw new BaseDaoException("conn.close() error in getDateQuery(), pBlogId =" + pBlogId, e2); } HashSet dateSet = null; if ((result != null) && (result.size() > 0)) { Iterator it = result.iterator(); dateSet = new HashSet(); while (it.hasNext()) { Blog blog = (Blog) it.next(); dateSet.add(blog.getValue(DbConstants.ENTRY_DATE)); } } return dateSet; /* if ((result != 
null) && (result.size() > 0) ) { Iterator it = result.iterator(); while (it.hasNext() ) { Blog blog = (Blog)it.next(); String date = blog.getValue(DbConstants.ENTRY_DATE); if (date.equals(entrydate)) { //treeCache.put(fqn, pBlogId, result); return result; } } } */ }