List of usage examples for java.util LinkedHashSet addAll
boolean addAll(Collection<? extends E> c);
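LinkedHashSet inherits addAll from its superclasses: it adds every element of the given collection that is not already present, keeps elements in first-insertion order, and returns true if the set changed as a result. A minimal, self-contained sketch of that contract (class and variable names are illustrative):

import java.util.Arrays;
import java.util.LinkedHashSet;

public class AddAllDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<>(Arrays.asList("a", "b"));
        // "b" and "a" are already present and ignored; only "c" is new.
        boolean changed = set.addAll(Arrays.asList("b", "c", "a"));
        System.out.println(changed); // true
        System.out.println(set);     // [a, b, c] -- first-insertion order
    }
}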
From source file:net.sradonia.eventbus.EventBus.java
/**
 * <p>
 * Publishes an event on the bus.
 * </p>
 *
 * @param topic
 *            the topic of the event
 * @param event
 *            the event object
 * @return <code>true</code> if the event has been published successfully,
 *         <code>false</code> if it has been vetoed
 */
public boolean publish(String topic, Object event) {
    if (event == null)
        throw new IllegalArgumentException("can't publish null event!");
    if (log.isInfoEnabled())
        log.info("publishing {topic=" + topic + ", event=" + event + "}");

    Class<?> eventClass = event.getClass();

    // check VetoListeners
    LinkedHashSet<VetoListener> vetoListeners = new LinkedHashSet<VetoListener>();
    if (this.vetoListeners != null) {
        synchronized (this.vetoListeners) {
            vetoListeners.addAll(this.vetoListeners);
        }
    }
    vetoListeners.addAll(getVetoListenersForClass(eventClass));
    if (topic != null)
        vetoListeners.addAll(getVetoListenersForTopic(topic));

    for (VetoListener vetoListener : vetoListeners) {
        try {
            if (vetoListener.shouldVeto(topic, event)) {
                if (log.isInfoEnabled())
                    log.info(vetoListener + " vetoed event {topic=" + topic + ", event=" + event + "}");
                return false;
            }
        } catch (RuntimeException e) {
            if (log.isErrorEnabled())
                log.error(vetoListener + " threw an exception while checking for veto of event {topic="
                        + topic + ", event=" + event + "}", e);
            throw e;
        }
    }

    // publish
    Set<EventSubscriber> subscribers = new LinkedHashSet<EventSubscriber>();
    if (this.subscribers != null) {
        synchronized (this.subscribers) {
            subscribers.addAll(this.subscribers);
        }
    }
    subscribers.addAll(getSubscribersForClass(eventClass));
    if (topic != null)
        subscribers.addAll(getSubscribersForTopic(topic));

    for (EventSubscriber subscriber : subscribers) {
        try {
            subscriber.onEvent(topic, event);
        } catch (RuntimeException e) {
            if (log.isErrorEnabled())
                log.error(subscriber + " threw an exception while handling event {topic=" + topic
                        + ", event=" + event + "}", e);
            throw e;
        }
    }
    return true;
}
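The publish() method above snapshots each shared listener set into a local LinkedHashSet inside a synchronized block, then merges class- and topic-specific listeners with further addAll calls, so iteration happens outside the lock and in registration order. A minimal sketch of that snapshot idiom (ListenerSnapshot and the Runnable listener type are illustrative stand-ins, not the EventBus API):

import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;

public class ListenerSnapshot {
    private final Set<Runnable> listeners = Collections.synchronizedSet(new LinkedHashSet<>());

    public void register(Runnable listener) {
        listeners.add(listener);
    }

    public void fire() {
        // Copy under the lock; addAll preserves registration order.
        Set<Runnable> snapshot = new LinkedHashSet<>();
        synchronized (listeners) {
            snapshot.addAll(listeners);
        }
        // Invoke outside the lock so slow listeners don't block registration.
        for (Runnable listener : snapshot) {
            listener.run();
        }
    }
}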
From source file:org.apache.lucene.queryparser.classic.PreAnalyzedQueryParser.java
/**
 * Split the term query nodes into new nodes, depending on the output from
 * the tokenization process.
 *
 * @param qryNodeList the query nodes to tokenize
 * @param langCode    the language code used to select the field type
 * @return the expanded list of query nodes
 */
public ArrayList<QueryNode> tokenizeChildNodes(ArrayList<QueryNode> qryNodeList, String langCode) {
    ArrayList<QueryNode> newQryNodeList = new ArrayList<QueryNode>();
    for (QueryNode qryNode : qryNodeList) {
        if (qryNode.getType().equals(NTypes.TERM) && !qryNode.getIsPhrase() && qryNode.getIsPreAnalyzed()) {
            ArrayList<String> tokens = getTokensArrayUsingFieldTypes(langCode, qryNode.getData());
            // Removing duplicates while keeping token order
            LinkedHashSet<String> hs = new LinkedHashSet<String>();
            hs.addAll(tokens);
            tokens.clear();
            tokens.addAll(hs);
            for (String token : tokens) {
                newQryNodeList.add(new QueryNode(token, NTypes.TERM, qryNode.getIsPreAnalyzed()));
            }
        } else {
            newQryNodeList.add(qryNode);
        }
    }
    return newQryNodeList;
}
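This example, and the synonym and lemma expansion methods below, rely on the same idiom: round-tripping a List through a LinkedHashSet drops duplicates while preserving first-occurrence order. A standalone sketch of the idiom (the token values are made up):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class DedupPreservingOrder {
    public static void main(String[] args) {
        List<String> tokens = new ArrayList<>(Arrays.asList("new", "york", "new", "jersey", "york"));
        LinkedHashSet<String> hs = new LinkedHashSet<>();
        hs.addAll(tokens);   // keeps only the first occurrence of each token
        tokens.clear();
        tokens.addAll(hs);   // copy the de-duplicated tokens back into the list
        System.out.println(tokens); // [new, york, jersey]
    }
}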
From source file:org.apache.lucene.queryparser.classic.PreAnalyzedQueryParser.java
public ArrayList<QueryNode> synonymExpandChildNodes(ArrayList<QueryNode> qryNodeList, Boolean synExp) {
    ArrayList<QueryNode> newQryNodeList = new ArrayList<QueryNode>();
    Boolean fieldClauseHasSynExp = false;
    for (QueryNode qryNode : qryNodeList) {
        if (qryNode.getType().equals(NTypes.TERM) && !qryNode.getIsPhrase() && qryNode.getIsPreAnalyzed()) {
            if (fieldClauseHasSynExp) {
                ArrayList<String> synonyms = expandSynonyms(qryNode.getData());
                // Removing duplicates while keeping order
                LinkedHashSet<String> hs = new LinkedHashSet<String>();
                hs.addAll(synonyms);
                synonyms.clear();
                synonyms.addAll(hs);
                if (synonyms.size() > 0) {
                    newQryNodeList.add(new QueryNode("(", NTypes.SYMBOL));
                }
                newQryNodeList.add(new QueryNode(qryNode.getData(), NTypes.TERM, qryNode.getIsPreAnalyzed()));
                for (String syn : synonyms) {
                    newQryNodeList.add(new QueryNode(syn, NTypes.TERM, qryNode.getIsPreAnalyzed()));
                }
                if (synonyms.size() > 0) {
                    newQryNodeList.add(new QueryNode(")", NTypes.SYMBOL));
                }
            } else {
                newQryNodeList.add(new QueryNode(qryNode.getData(), NTypes.TERM, qryNode.getIsPreAnalyzed()));
            }
        } else {
            if (qryNode.getType().equals(NTypes.FIELD)) {
                if (((qryNode.getLocalParams() != null) && (qryNode.getLocalParams().contains("syn=true")))
                        || synExp) {
                    fieldClauseHasSynExp = true;
                } else {
                    fieldClauseHasSynExp = false;
                }
            }
            newQryNodeList.add(qryNode);
        }
    }
    return newQryNodeList;
}
From source file:org.apache.lucene.queryparser.classic.PreAnalyzedQueryParser.java
public ArrayList<QueryNode> lemmaExpandChildNodes(ArrayList<QueryNode> qryNodeList, Boolean lemmaExp) {
    ArrayList<QueryNode> newQryNodeList = new ArrayList<QueryNode>();
    Boolean fieldClauseHasLemmaExp = false;
    for (QueryNode qryNode : qryNodeList) {
        if (qryNode.getType().equals(NTypes.TERM) && !qryNode.getIsPhrase() && qryNode.getIsPreAnalyzed()) {
            if (fieldClauseHasLemmaExp) {
                List<String> lemmas = PreAnalyzedQParserPlugin.lemmatizer.expand(qryNode.getData());
                // Removing duplicates while keeping order
                LinkedHashSet<String> hs = new LinkedHashSet<String>();
                hs.addAll(lemmas);
                lemmas.clear();
                lemmas.addAll(hs);
                if (lemmas.size() > 0) {
                    newQryNodeList.add(new QueryNode("(", NTypes.SYMBOL));
                }
                newQryNodeList.add(new QueryNode(qryNode.getData(), NTypes.TERM, qryNode.getIsPreAnalyzed()));
                for (String lemma : lemmas) {
                    newQryNodeList.add(new QueryNode(lemma, NTypes.TERM, qryNode.getIsPreAnalyzed()));
                }
                if (lemmas.size() > 0) {
                    newQryNodeList.add(new QueryNode(")", NTypes.SYMBOL));
                }
            } else {
                newQryNodeList.add(new QueryNode(qryNode.getData(), NTypes.TERM, qryNode.getIsPreAnalyzed()));
            }
        } else {
            if (qryNode.getType().equals(NTypes.FIELD)) {
                if (((qryNode.getLocalParams() != null) && (qryNode.getLocalParams().contains("lemma=true")))
                        || lemmaExp) {
                    fieldClauseHasLemmaExp = true;
                } else {
                    fieldClauseHasLemmaExp = false;
                }
            }
            newQryNodeList.add(qryNode);
        }
    }
    return newQryNodeList;
}
From source file:com.datatorrent.stram.client.StramAppLauncher.java
/**
 * Submit application to the cluster and return the app id.
 * Sets the context class loader for application dependencies.
 *
 * @param appConfig
 * @return ApplicationId
 * @throws Exception
 */
public ApplicationId launchApp(AppFactory appConfig) throws Exception {
    loadDependencies();
    Configuration conf = propertiesBuilder.conf;
    conf.setEnum(StreamingApplication.ENVIRONMENT, StreamingApplication.Environment.CLUSTER);
    LogicalPlan dag = appConfig.createApp(propertiesBuilder);

    long hdfsTokenMaxLifeTime = conf.getLong(StramClientUtils.DT_HDFS_TOKEN_MAX_LIFE_TIME, conf.getLong(
            StramClientUtils.HDFS_TOKEN_MAX_LIFE_TIME, StramClientUtils.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT));
    dag.setAttribute(LogicalPlan.HDFS_TOKEN_LIFE_TIME, hdfsTokenMaxLifeTime);
    long rmTokenMaxLifeTime = conf.getLong(StramClientUtils.DT_RM_TOKEN_MAX_LIFE_TIME,
            conf.getLong(YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_KEY,
                    YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT));
    dag.setAttribute(LogicalPlan.RM_TOKEN_LIFE_TIME, rmTokenMaxLifeTime);

    if (conf.get(StramClientUtils.KEY_TAB_FILE) != null) {
        dag.setAttribute(LogicalPlan.KEY_TAB_FILE, conf.get(StramClientUtils.KEY_TAB_FILE));
    } else if (conf.get(StramUserLogin.DT_AUTH_KEYTAB) != null) {
        Path localKeyTabPath = new Path(conf.get(StramUserLogin.DT_AUTH_KEYTAB));
        FileSystem fs = StramClientUtils.newFileSystemInstance(conf);
        try {
            Path destPath = new Path(StramClientUtils.getDTDFSRootDir(fs, conf), localKeyTabPath.getName());
            if (!fs.exists(destPath)) {
                fs.copyFromLocalFile(false, false, localKeyTabPath, destPath);
            }
            dag.setAttribute(LogicalPlan.KEY_TAB_FILE, destPath.toString());
        } finally {
            fs.close();
        }
    }

    String tokenRefreshFactor = conf.get(StramClientUtils.TOKEN_ANTICIPATORY_REFRESH_FACTOR);
    if (tokenRefreshFactor != null && tokenRefreshFactor.trim().length() > 0) {
        dag.setAttribute(LogicalPlan.TOKEN_REFRESH_ANTICIPATORY_FACTOR, Double.parseDouble(tokenRefreshFactor));
    }

    StramClient client = new StramClient(conf, dag);
    try {
        client.start();
        LinkedHashSet<String> libjars = Sets.newLinkedHashSet();
        String libjarsCsv = conf.get(LIBJARS_CONF_KEY_NAME);
        if (libjarsCsv != null) {
            String[] jars = StringUtils.splitByWholeSeparator(libjarsCsv, StramClient.LIB_JARS_SEP);
            libjars.addAll(Arrays.asList(jars));
        }
        if (deployJars != null) {
            for (File deployJar : deployJars) {
                libjars.add(deployJar.getAbsolutePath());
            }
        }
        client.setResources(libjars);
        client.setFiles(conf.get(FILES_CONF_KEY_NAME));
        client.setArchives(conf.get(ARCHIVES_CONF_KEY_NAME));
        client.setOriginalAppId(conf.get(ORIGINAL_APP_ID));
        client.setQueueName(conf.get(QUEUE_NAME));
        client.startApplication();
        return client.getApplicationReport().getApplicationId();
    } finally {
        client.stop();
    }
}
From source file:org.pentaho.reporting.engine.classic.extensions.datasources.olap4j.AbstractMDXDataFactory.java
public String[] getReferencedFields(final String queryName, final DataRow parameter)
        throws ReportDataFactoryException {
    final boolean isNewConnection = connection == null;
    try {
        if (connection == null) {
            connection = connectionProvider.createConnection(computeJdbcUser(parameter),
                    computeJdbcPassword(parameter));
            connection.setLocale(getLocale());
            final String role = computeRole(parameter);
            if (role != null) {
                connection.setRoleName(role);
            }
        }

        final MDXCompiler compiler = new MDXCompiler(parameter, getLocale());
        final String value = computedQuery(queryName, parameter);
        final String translatedQuery = compiler.translateAndLookup(value, parameter);
        final LinkedHashSet<String> params = new LinkedHashSet<String>();
        params.addAll(compiler.getParameter());
        if (getRoleField() != null) {
            params.add(getRoleField());
        }
        if (getJdbcPasswordField() != null) {
            params.add(getJdbcPasswordField());
        }
        if (getJdbcUserField() != null) {
            params.add(getJdbcUserField());
        }

        final PreparedOlapStatement statement = connection.prepareOlapStatement(translatedQuery);
        final OlapParameterMetaData data = statement.getParameterMetaData();
        final int count = data.getParameterCount();
        for (int i = 0; i < count; i++) {
            final String parameterName = data.getParameterName(i + 1);
            params.add(parameterName);
        }
        params.add(DataFactory.QUERY_LIMIT);
        return params.toArray(new String[params.size()]);
    } catch (final Throwable e) {
        throw new ReportDataFactoryException("Failed to obtain a connection", e);
    } finally {
        if (isNewConnection) {
            close();
        }
    }
}
From source file:org.goobi.production.model.bibliography.course.Course.java
/**
 * The function getIndividualIssues() generates a list of IndividualIssue
 * objects, each of them representing a stamping of one physically appeared
 * issue.
 *
 * @return a LinkedHashSet of IndividualIssue objects, each of them
 *         representing one physically appeared issue
 */
public LinkedHashSet<IndividualIssue> getIndividualIssues() {
    LinkedHashSet<IndividualIssue> result = new LinkedHashSet<>();
    LocalDate lastAppearance = getLastAppearance();
    for (LocalDate day = getFirstAppearance(); !day.isAfter(lastAppearance); day = day.plusDays(1)) {
        for (Block block : this) {
            result.addAll(block.getIndividualIssues(day));
        }
    }
    return result;
}
From source file:annis.visualizers.iframe.partitur.PartiturVisualizer.java
@Override
public void writeOutput(VisualizerInput input, Writer writer) {
    try {
        nodes = input.getResult().getGraph().getNodes();
        token = input.getResult().getGraph().getTokens();

        // get partitur
        PartiturParser partitur = new PartiturParser(input.getResult().getGraph(), input.getNamespace());

        // check right to left
        boolean isRTL = checkRTL(input.getResult().getTokenList());

        List<String> tierNames = new LinkedList<String>(partitur.getKnownTiers());
        Collections.sort(tierNames);

        // get keys that are allowed to select
        LinkedHashSet<String> keys = new LinkedHashSet<String>();
        String mapping = input.getMappings().getProperty("annos");
        if (mapping == null) {
            // default to the alphabetical order
            keys.addAll(partitur.getNameslist());
        } else {
            String[] splitted = mapping.split(",");
            for (int k = 0; k < splitted.length; k++) {
                String s = splitted[k].trim();
                if (partitur.getNameslist().contains(s)) {
                    keys.add(s);
                }
            }
        }

        writer.append("<!DOCTYPE html><html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">");
        writer.append("<link href=\"" + input.getResourcePath("jbar.css") + "\" rel=\"stylesheet\" type=\"text/css\" >");
        writer.append("<link href=\"" + input.getResourcePath("jquery.tooltip.css") + "\" rel=\"stylesheet\" type=\"text/css\" >");
        writer.append("<link href=\"" + input.getResourcePath("jquery.noty.css") + "\" rel=\"stylesheet\" type=\"text/css\" >");
        writer.append("<link href=\"" + input.getResourcePath("partitur.css") + "\" rel=\"stylesheet\" type=\"text/css\" >");
        writer.append("<script src=\"" + input.getResourcePath("jquery-1.7.1.min.js") + "\"></script>");
        writer.append("<script src=\"" + input.getResourcePath("jquery.jbar.js") + "\"></script>");
        writer.append("<script src=\"" + input.getResourcePath("jquery.tooltip.min.js") + "\"></script>");
        writer.append("<script src=\"" + input.getResourcePath("jquery.noty.js") + "\"></script>");
        writer.append("<script>");
        writer.append(convertToJavacSriptArray(new LinkedList<String>()));
        writer.append("\nvar levelNames = [");
        int i = 0;
        for (String levelName : tierNames) {
            if (keys.contains(levelName)) {
                writer.append((i++ > 0 ? ", " : "") + "\"" + levelName + "\"");
            }
        }
        writer.append("];\n</script>");
        writer.append("<script type=\"text/javascript\" src=\"" + input.getResourcePath("PartiturVisualizer.js") + "\"></script>");
        writer.append("</head>");
        writer.append("<body>\n");
        writer.append("<ul id=\"toolbar\"></ul>");
        writer.append("<div id=\"partiture\">");

        if (isRTL) {
            writer.append("<table class=\"partitur_table\" dir=\"rtl\">\n");
        } else {
            writer.append("<table class=\"partitur_table\">\n");
        }

        for (String tier : keys) {
            List<String> indexlist = new ArrayList<String>();

            for (List<PartiturParser.ResultElement> span : partitur.getResultlist()) {
                for (PartiturParser.ResultElement strr : span) {
                    if (strr.getName().equals(tier) && !indexlist.contains(strr.getId())) {
                        indexlist.add(strr.getId());
                    }
                }
            }

            String[] currentarray; // saves annotation ids of the current row
            while (!indexlist.isEmpty()) { // create rows until all annotations fit in
                // lists all annotations that should not be added to the current row
                List<String> currentdontuselist = new LinkedList<String>();
                writer.append("<tr class=\"level_" + tier + "\"><th>" + tier + "</th>"); // new row
                currentarray = new String[partitur.getResultlist().size()];
                for (int iterator3 = 0; iterator3 < partitur.getResultlist().size(); iterator3++) {
                    currentarray[iterator3] = null;
                }
                int spanCounter = 0;
                for (List<PartiturParser.ResultElement> span : partitur.getResultlist()) { // for each token
                    for (PartiturParser.ResultElement annotationelement : span) { // for each annotation of that token
                        if (indexlist.contains(annotationelement.getId())
                                && !currentdontuselist.contains(annotationelement.getId())) {
                            boolean neu = false; // should the annotation be added?
                            if (currentarray[spanCounter] == null) {
                                indexlist.remove(annotationelement.getId());
                                currentarray[spanCounter] = annotationelement.getId();
                                neu = true;
                            }
                            // get all other annotationelement ids (earlier ids => dontuselist)
                            int span2Counter = 0;
                            for (List<PartiturParser.ResultElement> span2 : partitur.getResultlist()) {
                                for (PartiturParser.ResultElement strr2 : span2) {
                                    if (strr2.getId().equals(annotationelement.getId()) && neu) {
                                        if (currentarray[span2Counter] == null) {
                                            currentarray[span2Counter] = annotationelement.getId();
                                        }
                                    }
                                    if (span2Counter <= spanCounter && !currentdontuselist.contains(strr2.getId())) {
                                        currentdontuselist.add(strr2.getId());
                                    }
                                }
                                span2Counter++;
                            }
                        }
                    }
                    spanCounter++;
                }

                // write row
                int length = 1;
                for (int iterator5 = 0; iterator5 < currentarray.length; iterator5 += length) {
                    StringBuffer tokenIdsArray = new StringBuffer();
                    StringBuffer eventIdsArray = new StringBuffer();
                    boolean unused = true;
                    length = 1;
                    if (currentarray[iterator5] == null) { // empty entry
                        writer.append("<td></td>");
                    } else {
                        PartiturParser.ResultElement element = null;
                        HashSet<Integer> common = new HashSet<Integer>();
                        boolean found = false;
                        int outputSpanCounter = 0;
                        for (List<PartiturParser.ResultElement> outputSpan : partitur.getResultlist()) {
                            for (PartiturParser.ResultElement strr : outputSpan) {
                                if (strr.getId().equals(currentarray[iterator5])) {
                                    if (!found) {
                                        element = strr;
                                    }
                                    if (!common.contains(outputSpanCounter)) {
                                        common.add(outputSpanCounter);
                                    }
                                    found = true;
                                    if (unused) {
                                        tokenIdsArray.append("" + strr.getId() + "_" + outputSpanCounter);
                                        eventIdsArray.append(tier + "_" + strr.getId() + "_" + outputSpanCounter);
                                        unused = false;
                                    } else {
                                        tokenIdsArray.append("," + strr.getId() + "_" + outputSpanCounter);
                                        eventIdsArray.append("," + tier + "_" + strr.getId() + "_" + outputSpanCounter);
                                    }
                                }
                            }
                            outputSpanCounter++;
                        }
                        for (int iterator7 = iterator5 + 1; iterator7 < currentarray.length; iterator7++) {
                            if (common.contains(iterator7)) {
                                length++;
                            } else {
                                break;
                            }
                        }
                        for (int iterator8 = 0; iterator8 < currentarray.length; iterator8++) {
                            if (common.contains(iterator8)) {
                                Long id = ((PartiturParser.Token) partitur.getToken().toArray()[iterator8]).getId();
                                if (unused) {
                                    tokenIdsArray.append("" + id);
                                    eventIdsArray.append(tier + "_" + id);
                                    unused = false;
                                } else {
                                    tokenIdsArray.append("," + id);
                                    eventIdsArray.append("," + tier + "_" + id);
                                }
                            }
                        }
                        String color = "black";
                        if (input.getMarkableExactMap().containsKey("" + element.getNodeId())) {
                            color = input.getMarkableExactMap().get("" + element.getNodeId());
                        }
                        if (found) {
                            writer.append("<td class=\"single_event\" " + "id=\"event_" + tier + "_"
                                    + element.getId() + "_" + iterator5 + "\" " + "style=\"color:" + color + ";\" "
                                    + "colspan=" + length + " " + "annis:tokenIds=\"" + tokenIdsArray + "\" "
                                    + "annis:eventIds=\"" + eventIdsArray + "\" " + "title=\""
                                    + partitur.namespaceForTier(tier) + ":" + tier + " = "
                                    + StringEscapeUtils.escapeXml(element.getValue()) + "\" "
                                    + "onMouseOver=\"toggleAnnotation(this, true);\" "
                                    + "onMouseOut=\"toggleAnnotation(this, false);\" "
                                    + addTimeAttribute(element.getNodeId()) + ">" + element.getValue() + "</td>");
                        } else {
                            writer.append("<td class=\"single_event\" >error</td>");
                        }
                    }
                }
                writer.append("</tr>"); // finish row
            }
        }

        // add token itself
        writer.append("<tr><th>tok</th>");
        for (PartiturParser.Token token : partitur.getToken()) {
            String color = "black";
            if (input.getMarkableExactMap().containsKey("" + token.getId())) {
                color = input.getMarkableExactMap().get("" + token.getId());
            }
            writer.append("<td class=\"tok\" style=\"color:" + color + ";\" " + "id=\"token_" + token.getId()
                    + "\" " + ">" + token.getValue() + "</td>");
        }
        writer.append("</tr>");

        writer.append("</table>\n");
        writer.append("</div>\n");
        writer.append("</body></html>");
    } catch (Exception ex) {
        log.error(null, ex);
        try {
            String annisLine = "";
            for (int i = 0; i < ex.getStackTrace().length; i++) {
                if (ex.getStackTrace()[i].getClassName().startsWith("annis.")) {
                    annisLine = ex.getStackTrace()[i].toString();
                }
            }
            writer.append("<html><body>Error occurred (" + ex.getClass().getName() + "): "
                    + ex.getLocalizedMessage() + "<br/>" + annisLine + "</body></html>");
        } catch (IOException ex1) {
            log.error(null, ex1);
        }
    }
}
From source file:net.sradonia.eventbus.EventBus.java
/**
 * @param clazz
 *            the class for which to collect veto listeners
 * @return a collection of matching veto listeners
 */
protected Set<VetoListener> getVetoListenersForClass(Class<?> clazz) {
    LinkedHashSet<VetoListener> listeners = new LinkedHashSet<VetoListener>();
    if (vetoListenersForExactClass != null) {
        synchronized (vetoListenersForExactClass) {
            Set<VetoListener> set = vetoListenersForExactClass.get(clazz);
            if (set != null)
                listeners.addAll(set);
        }
    }
    if (vetoListenersForClass != null) {
        synchronized (vetoListenersForClass) {
            for (Class<?> c : vetoListenersForClass.keySet()) {
                if (c.isAssignableFrom(clazz)) {
                    listeners.addAll(vetoListenersForClass.get(c));
                }
            }
        }
    }
    return listeners;
}
From source file:org.alfresco.bm.dataload.rm.unfiled.ScheduleUnfiledRecordLoaders.java
/**
 * Helper method that initializes the unfiled record folders that can receive loaded unfiled records.
 * This method also calculates the number of records to add to the initialized unfiled record folders.
 */
private void calculateListOfEmptyFolders() {
    if (mapOfRecordsPerUnfiledRecordFolder == null) {
        mapOfRecordsPerUnfiledRecordFolder = new LinkedHashMap<FolderData, Integer>();
        List<FolderData> unfiledRecordFoldersThatNeedRecords = new ArrayList<FolderData>();
        if (paths == null || paths.size() == 0) {
            unfiledRecordFoldersThatNeedRecords.addAll(initialiseFoldersToExistingStructure(UNFILED_CONTEXT));
        } else {
            LinkedHashSet<FolderData> unfiledFolderStructerFromExistentProvidedPaths = new LinkedHashSet<FolderData>();
            for (String path : paths) {
                if (!path.startsWith("/")) {
                    path = "/" + path;
                }
                FolderData folder = fileFolderService.getFolder(UNFILED_CONTEXT, UNFILED_RECORD_CONTAINER_PATH + path);
                if (folder != null) { // if folder exists
                    unfiledFolderStructerFromExistentProvidedPaths.addAll(getUnfiledRecordFolders(folder));
                } else {
                    try {
                        folder = createFolder(path);
                        unfiledRecordFoldersThatNeedRecords.add(folder);
                    } catch (Exception e) {
                        // something went wrong creating the current path structure; not all required paths will be created
                    }
                }
            }
            // add unfiled record folders from existing paths
            if (unfiledFolderStructerFromExistentProvidedPaths.size() > 0) {
                unfiledRecordFoldersThatNeedRecords.addAll(unfiledFolderStructerFromExistentProvidedPaths);
            }
            // the configured paths did not exist in the db and creation failed for all of them;
            // initialize to the existing structure in this case
            if (unfiledRecordFoldersThatNeedRecords.size() == 0) {
                unfiledRecordFoldersThatNeedRecords.addAll(initialiseFoldersToExistingStructure(UNFILED_CONTEXT));
            }
        }
        if (unfiledRecordFoldersThatNeedRecords.size() > 0) {
            mapOfRecordsPerUnfiledRecordFolder = distributeNumberOfRecords(unfiledRecordFoldersThatNeedRecords,
                    unfiledRecordsNumber);
        }
    }
}