List of usage examples for java.util.LinkedHashMap.putAll
void putAll(Map<? extends K, ? extends V> m);
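Before the project examples, a minimal, self-contained sketch of the method's behaviour (the keys and values below are illustrative only, not taken from any of the projects listed): putAll appends keys that are new to the target in the source map's iteration order, while keys that already exist keep their original position and only have their value replaced.

import java.util.LinkedHashMap;
import java.util.Map;

public class PutAllDemo {
    public static void main(String[] args) {
        Map<String, Integer> defaults = new LinkedHashMap<>();
        defaults.put("timeout", 30);
        defaults.put("retries", 3);

        LinkedHashMap<String, Integer> settings = new LinkedHashMap<>();
        settings.put("retries", 5);   // existing key, inserted first
        settings.putAll(defaults);    // copies all mappings from defaults

        // "retries" keeps its original position but takes the value from defaults
        System.out.println(settings); // {retries=3, timeout=30}
    }
}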
From source file:com.logsniffer.event.es.EsEventPersistence.java
private void prepareMapping(final long snifferId) {
    logger.info("Rebuilding mapping for sniffer {}", snifferId);
    final Sniffer sniffer = snifferPersistence.getSniffer(snifferId);
    if (sniffer == null) {
        logger.info("Skip rebuilding mapping due to no more existing sniffer: {}", snifferId);
        return;
    }
    final LinkedHashMap<String, FieldBaseTypes> snifferTypes = new LinkedHashMap<>();
    final LogSource<?> source = logSourceProvider.getSourceById(sniffer.getLogSourceId());
    final LinkedHashMap<String, FieldBaseTypes> entriesTypes = new LinkedHashMap<>();
    try {
        entriesTypes.putAll(source.getReader().getFieldTypes());
    } catch (final FormatException e) {
        logger.warn("Failed to access entries fields, these won't be considered", e);
    }
    try {
        clientTpl.executeWithClient(new ClientCallback<Object>() {
            @Override
            public Object execute(final Client client) {
                final StringWriter jsonMapping = new StringWriter();
                final JSONBuilder mappingBuilder = new JSONBuilder(jsonMapping).object();
                final JSONBuilder props = mappingBuilder.key(getType(snifferId)).object().key("properties")
                        .object();
                // TODO: Map sniffer fields dynamically
                props.key(Event.FIELD_TIMESTAMP).object().key("type").value("date").endObject();
                props.key(Event.FIELD_PUBLISHED).object().key("type").value("date").endObject();
                for (final String key : entriesTypes.keySet()) {
                    mapField(props, Event.FIELD_ENTRIES + "." + key, entriesTypes.get(key));
                }
                mappingBuilder.endObject().endObject().endObject();
                logger.info("Creating mapping for sniffer {}: {}", snifferId, jsonMapping);
                client.admin().indices().preparePutMapping(indexNamingStrategy.buildActiveName(snifferId))
                        .setType(getType(snifferId)).setSource(jsonMapping.toString()).get();
                return null;
            }
        });
    } catch (final Exception e) {
        logger.warn("Failed to update mapping for sniffer " + snifferId + ", try to delete all events", e);
    }
}
From source file:org.apache.asterix.optimizer.rules.subplan.InlineSubplanInputForNestedTupleSourceRule.java
private Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> rewriteSubplanOperator(
        Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    // Recursively traverse the input operators before rewriting the current operator.
    Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> changedAndVarMap = traverseNonSubplanOperator(
            op, context);
    if (op.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
        return changedAndVarMap;
    }
    /**
     * Apply the special join-based rewriting.
     */
    Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> result = applySpecialFlattening(opRef,
            context);
    if (!result.first) {
        /**
         * If the special join-based rewriting does not apply, apply the general
         * rewriting which blindly inlines all NTSs.
         */
        result = applyGeneralFlattening(opRef, context);
    }
    LinkedHashMap<LogicalVariable, LogicalVariable> returnedMap = new LinkedHashMap<>();
    // Adds variable mappings from input operators.
    returnedMap.putAll(changedAndVarMap.second);
    // Adds variable mappings resulting from the rewriting of the current operator.
    returnedMap.putAll(result.second);
    return new Pair<>(result.first || changedAndVarMap.first, returnedMap);
}
From source file:org.openspaces.maven.plugin.CreatePUProjectMojo.java
/**
 * Returns a map of available project template names and descriptions.
 */
private HashMap getAvailableTemplates() throws Exception {
    HashMap templates = new HashMap();
    Enumeration urls = Thread.currentThread().getContextClassLoader().getResources(DIR_TEMPLATES);
    while (urls.hasMoreElements()) {
        URL url = (URL) urls.nextElement();
        PluginLog.getLog().debug("retrieving all templates from url: " + url);
        HashMap jarTemplates = getJarTemplates(getJarURLFromURL(url, ""));
        templates.putAll(jarTemplates);
    }
    LinkedHashMap sortedTemplates = new LinkedHashMap();
    String desc = (String) templates.remove("event-processing");
    if (desc != null) {
        sortedTemplates.put("event-processing", desc);
    }
    desc = (String) templates.remove("persistent-event-processing");
    if (desc != null) {
        sortedTemplates.put("persistent-event-processing", desc);
    }
    sortedTemplates.putAll(templates);
    return sortedTemplates;
}
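The pattern above, removing preferred entries, putting them into the LinkedHashMap first, and then calling putAll for the remainder, is a common way to impose a partial ordering on an otherwise unordered map. A minimal sketch of the idiom; the template names and descriptions below are made up for illustration and are not the plugin's real data:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class TemplateOrdering {
    public static void main(String[] args) {
        Map<String, String> templates = new HashMap<>();
        templates.put("basic", "A basic template");
        templates.put("event-processing", "An event processing template");
        templates.put("web", "A web template");

        LinkedHashMap<String, String> sorted = new LinkedHashMap<>();
        // Promote the preferred entry to the front, if present
        String desc = templates.remove("event-processing");
        if (desc != null) {
            sorted.put("event-processing", desc);
        }
        // Append everything else in whatever order the source map yields
        sorted.putAll(templates);

        System.out.println(sorted.keySet()); // event-processing first, the rest after it
    }
}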
From source file:org.rapidcontext.app.web.WebDavRequest.java
/**
 * Adds a resource to the result with the specified dates and size.
 *
 * @param href the root-relative resource link
 * @param created the resource creation date
 * @param modified the resource modification date
 * @param size the resource size (in bytes)
 */
public void addResource(String href, Date created, Date modified, long size) {
    LinkedHashMap props = new LinkedHashMap();
    String name;
    String str;

    props.putAll(properties);
    // Strip any trailing slash, then take the last path segment as the display name
    name = StringUtils.removeEnd(href, "/");
    name = StringUtils.substringAfterLast(name, "/");
    if (props.containsKey(PROP_DISPLAY_NAME)) {
        props.put(PROP_DISPLAY_NAME, name);
    }
    if (props.containsKey(PROP_CREATION_DATE)) {
        str = CREATION_DATE_FORMAT.format(created);
        props.put(PROP_CREATION_DATE, str);
    }
    if (props.containsKey(PROP_LAST_MODIFIED)) {
        str = LAST_MODIFIED_DATE_FORMAT.format(modified);
        props.put(PROP_LAST_MODIFIED, str);
    }
    if (props.containsKey(PROP_CONTENT_TYPE)) {
        props.put(PROP_CONTENT_TYPE, href.endsWith("/") ? null : Mime.type(name));
    }
    if (href.endsWith("/")) {
        if (props.containsKey(PROP_RESOURCE_TYPE)) {
            props.put(PROP_RESOURCE_TYPE, "<D:collection/>");
        }
        if (props.containsKey(PROP_CONTENT_LENGTH)) {
            props.put(PROP_CONTENT_LENGTH, "0");
        }
        if (props.containsKey(PROP_ETAG)) {
            props.put(PROP_ETAG, null);
        }
    } else {
        if (props.containsKey(PROP_CONTENT_LENGTH)) {
            props.put(PROP_CONTENT_LENGTH, String.valueOf(size));
        }
        if (props.containsKey(PROP_ETAG)) {
            str = "W/\"" + size + "-" + modified.getTime() + "\"";
            props.put(PROP_ETAG, str);
        }
    }
    // Fake quota properties to enable read-write access
    if (props.containsKey(PROP_QUOTA_USED_BYTES)) {
        props.put(PROP_QUOTA_USED_BYTES, "0");
    }
    if (props.containsKey(PROP_QUOTA_AVAIL_BYTES)) {
        props.put(PROP_QUOTA_AVAIL_BYTES, "1000000000");
    }
    addResource(href, props);
}
From source file:com.android.tradefed.testtype.suite.TfSuiteRunner.java
/**
 * Helper to expand all TfSuiteRunner instances included in sub-configurations. Avoids nesting a
 * module inside another module when a suite is run as part of another suite.
 */
private LinkedHashMap<String, IConfiguration> expandTestSuites(String configName, IConfiguration config,
        DirectedGraph<String> graph) {
    LinkedHashMap<String, IConfiguration> configMap = new LinkedHashMap<String, IConfiguration>();
    List<IRemoteTest> tests = new ArrayList<>(config.getTests());
    for (IRemoteTest test : tests) {
        if (test instanceof TfSuiteRunner) {
            TfSuiteRunner runner = (TfSuiteRunner) test;
            // Suite runner can only load and run tests if it has a suite tag set.
            if (runner.getSuiteTag() != null) {
                LinkedHashMap<String, IConfiguration> subConfigs = runner.loadTests(configName, graph);
                configMap.putAll(subConfigs);
            } else {
                CLog.w("Config %s does not have a suite-tag it cannot run anything.", configName);
            }
            config.getTests().remove(test);
        }
    }
    // If we have any IRemoteTests remaining in the base configuration, it will run.
    if (!config.getTests().isEmpty()) {
        configMap.put(sanitizeModuleName(configName), config);
    }
    return configMap;
}
From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java
/**
 * This method exists because Java 1.5 no longer has LinkedHashMap(LinkedHashMap),
 * replacing it with LinkedHashMap(Map<? extends K, ? extends V> m). Nevertheless, we want to
 * use it in Production Mode to test that Production Mode functionality.
 *
 * @param hashMap the LinkedHashMap to be copied
 * @return the copy
 */
private <K, V> LinkedHashMap<K, V> cloneLinkedHashMap(LinkedHashMap<K, V> hashMap) {
    if (!TestUtils.isJvm()) {
        return new LinkedHashMap<K, V>(hashMap);
    } else {
        LinkedHashMap<K, V> m = new LinkedHashMap<K, V>();
        m.putAll(hashMap);
        return m;
    }
}
From source file:com.datatorrent.stram.engine.OperatorThread.java
private void setupNode(OperatorDeployInfo ndi) {
    //failedNodes.remove(ndi.id);
    //final Node<?> node = nodes.get(ndi.id);
    node.setup(node.context);

    /* setup context for all the input ports */
    LinkedHashMap<String, PortContextPair<InputPort<?>>> inputPorts = node
            .getPortMappingDescriptor().inputPorts;
    LinkedHashMap<String, Operators.PortContextPair<InputPort<?>>> newInputPorts = new LinkedHashMap<>(
            inputPorts.size());
    for (OperatorDeployInfo.InputDeployInfo idi : ndi.inputs) {
        InputPort<?> port = inputPorts.get(idi.portName).component;
        PortContext context = new PortContext(idi.contextAttributes, node.context);
        newInputPorts.put(idi.portName, new PortContextPair<>(port, context));
        port.setup(context);
    }
    inputPorts.putAll(newInputPorts);

    /* setup context for all the output ports */
    LinkedHashMap<String, PortContextPair<OutputPort<?>>> outputPorts = node
            .getPortMappingDescriptor().outputPorts;
    LinkedHashMap<String, PortContextPair<OutputPort<?>>> newOutputPorts = new LinkedHashMap<>(
            outputPorts.size());
    for (OperatorDeployInfo.OutputDeployInfo odi : ndi.outputs) {
        OutputPort<?> port = outputPorts.get(odi.portName).component;
        PortContext context = new PortContext(odi.contextAttributes, node.context);
        newOutputPorts.put(odi.portName, new PortContextPair<>(port, context));
        port.setup(context);
    }
    outputPorts.putAll(newOutputPorts);

    /* This introduces the need for synchronization on processNodeRequest, which was solved by
       adding the deleted field in StramToNodeRequest */
    cContext.processNodeRequests(false);
    node.activate();
    cContext.publish(new ContainerEvent.NodeActivationEvent(node));
}
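The putAll calls above rely on LinkedHashMap replacing the values of keys that already exist without disturbing their iteration order, so the port maps keep their original port ordering while every PortContextPair is swapped for a newly built one. A minimal, standalone illustration of that property, with plain String keys and values standing in for the actual port types:

import java.util.LinkedHashMap;

public class ReplaceInPlace {
    public static void main(String[] args) {
        LinkedHashMap<String, String> ports = new LinkedHashMap<>();
        ports.put("in1", "old pair");
        ports.put("in2", "old pair");

        LinkedHashMap<String, String> rebuilt = new LinkedHashMap<>();
        rebuilt.put("in2", "new pair");
        rebuilt.put("in1", "new pair");

        // Values are replaced, but the original key order (in1, in2) is preserved
        ports.putAll(rebuilt);
        System.out.println(ports); // {in1=new pair, in2=new pair}
    }
}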
From source file:org.dkpro.lab.reporting.FlexTable.java
/**
 * Add a new row. If the row already exists, it is overwritten.
 *
 * @param aId the row ID.
 * @param aRow the row data.
 */
public void addRow(String aId, Map<String, ? extends V> aRow) {
    LinkedHashMap<String, V> row = new LinkedHashMap<String, V>();
    if (aRow != null) {
        for (String key : aRow.keySet()) {
            columns.put(key, PRESENT);
        }
        row.putAll(aRow);
    }
    rows.put(aId, row);
}
From source file:com.sillelien.dollar.api.types.DollarMap.java
@NotNull
@Override
public var $insert(@NotNull var value, int position) {
    final LinkedHashMap<var, var> newMap = new LinkedHashMap<>();
    final Map<var, var> existing = toVarMap().mutable();
    int count = 0;
    for (Map.Entry<var, var> entry : existing.entrySet()) {
        if (count == position) {
            newMap.put(value.$pairKey(), value.$pairValue());
        }
        newMap.put(entry.getKey(), entry.getValue());
        count++;
    }
    if (count <= position) {
        // Position is at or past the end: append the new pair
        newMap.put(value.$pairKey(), value.$pairValue());
    }
    // Re-putting existing keys via putAll does not change their iteration order
    newMap.putAll(existing);
    return DollarFactory.fromValue(newMap, errors(), value.errors());
}
From source file:org.pircbotx.PircBotX.java
/**
 * Start the bot by connecting to the server. If
 * {@link Configuration#isAutoReconnect()} is true this will continuously
 * reconnect to the server until {@link #stopBotReconnect() } is called or
 * an exception is thrown from connecting.
 *
 * @throws IOException if it was not possible to connect to the server.
 * @throws IrcException
 */
public void startBot() throws IOException, IrcException {
    //Begin magic
    reconnectStopped = false;
    do {
        //Try to connect to the server, grabbing any exceptions
        LinkedHashMap<InetSocketAddress, Exception> connectExceptions = Maps.newLinkedHashMap();
        try {
            connectAttemptTotal++;
            connectAttempts++;
            connectExceptions.putAll(connect());
        } catch (Exception e) {
            //Initial connect exceptions are returned in the map, this is a more serious error
            log.error("Exception encountered during connect", e);
            connectExceptions.put(new InetSocketAddress(serverHostname, serverPort), e);
            if (!configuration.isAutoReconnect())
                throw new RuntimeException("Exception encountered during connect", e);
        } finally {
            if (!connectExceptions.isEmpty())
                Utils.dispatchEvent(this,
                        new ConnectAttemptFailedEvent(this,
                                configuration.getAutoReconnectAttempts() - connectAttempts,
                                ImmutableMap.copyOf(connectExceptions)));

            //Cleanup if not already called
            synchronized (stateLock) {
                if (state != State.DISCONNECTED)
                    shutdown();
            }
        }

        //No longer connected to the server
        if (!configuration.isAutoReconnect())
            return;
        if (reconnectStopped) {
            log.debug("stopBotReconnect() called, exiting reconnect loop");
            return;
        }
        if (connectAttempts == configuration.getAutoReconnectAttempts()) {
            throw new IOException("Failed to connect to IRC server(s) after " + connectAttempts + " attempts");
        }

        //Optionally pause between attempts, useful if network is temporarily down
        if (configuration.getAutoReconnectDelay() > 0)
            try {
                log.debug("Pausing for {} milliseconds before connecting again",
                        configuration.getAutoReconnectDelay());
                Thread.sleep(configuration.getAutoReconnectDelay());
            } catch (InterruptedException e) {
                throw new RuntimeException("Interrupted while pausing before the next connect attempt", e);
            }
    } while (connectAttempts < configuration.getAutoReconnectAttempts());
}