List of usage examples for org.apache.commons.lang3.tuple.Pair.getValue()
@Override
public R getValue()
Gets the value from this pair.
This method implements the Map.Entry interface, returning the right element as the value.
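Before the project examples below, here is a minimal, self-contained sketch of getValue(); the class and variable names are illustrative only and do not come from the projects listed:

import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class PairGetValueDemo {
    public static void main(String[] args) {
        // Pair implements Map.Entry, so getValue() returns the right element
        Pair<String, Integer> pair = Pair.of("count", 42);

        Integer value = pair.getValue();          // 42, same as pair.getRight()
        Map.Entry<String, Integer> entry = pair;  // usable wherever a Map.Entry is expected

        System.out.println(pair.getKey() + " -> " + value);
        System.out.println("getRight() equals getValue(): " + pair.getRight().equals(entry.getValue()));
    }
}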
From source file:com.linkedin.pinot.broker.broker.helix.ClusterChangeMediator.java
public ClusterChangeMediator(HelixExternalViewBasedRouting helixExternalViewBasedRouting,
        final BrokerMetrics brokerMetrics) {
    _helixExternalViewBasedRouting = helixExternalViewBasedRouting;

    // Simple thread that polls every 10 seconds to check if there are any cluster updates to apply
    _deferredClusterUpdater = new Thread("Deferred cluster state updater") {
        @Override
        public void run() {
            while (true) {
                try {
                    // Wait for at least one update
                    Pair<UpdateType, Long> firstUpdate = _clusterChangeQueue.take();

                    // Update the queue time metrics
                    long queueTime = System.currentTimeMillis() - firstUpdate.getValue();
                    brokerMetrics.addTimedValue(BrokerTimer.ROUTING_TABLE_UPDATE_QUEUE_TIME, queueTime,
                            TimeUnit.MILLISECONDS);

                    // Take all other updates also present
                    List<Pair<UpdateType, Long>> allUpdates = new ArrayList<>();
                    allUpdates.add(firstUpdate);
                    _clusterChangeQueue.drainTo(allUpdates);

                    // Gather all update types
                    boolean externalViewUpdated = false;
                    boolean instanceConfigUpdated = false;

                    for (Pair<UpdateType, Long> update : allUpdates) {
                        if (update.getKey() == UpdateType.EXTERNAL_VIEW) {
                            externalViewUpdated = true;
                        } else if (update.getKey() == UpdateType.INSTANCE_CONFIG) {
                            instanceConfigUpdated = true;
                        }
                    }

                    if (externalViewUpdated) {
                        try {
                            _helixExternalViewBasedRouting.processExternalViewChange();
                        } catch (Exception e) {
                            LOGGER.warn("Caught exception while updating external view", e);
                        }
                    }

                    if (instanceConfigUpdated) {
                        try {
                            _helixExternalViewBasedRouting.processInstanceConfigChange();
                        } catch (Exception e) {
                            LOGGER.warn("Caught exception while processing instance config", e);
                        }
                    }
                } catch (InterruptedException e) {
                    LOGGER.warn("Was interrupted while waiting for a cluster change", e);
                    break;
                }
            }

            LOGGER.warn("Stopping deferred cluster state update thread");
            _deferredClusterUpdater = null;
        }
    };

    _deferredClusterUpdater.start();
}
From source file:com.teradata.tempto.internal.hadoop.hdfs.WebHDFSClient.java
private URI buildUri(String path, String username, String operation, Pair<String, String>... parameters) {
    try {
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        URIBuilder uriBuilder = new URIBuilder().setScheme("http").setHost(nameNode.getHostText())
                .setPort(nameNode.getPort()).setPath("/webhdfs/v1" + checkNotNull(path))
                .setParameter("op", checkNotNull(operation)).setParameter("user.name", checkNotNull(username));

        for (Pair<String, String> parameter : parameters) {
            uriBuilder.setParameter(parameter.getKey(), parameter.getValue());
        }
        return uriBuilder.build();
    } catch (URISyntaxException e) {
        throw new RuntimeException("Could not create save file URI" + ", nameNode: " + nameNode
                + ", path: " + path + ", username: " + username);
    }
}
From source file:com.nextdoor.bender.operation.substitution.field.FieldSubstitution.java
@Override
protected void doSubstitution(InternalEvent ievent, DeserializedEvent devent, Map<String, Object> nested) {
    Pair<String, Object> kv;
    try {
        kv = getFieldAndSource(devent, srcFields, false);
    } catch (FieldNotFoundException e) {
        if (this.failSrcNotFound) {
            throw new OperationException(e);
        }
        return;
    }

    nested.put(this.key, kv.getValue());

    /*
     * Remove source field
     */
    if (this.removeSrcField) {
        devent.deleteField(kv.getKey());
    }
}
From source file:com.hortonworks.streamline.streams.notification.service.NotificationQueueHandler.java
/**
 * Attempt re-delivery of a previously enqueued notification.
 *
 * @param notificationId id of a previously submitted notification.
 */
public void resubmit(String notificationId) {
    Pair<NotificationQueueTask, Future<?>> taskStatus = taskMap.get(notificationId);
    if (taskStatus == null) {
        throw new NotificationServiceException(
                "Could not find a previously enqueued task" + " for notification id " + notificationId);
    } else if (!taskStatus.getValue().isDone()) {
        throw new NotificationServiceException(
                "Previously enqueued task" + " for notification id " + notificationId + " is not done");
    }
    Future<?> future = executorService.submit(taskStatus.getKey());
    taskMap.put(notificationId, Pair.of(taskStatus.getKey(), future));
}
From source file:com.linkedin.pinot.server.api.restlet.MmapDebugResource.java
@Override
@HttpVerb("get")
@Description("Lists all off-heap allocations and their associated sizes")
@Summary("View current off-heap allocations")
@Paths({ "/debug/memory/offheap", "/debug/memory/offheap/" })
protected Representation get() throws ResourceException {
    try {
        JSONObject returnValue = new JSONObject();
        JSONArray allocationsArray = new JSONArray();

        List<Pair<MmapUtils.AllocationContext, Integer>> allocations = MmapUtils.getAllocationsAndSizes();
        for (Pair<MmapUtils.AllocationContext, Integer> allocation : allocations) {
            JSONObject jsonAllocation = new JSONObject();
            jsonAllocation.put("context", allocation.getKey().getContext());
            jsonAllocation.put("type", allocation.getKey().getAllocationType().toString());
            jsonAllocation.put("size", allocation.getValue());
            allocationsArray.put(jsonAllocation);
        }

        returnValue.put("allocations", allocationsArray);
        return new StringRepresentation(returnValue.toString(2));
    } catch (JSONException e) {
        return new StringRepresentation(e.toString());
    }
}
From source file:gobblin.cluster.ScheduledJobConfigurationManager.java
/***
 * TODO: Change cluster code to handle Spec. Right now all job properties are needed to be in config and template is not honored
 * TODO: Materialized JobSpec and make use of ResolvedJobSpec
 * @throws ExecutionException
 * @throws InterruptedException
 */
private void fetchJobSpecs() throws ExecutionException, InterruptedException {
    List<Pair<SpecExecutorInstance.Verb, Spec>> changesSpecs =
            (List<Pair<SpecExecutorInstance.Verb, Spec>>) this.specExecutorInstanceConsumer.changedSpecs().get();

    for (Pair<SpecExecutorInstance.Verb, Spec> entry : changesSpecs) {
        SpecExecutorInstance.Verb verb = entry.getKey();

        if (verb.equals(SpecExecutorInstance.Verb.ADD)) {
            // Handle addition
            JobSpec jobSpec = (JobSpec) entry.getValue();
            postNewJobConfigArrival(jobSpec.getUri().toString(), jobSpec.getConfigAsProperties());
            jobSpecs.put(entry.getValue().getUri(), (JobSpec) entry.getValue());
        } else if (verb.equals(SpecExecutorInstanceConsumer.Verb.UPDATE)) {
            // Handle update
            JobSpec jobSpec = (JobSpec) entry.getValue();
            postUpdateJobConfigArrival(jobSpec.getUri().toString(), jobSpec.getConfigAsProperties());
            jobSpecs.put(entry.getValue().getUri(), (JobSpec) entry.getValue());
        } else if (verb.equals(SpecExecutorInstanceConsumer.Verb.DELETE)) {
            // Handle delete
            Spec anonymousSpec = (Spec) entry.getValue();
            postDeleteJobConfigArrival(anonymousSpec.getUri().toString(), new Properties());
            jobSpecs.remove(entry.getValue().getUri());
        }
    }
}
From source file:gobblin.cluster.StreamingJobConfigurationManager.java
private void fetchJobSpecs() throws ExecutionException, InterruptedException {
    List<Pair<SpecExecutorInstance.Verb, Spec>> changesSpecs =
            (List<Pair<SpecExecutorInstance.Verb, Spec>>) this.specExecutorInstanceConsumer.changedSpecs().get();

    // propagate thread interruption so that caller will exit from loop
    if (Thread.interrupted()) {
        throw new InterruptedException();
    }

    for (Pair<SpecExecutorInstance.Verb, Spec> entry : changesSpecs) {
        SpecExecutorInstance.Verb verb = entry.getKey();

        if (verb.equals(SpecExecutorInstance.Verb.ADD)) {
            // Handle addition
            JobSpec jobSpec = (JobSpec) entry.getValue();
            postNewJobConfigArrival(jobSpec.getUri().toString(), jobSpec.getConfigAsProperties());
        } else if (verb.equals(SpecExecutorInstanceConsumer.Verb.UPDATE)) {
            // Handle update
            JobSpec jobSpec = (JobSpec) entry.getValue();
            postUpdateJobConfigArrival(jobSpec.getUri().toString(), jobSpec.getConfigAsProperties());
        } else if (verb.equals(SpecExecutorInstanceConsumer.Verb.DELETE)) {
            // Handle delete
            Spec anonymousSpec = (Spec) entry.getValue();
            postDeleteJobConfigArrival(anonymousSpec.getUri().toString(), new Properties());
        }
    }
}
From source file:lineage2.gameserver.data.xml.parser.RestartPointParser.java
/**
 * Method readData.
 * @param rootElement Element
 * @throws Exception
 */
@Override
protected void readData(Element rootElement) throws Exception {
    List<Pair<Territory, Map<Race, String>>> restartArea = new ArrayList<>();
    Map<String, RestartPoint> restartPoint = new HashMap<>();

    for (Iterator<Element> iterator = rootElement.elementIterator(); iterator.hasNext();) {
        Element listElement = iterator.next();
        if ("restart_area".equals(listElement.getName())) {
            Territory territory = null;
            Map<Race, String> restarts = new HashMap<>();
            for (Iterator<Element> i = listElement.elementIterator(); i.hasNext();) {
                Element n = i.next();
                if ("region".equalsIgnoreCase(n.getName())) {
                    Rectangle shape;
                    Attribute map = n.attribute("map");
                    String s = map.getValue();
                    String val[] = s.split("_");
                    int rx = Integer.parseInt(val[0]);
                    int ry = Integer.parseInt(val[1]);
                    int x1 = World.MAP_MIN_X + ((rx - Config.GEO_X_FIRST) << 15);
                    int y1 = World.MAP_MIN_Y + ((ry - Config.GEO_Y_FIRST) << 15);
                    int x2 = (x1 + (1 << 15)) - 1;
                    int y2 = (y1 + (1 << 15)) - 1;
                    shape = new Rectangle(x1, y1, x2, y2);
                    shape.setZmin(World.MAP_MIN_Z);
                    shape.setZmax(World.MAP_MAX_Z);
                    if (territory == null) {
                        territory = new Territory();
                    }
                    territory.add(shape);
                } else if ("polygon".equalsIgnoreCase(n.getName())) {
                    Polygon shape = ZoneParser.parsePolygon(n);
                    if (!shape.validate()) {
                        error("RestartPointParser: invalid territory data : " + shape + "!");
                    }
                    if (territory == null) {
                        territory = new Territory();
                    }
                    territory.add(shape);
                } else if ("restart".equalsIgnoreCase(n.getName())) {
                    Race race = Race.valueOf(n.attributeValue("race"));
                    String locName = n.attributeValue("loc");
                    restarts.put(race, locName);
                }
            }
            if (territory == null) {
                throw new RuntimeException("RestartPointParser: empty territory!");
            }
            if (restarts.isEmpty()) {
                throw new RuntimeException("RestartPointParser: restarts not defined!");
            }
            restartArea.add(new ImmutablePair<>(territory, restarts));
        } else if ("restart_loc".equals(listElement.getName())) {
            String name = listElement.attributeValue("name");
            int bbs = Integer.parseInt(listElement.attributeValue("bbs", "0"));
            int msgId = Integer.parseInt(listElement.attributeValue("msg_id", "0"));
            List<Location> restartPoints = new ArrayList<>();
            List<Location> PKrestartPoints = new ArrayList<>();
            for (Iterator<Element> i = listElement.elementIterator(); i.hasNext();) {
                Element n = i.next();
                if ("restart_point".equals(n.getName())) {
                    for (Iterator<Element> ii = n.elementIterator(); ii.hasNext();) {
                        Element d = ii.next();
                        if ("coords".equalsIgnoreCase(d.getName())) {
                            Location loc = Location.parseLoc(d.attribute("loc").getValue());
                            restartPoints.add(loc);
                        }
                    }
                } else if ("PKrestart_point".equals(n.getName())) {
                    for (Iterator<Element> ii = n.elementIterator(); ii.hasNext();) {
                        Element d = ii.next();
                        if ("coords".equalsIgnoreCase(d.getName())) {
                            Location loc = Location.parseLoc(d.attribute("loc").getValue());
                            PKrestartPoints.add(loc);
                        }
                    }
                }
            }
            if (restartPoints.isEmpty()) {
                throw new RuntimeException(
                        "RestartPointParser: restart_points not defined for restart_loc : " + name + "!");
            }
            if (PKrestartPoints.isEmpty()) {
                PKrestartPoints = restartPoints;
            }
            RestartPoint rp = new RestartPoint(name, bbs, msgId, restartPoints, PKrestartPoints);
            restartPoint.put(name, rp);
        }
    }

    for (Pair<Territory, Map<Race, String>> ra : restartArea) {
        Map<Race, RestartPoint> restarts = new HashMap<>();
        for (Map.Entry<Race, String> e : ra.getValue().entrySet()) {
            RestartPoint rp = restartPoint.get(e.getValue());
            if (rp == null) {
                throw new RuntimeException("RestartPointParser: restart_loc not found : " + e.getValue() + "!");
            }
            restarts.put(e.getKey(), rp);
            getHolder().addRegionData(new RestartArea(ra.getKey(), restarts));
        }
    }
}
From source file:com.nextdoor.bender.operation.substitution.field.FieldSubstitution.java
@Override
protected void doSubstitution(InternalEvent ievent, DeserializedEvent devent) {
    /*
     * Get the field value
     */
    Pair<String, Object> kv;
    try {
        kv = getFieldAndSource(devent, srcFields, false);
    } catch (FieldNotFoundException e) {
        if (this.failSrcNotFound) {
            throw new OperationException(e);
        }
        return;
    }

    try {
        devent.setField(this.key, kv.getValue());
    } catch (FieldNotFoundException e) {
        if (this.failDstNotFound) {
            throw new OperationException(e);
        }
        return;
    }

    /*
     * Only remove if source field does not equal destination.
     */
    if (this.removeSrcField && !kv.getKey().equals(this.key)) {
        devent.deleteField(kv.getKey());
    }
}
From source file:it.polimi.diceH2020.SPACE4CloudWS.solvers.solversImpl.MINLPSolver.MINLPDataFileBuilder.java
private <N extends Number> void printIndexedTable(int idx, Pair<Iterable<Integer>, Iterable<N>> pair) {
    String currentLine = String.format(" [%d, *] :=", idx);
    lines.add(currentLine);
    Iterator<Integer> first = pair.getKey().iterator();
    Iterator<N> second = pair.getValue().iterator();
    while (first.hasNext() && second.hasNext()) {
        Integer key = first.next();
        N value = second.next();
        if (value instanceof Double) {
            // UK locale to have . instead of , as decimal separator
            currentLine = String.format(Locale.UK, " %d %f", key, value.doubleValue());
        } else if (value instanceof Integer) {
            currentLine = String.format(" %d %d", key, value.intValue());
        }
        lines.add(currentLine);
    }
}