List of usage examples for the org.apache.commons.lang3.tuple.ImmutablePair constructor
public ImmutablePair(final L left, final R right)
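For orientation before the project excerpts below, here is a minimal, self-contained sketch of this constructor and the equivalent ImmutablePair.of(...) static factory; the class and variable names are illustrative, not taken from any of the source files listed.

import org.apache.commons.lang3.tuple.ImmutablePair;

public class ImmutablePairExample {
    public static void main(String[] args) {
        // Construct directly with the (left, right) constructor...
        ImmutablePair<String, Integer> direct = new ImmutablePair<>("answer", 42);
        // ...or via the static factory, which infers the type arguments
        ImmutablePair<String, Integer> viaFactory = ImmutablePair.of("answer", 42);

        // Read both components; left and right are also exposed as public final fields
        System.out.println(direct.getLeft() + " -> " + direct.getRight()); // answer -> 42
        System.out.println(viaFactory.left + " -> " + viaFactory.right);   // answer -> 42

        // ImmutablePair implements Map.Entry, but mutation is rejected
        try {
            direct.setValue(7);
        } catch (UnsupportedOperationException expected) {
            System.out.println("pairs are immutable");
        }
    }
}

The examples that follow use the same constructor to pack two related values into a single return value, map value, or list element.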
From source file:edu.kit.trufflehog.view.jung.visualization.FXVisualizationViewer.java
private void onSelectionContextMenu(double posX, double posY) {
    //FIXME: make this adequate
    final SelectionContextMenuCommand cmCommand = (SelectionContextMenuCommand) interactionMap
            .get(GraphInteraction.SELECTION_CONTEXTMENU);
    if (cmCommand == null) {
        logger.warn("There is no command registered to: " + GraphInteraction.SELECTION_CONTEXTMENU);
        return;
    }
    cmCommand.setSelection(new ImmutablePair<>(new HashSet<>(selectionModel.getSelectedVertices()),
            new HashSet<>(selectionModel.getSelectedEdges())));
    cmCommand.setPosX(posX);
    cmCommand.setPosY(posY);
    notifyListeners(cmCommand);
}
From source file:com.epam.catgenome.manager.vcf.VcfManager.java
@NotNull
private Map<String, Pair<Integer, Integer>> readMetaMap(Map<String, Chromosome> chromosomeMap, VcfFile vcfFile,
        FeatureReader<VariantContext> reader, Reference reference, boolean doIndex)
        throws IOException, GeneReadingException {
    Map<String, Pair<Integer, Integer>> metaMap = new HashMap<>();
    CloseableIterator<VariantContext> iterator = reader.iterator();
    int startPosition = 1;
    int endPosition = 1;
    String currentKey = null;
    VariantContext variantContext = null;
    VariantContext lastFeature = null;

    VcfFilterInfo info = getFiltersInfo(reader);
    VcfFileReader vcfFileReader = new VcfFileReader(fileManager, referenceGenomeManager);
    VCFHeader vcfHeader = (VCFHeader) reader.getHeader();
    List<VcfIndexEntry> allEntries = new ArrayList<>();
    List<GeneFile> geneFiles = reference.getGeneFile() != null
            ? Collections.singletonList(reference.getGeneFile())
            : Collections.emptyList();

    while (iterator.hasNext()) {
        variantContext = iterator.next();

        if (!variantContext.getContig().equals(currentKey)) {
            if (checkMetaMapKey(chromosomeMap, currentKey)) {
                metaMap.put(currentKey, new ImmutablePair<>(startPosition, endPosition));
                writeEntriesForChromosome(allEntries, geneFiles,
                        Utils.getFromChromosomeMap(chromosomeMap, currentKey), vcfFile, vcfHeader,
                        vcfFileReader, doIndex);
            }
            startPosition = variantContext.getStart();
            currentKey = variantContext.getContig();
        }

        checkSorted(vcfFile, variantContext, lastFeature);
        indexVariation(allEntries, variantContext, chromosomeMap, info, vcfHeader, vcfFileReader, doIndex);
        lastFeature = variantContext;

        // Put the last one in metaMap
        endPosition = variantContext.getStart();
        if (checkMetaMapKey(chromosomeMap, currentKey)) {
            metaMap.put(currentKey, new ImmutablePair<>(startPosition, endPosition));
        }
    }

    // Put the last one
    if (variantContext != null && checkMetaMapKey(chromosomeMap, currentKey)) {
        writeEntriesForChromosome(allEntries, geneFiles, Utils.getFromChromosomeMap(chromosomeMap, currentKey),
                vcfFile, vcfHeader, vcfFileReader, doIndex);
    }

    return metaMap;
}
From source file:ilcc.ccgparser.incderivation.RevInc.java
private Pair<CCGJTreeNode, ArcJAction> checkRightRevealDep(CCGJTreeNode left, CCGJTreeNode right,
        String rescatstr, HashMap<Integer, CCGNodeDepInfo> nccgNodeDeps) {
    CCGJTreeNode result = null;
    RuleType rule = RuleType.other;
    CCGcat llcat, rlcat = null, lcat, rcat, icat, revcat = null;
    int lid, rlid = 0, rpid = 0, rid;
    lcat = left.getCCGcat().copy();
    rcat = right.getCCGcat().copy();
    boolean isconj = false;
    if (rcat.toString().endsWith("[conj]")) {
        icat = CCGcat.ccgCatFromString(rcat.toString().replace("[conj]", ""));
        isconj = true;
    } else if ((rcat.argDir() == CCGcat.BW))
        icat = rcat.argument().copy();
    else
        return null;

    lid = left.getConllNode().getNodeId();
    rid = right.getConllNode().getNodeId();
    int level = 0;
    CCGCategory lhcat = left.getConllNode().getccgCat();
    Integer lvertex = lid;
    ArrayList<Integer> rightPerList = depGraph.getRightPer(lvertex);
    for (int i = rightPerList.size() - 1; i > 0; i--) {
        Integer rmost = rightPerList.get(i);
        rlid = rmost;
        rpid = rightPerList.get(i - 1);
        String key1 = rlid + "--" + rid, key2 = rid + "--" + rlid;
        CCGcat tcat = CCGcat.lexCat(sent.getNode(rlid - 1).getWrdStr(), depGraph.getVertex(rmost).toString(),
                rlid);
        revcat = tcat;
        if (tcat.catString().equals("N"))
            tcat = CCGcat.typeChangingRule(tcat, "NP");
        level++;
        if (goldccgDeps.containsKey(key1) || goldccgDeps.containsKey(key2) || drvDeps.containsKey(key1)
                || drvDeps.containsKey(key2)) {
            //if(canCompose(tcat, right.getCCGcat()) || CCGcat.noFeatures(tcat.catString()).contains(CCGcat.noFeatures(right.getCCGcat().catString())) ){
            rlcat = tcat;
            break;
        }
    }
    if (rlcat == null)
        return null;

    if (icat == null) {
        return null;
        //llcat = CCGcat.lexCat(sent.getNode(lid-1).getConllNode().getWrd(), sent.getNode(lid-1).getConllNode().getSuperTag(), lid);
    } else {
        rlcat = CCGcat.lexCat(sent.getNode(rlid - 1).getWrdStr(), icat.catString(), rlid);
        //String rcatstr = lcat.isAtomic() ? lcat.toString()+"/"+rlcat.toString() : "("+lcat.toString()+")/"+rlcat.toString();
        String rlcatstr = (rlcat.isAtomic() ? rlcat.toString() : "(" + rlcat.toString() + ")");
        String rcatstr = lcat.isAtomic() ? lcat.toString() + "/" + rlcatstr
                : "(" + lcat.toString() + ")/" + rlcatstr;
        llcat = lcat.revealCat(lcat, rcatstr);
        //llcat = CCGcat.typeChangingRule(lcat, rcatstr);
    }

    HashMap<String, CCGDepInfo> depsMap = new HashMap<>();
    CCGJRuleInfo tinfo = ccgCombinators.checkCCGRules(rlcat, rcat);
    if (tinfo != null) {
        depsMap = Commons.getDepsMap(rlcat, rcat, tinfo.getResultCat(), depsMap);
        CCGJRuleInfo info = ccgCombinators.checkCCGRules(llcat, tinfo.getResultCat());
        if (info != null) {
            //depsMap = getDepsMap(llcat, tinfo.getResultCat(), info.getResultCat(), depsMap);
            Commons.updateDepTree(tinfo, rlid, rid, depGraph);
            for (int id : rcat.headIdList()) {
                Commons.updateDepTree(tinfo, rlid, id, depGraph);
                Commons.updateRevealDeps(tinfo, rlid, id, rpid, depsMap, sysccgDeps, depGraph);
            }
            if (ftrue || checkWithGoldDeps(depsMap)) {
                //result = applyBinaryUpdate(left, right, info.getResultCat(), RuleType.reveal, true);
                ArcJAction act = ArcJAction.make(SRAction.RREVEAL, level,
                        (result == null) ? null : result.getCCGcat().toString(), rule);
                result = Commons.applyBinaryUpdate(left, right, left.getCCGcat(), act, RuleType.rreveal, true);
                updateccgNodeDeps(left, right, SRAction.RREVEAL, nccgNodeDeps, depsMap, isconj);
                Commons.updateSysDeps(depsMap, sysccgDeps);
                if (revealLevel.containsKey(level))
                    revealLevel.put(level, revealLevel.get(level) + 1);
                else
                    revealLevel.put(level, 1);
                actionMap.put(SRAction.RREVEAL, actionMap.get(SRAction.RREVEAL) + 1);
                rule = RuleType.rreveal;
                //if(level>5) level=5;
                // CCGJRuleInfo ninfo = new CCGJRuleInfo(revcat, rcat, info.getRightCat(), true, rule, level, 0);
                // treebankRules.addRevealRuleInfo(ninfo, revcat.toString()+" "+rcat.toString());
                CCGJRuleInfo ninfo = new CCGJRuleInfo(lcat, rcat, result.getCCGcat(), true, rule, level, 0);
                treebankRules.addRevealRuleInfo(ninfo, lcat.toString() + " " + rcat.toString());
            }
        }
    }
    //if(level>5) level=5;
    return new ImmutablePair<>(result, ArcJAction.make(SRAction.RREVEAL, level,
            (result == null) ? null : result.getCCGcat().toString(), rule));
}
From source file:examples.ClassPropertyUsageAnalyzer.java
/**
 * Prints a list of related properties to the output. The list is encoded as
 * a single CSV value, using "@" as a separator. Miga can decode this.
 * Standard CSV processors do not support lists of entries as values,
 * however.
 *
 * @param out
 *            the output to write to
 * @param usageRecord
 *            the data to write
 */
private void printRelatedProperties(PrintStream out, UsageRecord usageRecord) {

    List<ImmutablePair<PropertyIdValue, Double>> list = new ArrayList<ImmutablePair<PropertyIdValue, Double>>(
            usageRecord.propertyCoCounts.size());
    for (Entry<PropertyIdValue, Integer> coCountEntry : usageRecord.propertyCoCounts.entrySet()) {
        double otherThisItemRate = (double) coCountEntry.getValue() / usageRecord.itemCount;
        double otherGlobalItemRate = (double) this.propertyRecords.get(coCountEntry.getKey()).itemCount
                / this.countPropertyItems;
        double otherThisItemRateStep = 1 / (1 + Math.exp(6 * (-2 * otherThisItemRate + 0.5)));
        double otherInvGlobalItemRateStep = 1 / (1 + Math.exp(6 * (-2 * (1 - otherGlobalItemRate) + 0.5)));

        list.add(new ImmutablePair<PropertyIdValue, Double>(coCountEntry.getKey(),
                otherThisItemRateStep * otherInvGlobalItemRateStep * otherThisItemRate / otherGlobalItemRate));
    }

    Collections.sort(list, new Comparator<ImmutablePair<PropertyIdValue, Double>>() {
        @Override
        public int compare(ImmutablePair<PropertyIdValue, Double> o1,
                ImmutablePair<PropertyIdValue, Double> o2) {
            return o2.getValue().compareTo(o1.getValue());
        }
    });

    out.print(",\"");
    int count = 0;
    for (ImmutablePair<PropertyIdValue, Double> relatedProperty : list) {
        if (relatedProperty.right < 1.5) {
            break;
        }
        if (count > 0) {
            out.print("@");
        }
        // makeshift escaping for Miga:
        out.print(getPropertyLabel(relatedProperty.left).replace("@", ""));
        count++;
    }
    out.print("\"");
}
From source file:com.evolveum.midpoint.schema.util.WfContextUtil.java
private static void collectNotifyBefore(List<Pair<Duration, AbstractWorkItemActionType>> rv,
        List<Duration> beforeTimes, AbstractWorkItemActionType action) {
    beforeTimes.forEach(beforeTime -> rv.add(new ImmutablePair<>(beforeTime, action)));
}
From source file:com.intuit.wasabi.assignment.impl.AssignmentsImplTest.java
@Test
public void testGetSingleAssignmentSuccess() throws IOException {
    AssignmentsImpl assignmentsImpl = spy(
            new AssignmentsImpl(new HashMap<String, AssignmentIngestionExecutor>(), experimentRepository,
                    assignmentsRepository, ruleCache, pages, assignmentDecorator, threadPoolExecutor, eventLog,
                    metadataCacheEnabled, metadataCache));

    //Input
    Application.Name appName = Application.Name.valueOf("Test");
    User.ID user = User.ID.valueOf("testUser");
    Experiment.ID id = Experiment.ID.newInstance();
    Experiment.Label label = Experiment.Label.valueOf("TestExpLabel");
    SegmentationProfile segmentationProfile = mock(SegmentationProfile.class);
    HttpHeaders headers = mock(HttpHeaders.class);

    //Mock dependent interactions
    Experiment experiment = mock(Experiment.class, RETURNS_DEEP_STUBS);
    when(experiment.getLabel()).thenReturn(label);
    when(experiment.getID()).thenReturn(id);
    when(experiment.getState()).thenReturn(Experiment.State.RUNNING);
    when(experiment.getSamplingPercent()).thenReturn(0.5);
    when(experiment.getEndTime().getTime()).thenReturn(new Date().getTime() + 1000000L);
    List<Experiment> expList = newArrayList(experiment);

    PrioritizedExperimentList pExpList = new PrioritizedExperimentList();
    pExpList.addPrioritizedExperiment(PrioritizedExperiment.from(experiment, 1).build());
    Optional<PrioritizedExperimentList> prioritizedExperimentListOptional = Optional.of(pExpList);

    BucketList bucketList = new BucketList();
    bucketList
            .addBucket(Bucket.newInstance(id, Bucket.Label.valueOf("red")).withAllocationPercent(0.5).build());
    bucketList
            .addBucket(Bucket.newInstance(id, Bucket.Label.valueOf("blue")).withAllocationPercent(0.5).build());

    List<Experiment.ID> exclusionList = newArrayList();

    when(metadataCache.getExperimentById(experiment.getID())).thenReturn(Optional.of(experiment));
    when(metadataCache.getExperimentsByAppName(appName)).thenReturn(expList);
    when(metadataCache.getPrioritizedExperimentListMap(appName)).thenReturn(prioritizedExperimentListOptional);
    when(metadataCache.getBucketList(id)).thenReturn(bucketList);
    when(metadataCache.getExclusionList(id)).thenReturn(exclusionList);

    List<Pair<Experiment, String>> existingAssignments = newArrayList(
            new ImmutablePair<Experiment, String>(experiment, "red"));
    Map<Experiment.ID, Experiment> expMap = newHashMap();
    expMap.put(id, experiment);
    when(assignmentsRepository.getAssignments(user, appName, context, expMap)).thenReturn(existingAssignments);

    Assignment result = assignmentsImpl.doSingleAssignment(user, appName, label, context, true, true,
            segmentationProfile, headers);
    assertThat(result.getStatus(), is(Assignment.Status.EXISTING_ASSIGNMENT));
    verify(threadPoolExecutor, times(1)).execute(any(ExperimentRuleCacheUpdateEnvelope.class));
}
From source file:com.streamsets.datacollector.event.handler.remote.TestRemoteDataCollector.java
@Test
public void testRemotePipelines() throws Exception {
    RuntimeInfo runtimeInfo = Mockito.mock(RuntimeInfo.class);
    AclStoreTask aclStoreTask = Mockito.mock(AclStoreTask.class);
    RemoteStateEventListener remoteStateEventListener = Mockito.mock(RemoteStateEventListener.class);
    PipelineStoreTask pipelineStoreTask = Mockito.mock(MockPipelineStoreTask.class);

    RemoteDataCollector dataCollector = new RemoteDataCollector(new Configuration(), new MockManager(),
            pipelineStoreTask, new MockPipelineStateStore(), aclStoreTask, remoteStateEventListener,
            runtimeInfo, Mockito.mock(AclCacheHelper.class), Mockito.mock(StageLibraryTask.class),
            Mockito.mock(BlobStoreTask.class), new SafeScheduledExecutorService(1, "supportBundleExecutor"));

    List<PipelineState> pipelineStates = new ArrayList<>();
    pipelineStates.add(new PipelineStateImpl("user", "name", "rev", PipelineStatus.RUNNING, "message", -1,
            new HashMap<String, Object>(), ExecutionMode.STANDALONE, "", -1, -1));
    File testFolder = tempFolder.newFolder();
    Mockito.when(runtimeInfo.getDataDir()).thenReturn(testFolder.getAbsolutePath());
    Mockito.when(pipelineStoreTask.hasPipeline(Mockito.anyString())).thenReturn(false);
    Mockito.when(remoteStateEventListener.getPipelineStateEvents()).thenReturn(
            Arrays.<Pair<PipelineState, Map<String, String>>>asList(new ImmutablePair<>(pipelineStates.get(0),
                    Collections.singletonMap(Source.POLL_SOURCE_OFFSET_KEY, "offset:1000"))));

    List<PipelineAndValidationStatus> pipelineAndValidationStatuses = dataCollector
            .getRemotePipelinesWithChanges();
    assertEquals(1, pipelineAndValidationStatuses.size());
    PipelineAndValidationStatus pipelineAndValidationStatus = pipelineAndValidationStatuses.get(0);
    assertEquals("name", pipelineAndValidationStatus.getName());
    assertNull(pipelineAndValidationStatus.getTitle());
    assertEquals("rev", pipelineAndValidationStatus.getRev());
    assertEquals(PipelineStatus.RUNNING, pipelineAndValidationStatus.getPipelineStatus());
    assertEquals(false, pipelineAndValidationStatus.isClusterMode());
    assertTrue(pipelineAndValidationStatus.getWorkerInfos().isEmpty());
    assertTrue(pipelineAndValidationStatus.isRemote());
    assertEquals("message", pipelineAndValidationStatus.getMessage());

    // Expected offset JSON:
    // "{\n" + " \"offsets\" : {\n"
    //     + " \"$com.streamsets.datacollector.pollsource.offset$\" : \"offset:1000\"\n" + " },\n"
    //     + " \"version\" : 2\n" + "}"
    SourceOffsetJson sourceOffsetJson = ObjectMapperFactory.get()
            .readValue(pipelineAndValidationStatus.getOffset(), SourceOffsetJson.class);
    assertNotNull(sourceOffsetJson);
    assertEquals(2, sourceOffsetJson.getVersion());
    assertEquals("offset:1000", sourceOffsetJson.getOffsets().get(Source.POLL_SOURCE_OFFSET_KEY));
    assertNull(pipelineAndValidationStatus.getValidationStatus());
}
From source file:com.hortonworks.registries.schemaregistry.SchemaVersionLifecycleManager.java
private ImmutablePair<SchemaVersionLifecycleContext, SchemaVersionLifecycleState> createSchemaVersionLifeCycleContextAndState(
        Long schemaVersionId) throws SchemaNotFoundException {
    // get the current state from storage for the given versionID
    // we can use a query to get max value for the column for a given schema-version-id but StorageManager does not
    // have API to take custom queries.
    Collection<SchemaVersionStateStorable> schemaVersionStates = storageManager.find(
            SchemaVersionStateStorable.NAME_SPACE,
            Collections.singletonList(
                    new QueryParam(SchemaVersionStateStorable.SCHEMA_VERSION_ID, schemaVersionId.toString())),
            Collections.singletonList(OrderByField.of(SchemaVersionStateStorable.SEQUENCE, true)));

    if (schemaVersionStates.isEmpty()) {
        throw new SchemaNotFoundException("No schema versions found with id " + schemaVersionId);
    }
    SchemaVersionStateStorable stateStorable = schemaVersionStates.iterator().next();
    SchemaVersionLifecycleState schemaVersionLifecycleState = schemaVersionLifecycleStateMachine.getStates()
            .get(stateStorable.getStateId());

    SchemaVersionService schemaVersionService = createSchemaVersionService();
    SchemaVersionLifecycleContext context = new SchemaVersionLifecycleContext(
            stateStorable.getSchemaVersionId(), stateStorable.getSequence(), schemaVersionService,
            schemaVersionLifecycleStateMachine, customSchemaStateExecutor);
    return new ImmutablePair<>(context, schemaVersionLifecycleState);
}
From source file:com.epam.catgenome.manager.FileManager.java
/**
 * Loads VCF metadata from .bounds file
 *
 * @param featureFile a fileId in the system
 * @return {@code Map<String, Pair<Integer, Integer>>} map of start indexes of first and last variation
 * of each chromosome
 * @throws IOException
 */
public Map<String, Pair<Integer, Integer>> loadIndexMetadata(FeatureFile featureFile) throws IOException {
    LOGGER.info(getMessage(MessagesConstants.INFO_BOUNDS_METADATA_LOAD, featureFile.getId(),
            featureFile.getName()));
    final Map<String, Object> params = new HashMap<>();
    params.put(DIR_ID.name(), featureFile.getId());
    params.put(USER_ID.name(), featureFile.getCreatedBy());

    FilePathFormat filePathFormat = null;
    if (featureFile instanceof VcfFile) {
        filePathFormat = VCF_METADATA_FILE;
    }
    if (featureFile instanceof GeneFile) {
        filePathFormat = GENE_METADATA_FILE;
    }
    if (filePathFormat == null) {
        throw new IllegalArgumentException(getMessage(MessagesConstants.ERROR_UNSUPPORTED_FEATURE_FILE_TYPE,
                featureFile.getClass().getName()));
    }

    Map<String, Pair<Integer, Integer>> metaMap = new HashMap<>();
    try (DataInputStream dataInputStream = new DataInputStream(
            new FileInputStream(toRealPath(substitute(filePathFormat, params))))) {
        while (dataInputStream.available() > 0) {
            String chrId = dataInputStream.readUTF();
            int startPosition = dataInputStream.readInt();
            int endPosition = dataInputStream.readInt();
            metaMap.put(chrId, new ImmutablePair<>(startPosition, endPosition));
        }
    }
    return metaMap;
}
From source file:com.intuit.wasabi.repository.cassandra.impl.CassandraAssignmentsRepositoryTest.java
@Test
public void testAssignUsersInBatchCalls() {

    //------ Input
    Experiment experiment = Experiment.withID(Experiment.ID.valueOf(this.experimentId))
            .withIsPersonalizationEnabled(false).withIsRapidExperiment(false).build();
    User.ID userID1 = User.ID.valueOf("testuser1");
    User.ID userID2 = User.ID.valueOf("testuser2");
    Context context = Context.valueOf("test");
    Date date = new Date();
    String bucketLabel = "bucket-1";

    Assignment assignment1 = Assignment.newInstance(experiment.getID())
            .withBucketLabel(Bucket.Label.valueOf(bucketLabel)).withCreated(date)
            .withApplicationName(APPLICATION_NAME).withContext(context).withUserID(userID1).build();
    Assignment assignment2 = Assignment.newInstance(experiment.getID()).withBucketLabel(null).withCreated(date)
            .withApplicationName(APPLICATION_NAME).withContext(context).withUserID(userID2).build();

    List<Pair<Experiment, Assignment>> assignmentPairs = new LinkedList<>();
    assignmentPairs.add(new ImmutablePair<>(experiment, assignment1));
    assignmentPairs.add(new ImmutablePair<>(experiment, assignment2));

    //------ Mocking interacting calls
    ResultSetFuture genericResultSetFuture = mock(ResultSetFuture.class);
    ResultSet genericResultSet = mock(ResultSet.class);
    when(genericResultSetFuture.getUninterruptibly()).thenReturn(genericResultSet);
    doNothing().when(assignmentsCountExecutor).execute(any());
    when(driver.getSession()).thenReturn(mock(Session.class));
    when(driver.getSession().execute(any(BatchStatement.class))).thenReturn(genericResultSet);

    //------ Make final call
    boolean success = true;
    try {
        repository.assignUsersInBatch(assignmentPairs, date);
    } catch (Exception e) {
        logger.error("Failed to execute assignUser test...", e);
        success = false;
    }
    assertThat(success, is(true));
}