List of usage examples for the org.apache.commons.lang3.tuple.ImmutablePair constructor
public ImmutablePair(final L left, final R right)
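Before the real-world examples below, here is a minimal, self-contained sketch of constructing and reading a pair. The class and method names are from commons-lang3; the sample values and the class name ImmutablePairExample are illustrative only:

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class ImmutablePairExample {
    public static void main(String[] args) {
        // Construct directly; the pair cannot be modified after creation.
        Pair<String, Integer> pair = new ImmutablePair<>("answer", 42);
        System.out.println(pair.getLeft());   // answer
        System.out.println(pair.getRight());  // 42

        // The static factory ImmutablePair.of(...) is equivalent to the constructor.
        Pair<String, Integer> same = ImmutablePair.of("answer", 42);
        System.out.println(pair.equals(same)); // true
    }
}

Pair's equals() and hashCode() compare both elements, which is why tests such as the Terrapin example below can assert equality against a freshly constructed pair.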
From source file:com.wolvereness.renumerated.Renumerated.java
private void process() throws Throwable {
    validateInput();
    final MultiProcessor executor = MultiProcessor.newMultiProcessor(cores - 1,
            new ThreadFactoryBuilder()
                    .setDaemon(true)
                    .setNameFormat(Renumerated.class.getName() + "-processor-%d")
                    .setUncaughtExceptionHandler(this)
                    .build());
    final Future<?> fileCopy = executor.submit(new Callable<Object>() {
        @Override
        public Object call() throws Exception {
            if (original != null) {
                if (original.exists()) {
                    original.delete();
                }
                Files.copy(input, original);
            }
            return null;
        }
    });
    final List<Pair<ZipEntry, Future<byte[]>>> fileEntries = newArrayList();
    final List<Pair<MutableObject<ZipEntry>, Future<byte[]>>> classEntries = newArrayList();
    {
        final ZipFile input = new ZipFile(this.input);
        final Enumeration<? extends ZipEntry> inputEntries = input.entries();
        while (inputEntries.hasMoreElements()) {
            final ZipEntry entry = inputEntries.nextElement();
            final Future<byte[]> future = executor.submit(new Callable<byte[]>() {
                @Override
                public byte[] call() throws Exception {
                    return ByteStreams.toByteArray(input.getInputStream(entry));
                }
            });
            if (entry.getName().endsWith(".class")) {
                classEntries.add(new MutablePair<MutableObject<ZipEntry>, Future<byte[]>>(
                        new MutableObject<ZipEntry>(entry), future));
            } else {
                fileEntries.add(new ImmutablePair<ZipEntry, Future<byte[]>>(entry, future));
            }
        }
        for (final Pair<MutableObject<ZipEntry>, Future<byte[]>> pair : classEntries) {
            final byte[] data = pair.getRight().get();
            pair.setValue(executor.submit(new Callable<byte[]>() {
                String className;
                List<String> fields;

                @Override
                public byte[] call() throws Exception {
                    try {
                        return method();
                    } catch (final Exception ex) {
                        throw new Exception(pair.getLeft().getValue().getName(), ex);
                    }
                }

                private byte[] method() throws Exception {
                    final ClassReader clazz = new ClassReader(data);
                    clazz.accept(new ClassVisitor(ASM4) {
                        @Override
                        public void visit(final int version, final int access, final String name,
                                final String signature, final String superName, final String[] interfaces) {
                            if (superName.equals("java/lang/Enum")) {
                                className = name;
                            }
                        }

                        @Override
                        public FieldVisitor visitField(final int access, final String name, final String desc,
                                final String signature, final Object value) {
                            if (className != null && (access & 0x4000) != 0) {
                                List<String> fieldNames = fields;
                                if (fieldNames == null) {
                                    fieldNames = fields = newArrayList();
                                }
                                fieldNames.add(name);
                            }
                            return null;
                        }
                    }, ClassReader.SKIP_CODE);
                    if (className == null)
                        return data;
                    final String classDescriptor = Type.getObjectType(className).getDescriptor();
                    final ClassWriter writer = new ClassWriter(0);
                    clazz.accept(new ClassVisitor(ASM4, writer) {
                        @Override
                        public MethodVisitor visitMethod(final int access, final String name, final String desc,
                                final String signature, final String[] exceptions) {
                            final MethodVisitor methodVisitor = super.visitMethod(access, name, desc, signature,
                                    exceptions);
                            if (!name.equals("<clinit>")) {
                                return methodVisitor;
                            }
                            return new MethodVisitor(ASM4, methodVisitor) {
                                final Iterator<String> it = fields.iterator();
                                boolean active;
                                String lastName;

                                @Override
                                public void visitTypeInsn(final int opcode, final String type) {
                                    if (!active && it.hasNext()) {
                                        // Initiate state machine
                                        if (opcode != NEW)
                                            throw new AssertionError(
                                                    "Unprepared for " + opcode + " on " + type + " in " + className);
                                        active = true;
                                    }
                                    super.visitTypeInsn(opcode, type);
                                }

                                @Override
                                public void visitLdcInsn(final Object cst) {
                                    if (active && lastName == null) {
                                        if (!(cst instanceof String))
                                            throw new AssertionError("Unprepared for " + cst + " in " + className);
                                        // Switch the first constant in the Enum constructor
                                        super.visitLdcInsn(lastName = it.next());
                                    } else {
                                        super.visitLdcInsn(cst);
                                    }
                                }

                                @Override
                                public void visitFieldInsn(final int opcode, final String owner, final String name,
                                        final String desc) {
                                    if (opcode == PUTSTATIC && active && lastName != null
                                            && owner.equals(className) && desc.equals(classDescriptor)
                                            && name.equals(lastName)) {
                                        // Finish the current state machine
                                        active = false;
                                        lastName = null;
                                    }
                                    super.visitFieldInsn(opcode, owner, name, desc);
                                }
                            };
                        }
                    }, ClassReader.EXPAND_FRAMES);
                    final MutableObject<ZipEntry> key = pair.getLeft();
                    key.setValue(new ZipEntry(key.getValue().getName()));
                    return writer.toByteArray();
                }
            }));
        }
        for (final Pair<ZipEntry, Future<byte[]>> pair : fileEntries) {
            pair.getRight().get();
        }
        input.close();
    }
    fileCopy.get();
    FileOutputStream fileOut = null;
    JarOutputStream jar = null;
    try {
        jar = new JarOutputStream(fileOut = new FileOutputStream(output));
        for (final Pair<ZipEntry, Future<byte[]>> fileEntry : fileEntries) {
            jar.putNextEntry(fileEntry.getLeft());
            jar.write(fileEntry.getRight().get());
        }
        for (final Pair<MutableObject<ZipEntry>, Future<byte[]>> classEntry : classEntries) {
            final byte[] data = classEntry.getRight().get();
            final ZipEntry entry = classEntry.getLeft().getValue();
            entry.setSize(data.length);
            jar.putNextEntry(entry);
            jar.write(data);
        }
    } finally {
        if (jar != null) {
            try {
                jar.close();
            } catch (final IOException ex) {
            }
        }
        if (fileOut != null) {
            try {
                fileOut.close();
            } catch (final IOException ex) {
            }
        }
    }
    final Pair<Thread, Throwable> uncaught = this.uncaught;
    if (uncaught != null)
        throw new MojoExecutionException(
                String.format("Uncaught exception in %s", uncaught.getLeft()), uncaught.getRight());
}
From source file:com.pinterest.terrapin.TerrapinUtilTest.java
@Test
public void testGetResourceAndPartitionNum() {
    assertEquals(new ImmutablePair("$terrapin$data$file_set$1343443323", 100),
            TerrapinUtil.getResourceAndPartitionNum("$terrapin$data$file_set$1343443323$100"));
    assertEquals(new ImmutablePair("$terrapin$data$file_set$1343443323", 100),
            TerrapinUtil.getResourceAndPartitionNum("$terrapin$data$file_set$1343443323_100"));
    // Invalid partition number.
    Assert.assertNull(TerrapinUtil.getResourceAndPartitionNum("$terrapin$data$file_set$1343443323_1r0"));
    Assert.assertNull(TerrapinUtil.getResourceAndPartitionNum("$terrapin$data$file_set$1343443323$1r0"));
    // Invalid separator - neither $ nor _ are present.
    Assert.assertNull(TerrapinUtil.getResourceAndPartitionNum("WrongPartition"));
}
From source file:com.dangdang.config.service.zookeeper.ZookeeperConfigGroup.java
private Pair<String, String> loadKey(final String nodePath) throws Exception {
    String nodeName = ZKPaths.getNodeFromPath(nodePath);
    Set<String> keysSpecified = configProfile.getKeysSpecified();
    switch (configProfile.getKeyLoadingMode()) {
    case INCLUDE:
        if (keysSpecified == null || !keysSpecified.contains(nodeName)) {
            return null;
        }
        break;
    case EXCLUDE:
        if (keysSpecified.contains(nodeName)) {
            return null;
        }
        break;
    case ALL:
        break;
    default:
        break;
    }
    GetDataBuilder data = client.getData();
    String value = new String(data.watched().forPath(nodePath), Charsets.UTF_8);
    return new ImmutablePair<String, String>(nodeName, value);
}
From source file:code.elix_x.excore.utils.net.packets.SmartNetworkWrapper.java
public <REQ extends IMessage> void registerMessage(final Runnable onReceive, Class<REQ> requestMessageType,
        Side side) {
    registerMessage(new Function<Pair<REQ, MessageContext>, Pair<Runnable, IMessage>>() {
        @Override
        public Pair<Runnable, IMessage> apply(Pair<REQ, MessageContext> t) {
            return new ImmutablePair<Runnable, IMessage>(onReceive, null);
        }
    }, requestMessageType, side);
}
From source file:com.twosigma.beakerx.table.serializer.TableDisplayDeSerializer.java
public static Pair<String, Object> getDeserializeObject(BeakerObjectConverter parent, JsonNode n,
        ObjectMapper mapper) {
    Object o = null;
    String subtype = null;
    try {
        List<List<?>> values = TableDisplayDeSerializer.getValues(parent, n, mapper);
        List<String> columns = TableDisplayDeSerializer.getColumns(n, mapper);
        List<String> classes = TableDisplayDeSerializer.getClasses(n, mapper);
        if (n.has("subtype"))
            subtype = mapper.readValue(n.get("subtype").toString(), String.class);
        if (subtype != null && subtype.equals(TableDisplay.DICTIONARY_SUBTYPE)) {
            o = getValuesAsDictionary(parent, n, mapper);
        } else if (subtype != null && subtype.equals(TableDisplay.LIST_OF_MAPS_SUBTYPE) && columns != null
                && values != null) {
            o = getValuesAsRows(parent, n, mapper);
        } else if (subtype != null && subtype.equals(TableDisplay.MATRIX_SUBTYPE)) {
            o = getValuesAsMatrix(parent, n, mapper);
        }
        if (o == null) {
            if (n.has("hasIndex") && mapper.readValue(n.get("hasIndex").asText(), String.class).equals("true")
                    && columns != null && values != null && n.has(INDEX_NAME)) {
                o = handleIndex(n, mapper, values, columns, classes);
            } else {
                o = new TableDisplay(values, columns, classes);
            }
        }
    } catch (Exception e) {
        logger.error("exception deserializing TableDisplay ", e);
    }
    return new ImmutablePair<String, Object>(subtype, o);
}
From source file:com.acmutv.ontoqa.core.CoreController.java
/**
 * The core main method.
 * It realizes the question-answering process, retrieving an answer for the given question.
 * The underlying ontology and lexicon are specified in the app configuration.
 * @param question the question.
 * @param grammar the SLTAG grammar.
 * @param ontology the ontology.
 * @return the answer.
 * @throws QuestionException when question is malformed.
 * @throws QueryException when the SPARQL query cannot be submitted.
 * @throws OntoqaFatalException when question cannot be processed.
 * @throws OntoqaParsingException when parsing error occurs.
 */
public static Pair<Query, Answer> process(String question, Grammar grammar, Ontology ontology)
        throws Exception {
    LOGGER.debug("Question: {}", question);
    final String normalizedQuestion = normalizeQuestion(question);
    LOGGER.debug("Normalized question: {}", normalizedQuestion);
    Sltag sltag = parser.parse(normalizedQuestion, grammar, ontology);
    Dudes dudes = sltag.getSemantics();
    Query query = dudes.convertToSPARQL();
    LOGGER.debug("SPARQL Query:\n{}", query.toString());
    QueryResult qQueryResult = KnowledgeManager.submit(ontology, query);
    Answer answer = qQueryResult.toAnswer();
    LOGGER.trace(answer.toPrettyString());
    return new ImmutablePair<>(query, answer);
}
From source file:com.uber.hoodie.utilities.sources.DFSSource.java
@Override
public Pair<Optional<JavaRDD<GenericRecord>>, String> fetchNewData(Optional<String> lastCheckpointStr,
        long maxInputBytes) {
    try {
        // obtain all eligible files under root folder.
        List<FileStatus> eligibleFiles = new ArrayList<>();
        RemoteIterator<LocatedFileStatus> fitr = fs
                .listFiles(new Path(config.getString(Config.ROOT_INPUT_PATH_PROP)), true);
        while (fitr.hasNext()) {
            LocatedFileStatus fileStatus = fitr.next();
            if (fileStatus.isDirectory() || IGNORE_FILEPREFIX_LIST.stream()
                    .filter(pfx -> fileStatus.getPath().getName().startsWith(pfx)).count() > 0) {
                continue;
            }
            eligibleFiles.add(fileStatus);
        }
        // sort them by modification time.
        eligibleFiles.sort((FileStatus f1, FileStatus f2) -> Long.valueOf(f1.getModificationTime())
                .compareTo(Long.valueOf(f2.getModificationTime())));
        // Filter based on checkpoint & input size, if needed
        long currentBytes = 0;
        long maxModificationTime = Long.MIN_VALUE;
        List<FileStatus> filteredFiles = new ArrayList<>();
        for (FileStatus f : eligibleFiles) {
            if (lastCheckpointStr.isPresent()
                    && f.getModificationTime() <= Long.valueOf(lastCheckpointStr.get())) {
                // skip processed files
                continue;
            }
            maxModificationTime = f.getModificationTime();
            currentBytes += f.getLen();
            filteredFiles.add(f);
            if (currentBytes >= maxInputBytes) {
                // we have enough data, we are done
                break;
            }
        }
        // no data to read
        if (filteredFiles.size() == 0) {
            return new ImmutablePair<>(Optional.empty(),
                    lastCheckpointStr.isPresent() ? lastCheckpointStr.get() : String.valueOf(Long.MIN_VALUE));
        }
        // read the files out.
        String pathStr = filteredFiles.stream().map(f -> f.getPath().toString())
                .collect(Collectors.joining(","));
        String schemaStr = schemaProvider.getSourceSchema().toString();
        final AvroConvertor avroConvertor = new AvroConvertor(schemaStr);
        return new ImmutablePair<>(
                Optional.of(DFSSource.fromFiles(dataFormat, avroConvertor, pathStr, sparkContext)),
                String.valueOf(maxModificationTime));
    } catch (IOException ioe) {
        throw new HoodieIOException("Unable to read from source from checkpoint: " + lastCheckpointStr, ioe);
    }
}
From source file:models.Search.java
private Pair<List<Document>, Long> doSearch() {
    validateSearchParameters();
    final QueryBuilder queryBuilder = createQuery();
    Logger.trace("Using query: " + queryBuilder);
    final SearchResponse response = search(queryBuilder,
            Boolean.getBoolean(parameters.get(Parameter.SCROLL)) ? SearchType.SCAN
                    : SearchType.DFS_QUERY_THEN_FETCH);
    Logger.trace("Got response: " + response);
    final SearchHits hits = response.getHits();
    final List<Document> docs = asDocuments(hits, fields(parameters));
    final Pair<List<Document>, Long> result = new ImmutablePair<>(docs, hits.getTotalHits());
    Logger.debug(String.format("Got %s hits overall, created %s matching docs", hits.getTotalHits(),
            docs.size()));
    return result;
}
From source file:com.streamsets.pipeline.stage.it.AllPartitionTypesIT.java
@Test
public void testPartitionType() throws Exception {
    HiveMetadataProcessor processor = new HiveMetadataProcessorBuilder().partitions(partition).build();
    final HiveMetastoreTarget hiveTarget = new HiveMetastoreTargetBuilder().build();

    Map<String, Field> map = new LinkedHashMap<>();
    map.put("col", field);
    Record record = RecordCreator.create("s", "s:1");
    record.set(Field.create(map));

    try {
        processRecords(processor, hiveTarget, ImmutableList.of(record));
        if (!supported) {
            Assert.fail("Type is not supported, but yet no exception was thrown");
        }
    } catch (StageException se) {
        if (supported) {
            LOG.error("Processing exception", se);
            Assert.fail("Processing testing record unexpectedly failed: " + se.getMessage());
            throw se;
        } else {
            // We're comparing string here as the class ContainerError is not on classpath
            Assert.assertEquals("CONTAINER_0010", se.getErrorCode().getCode());
            Assert.assertTrue(se.getMessage().contains(Errors.HIVE_METADATA_09.name()));
            // No additional verification necessary
            return;
        }
    }

    assertTableExists("default.tbl");
    assertQueryResult("select * from tbl", new QueryValidator() {
        @Override
        public void validateResultSet(ResultSet rs) throws Exception {
            assertResultSetStructure(rs,
                    new ImmutablePair("tbl.col", hiveType),
                    new ImmutablePair("tbl.part", hiveType));

            Assert.assertTrue("Table tbl doesn't contain any rows", rs.next());
            Assert.assertEquals(hiveValue, rs.getObject(1));
            Assert.assertEquals(hiveValue, rs.getObject(2));
            Assert.assertFalse("Table tbl contains more then one row", rs.next());
        }
    });
}
From source file:com.nextdoor.bender.operation.conditional.ConditionalOperation.java
public Stream<InternalEvent> getOutputStream(Stream<InternalEvent> input) {
    /*
     * outputStreams keeps track of the output Stream of each Condition.
     */
    List<Stream<InternalEvent>> outputStreams = new ArrayList<Stream<InternalEvent>>(
            this.conditionsAndProcs.size());

    /*
     * From a list of operation configurations in each condition construct queues and streams.
     */
    this.filtersAndQueues = new ArrayList<Pair<FilterOperation, Queue<InternalEvent>>>(
            this.conditionsAndProcs.size());
    for (Pair<FilterOperation, List<OperationProcessor>> filterAndProcs : this.conditionsAndProcs) {
        FilterOperation filter = filterAndProcs.getLeft();
        List<OperationProcessor> procs = filterAndProcs.getRight();

        /*
         * Construct a Queue for each conditional. This is the input to each Condition.
         */
        Queue<InternalEvent> queue = new Queue<InternalEvent>(
                new LinkedBlockingQueue<InternalEvent>(procs.size()));
        this.filtersAndQueues.add(new ImmutablePair<FilterOperation, Queue<InternalEvent>>(filter, queue));

        /*
         * Connect the condition's input Queue with operations. Each operation returns a stream with
         * its operation concatenated on.
         */
        Stream<InternalEvent> conditionInput = queue.jdkStream();
        for (OperationProcessor proc : procs) {
            conditionInput = proc.perform(conditionInput);
        }

        /*
         * Last input is the output.
         */
        outputStreams.add(conditionInput);
    }

    /*
     * Condition Consumer Threads
     *
     * Combine each condition's output stream and write to the output Queue. When all data is
     * consumed the last condition closes the output Queue.
     */
    Queue<InternalEvent> outputQueue = new Queue<InternalEvent>(
            new LinkedBlockingQueue<InternalEvent>(this.conditionsAndProcs.size()));
    AtomicInteger lock = new AtomicInteger(outputStreams.size());
    outputStreams.forEach(stream -> {
        this.es.execute(new StreamToQueue(stream, outputQueue, lock));
    });

    /*
     * Consume input Stream in a thread and publish to each condition's Queue.
     */
    new Thread(new Runnable() {
        @Override
        public void run() {
            input.forEach(ievent -> {
                boolean matches = false;

                for (Pair<FilterOperation, Queue<InternalEvent>> filterAndQueue : filtersAndQueues) {
                    FilterOperation filter = filterAndQueue.getLeft();

                    /*
                     * If event passes the filter offer event to queue.
                     */
                    if (filter.test(ievent)) {
                        filterAndQueue.getRight().offer(ievent);
                        matches = true;
                        break;
                    }
                }

                /*
                 * Send to output queue if no case matches
                 */
                if (!matches && !filterNonMatch) {
                    outputQueue.offer(ievent);
                }
            });

            /*
             * Close queues when source queue is consumed.
             */
            for (Pair<FilterOperation, Queue<InternalEvent>> filterAndQueue : filtersAndQueues) {
                filterAndQueue.getRight().close();
            }
        }
    }).start();

    return outputQueue.jdkStream();
}