List of usage examples for java.util.Scanner.next()
public String next()
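Before the project-specific examples below, here is a minimal, self-contained sketch of the pattern they all share: next() returns the next token (whitespace-delimited by default, or split on whatever useDelimiter() was given) and is normally guarded by hasNext(). The input string is invented for illustration.

import java.util.Scanner;

public class ScannerNextDemo {
    public static void main(String[] args) {
        // Default delimiter is whitespace, so next() yields one word at a time.
        Scanner scanner = new Scanner("alpha beta gamma");
        while (scanner.hasNext()) {
            System.out.println(scanner.next()); // prints alpha, beta, gamma
        }
        scanner.close();
    }
}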
From source file:com.inmobi.conduit.distcp.tools.TestIntegration.java
@Test
public void testJobConters() {
    try {
        Path listFile = new Path("target/tmp1/listing").makeQualified(fs);
        addEntries(listFile, "*");
        createFileForAudit("/conduit/streams/test1/2013/10/10/10/10/file1.gz");
        runTest(listFile, target, true);
        int numberOfCountersPerFile = 0;
        long sumOfCounterValues = 0;
        FileStatus[] statuses = fs.listStatus(counterOutputPath, new PathFilter() {
            public boolean accept(Path path) {
                return path.toString().contains("part");
            }
        });
        for (FileStatus status : statuses) {
            Scanner scanner = new Scanner(fs.open(status.getPath()));
            while (scanner.hasNext()) {
                String counterNameValue = null;
                try {
                    counterNameValue = scanner.next();
                    String tmp[] = counterNameValue.split(ConduitConstants.AUDIT_COUNTER_NAME_DELIMITER);
                    Assert.assertEquals(4, tmp.length);
                    Long numOfMsgs = Long.parseLong(tmp[3]);
                    numberOfCountersPerFile++;
                    sumOfCounterValues += numOfMsgs;
                } catch (Exception e) {
                    LOG.error("Counters file has malformed line with counter name = " + counterNameValue
                            + " ..skipping the line ", e);
                }
            }
        }
        // should have 2 counters per file
        Assert.assertEquals(2, numberOfCountersPerFile);
        // sum of all counter values should equal the total number of messages
        Assert.assertEquals(3, sumOfCounterValues);
        checkResult(target, 1);
    } catch (IOException e) {
        LOG.error("Exception encountered while testing distcp", e);
        Assert.fail("distcp failure");
    } finally {
        TestDistCpUtils.delete(fs, root);
    }
}
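The test above reads one whitespace-separated counter token at a time and splits it on ConduitConstants.AUDIT_COUNTER_NAME_DELIMITER, whose value is not shown here. The sketch below assumes a hypothetical "#" separator and made-up counter data purely to illustrate the next()-then-split pattern.

import java.util.Scanner;

public class CounterTokenDemo {
    public static void main(String[] args) {
        // Hypothetical counter tokens: name#host#window#value (the real delimiter
        // comes from ConduitConstants.AUDIT_COUNTER_NAME_DELIMITER).
        String counters = "stream1#host1#201310101010#2 stream1#host2#201310101010#1";
        Scanner scanner = new Scanner(counters);
        long sum = 0;
        while (scanner.hasNext()) {
            String[] parts = scanner.next().split("#");
            if (parts.length == 4) {
                sum += Long.parseLong(parts[3]); // accumulate the message count
            }
        }
        scanner.close();
        System.out.println("sum = " + sum); // 3
    }
}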
From source file:org.opencastproject.composer.gstreamer.AbstractGSEncoderEngine.java
/**
 * Parses image extraction configuration in the following format:
 * #{image_time_1}:#{image_width}x#{image_height}. Multiple extraction
 * configurations can be separated by commas.
 *
 * @param configuration
 *          configuration for image extraction
 * @param outputTemplate
 *          output path template. Should be in the form /some_file_name_#{time}.jpg
 *          so that each image will have its unique path.
 * @return parsed list for image extraction
 */
protected List<ImageExtractionProperties> parseImageExtractionConfiguration(String configuration,
        String outputTemplate) {
    LinkedList<ImageExtractionProperties> propertiesList = new LinkedList<AbstractGSEncoderEngine.ImageExtractionProperties>();
    Scanner scanner = new Scanner(configuration);
    scanner.useDelimiter(",");
    int counter = 0;
    while (scanner.hasNext()) {
        String nextToken = scanner.next().trim();
        if (!nextToken.matches("[0-9]+:[0-9]+[x|X][0-9]+")) {
            throw new IllegalArgumentException("Invalid token found: " + nextToken);
        }
        String[] properties = nextToken.split("[:|x|X]");
        String output = outputTemplate.replaceAll("#\\{time\\}", properties[0]);
        if (output.equals(outputTemplate)) {
            logger.warn("Output filename does not contain #{time} template: multiple images will overwrite");
        }
        if (new File(output).exists()) {
            String outputFile = FilenameUtils.removeExtension(output);
            String extension = FilenameUtils.getExtension(output);
            output = outputFile + "_reencode." + extension;
        }
        ImageExtractionProperties imageProperties = new ImageExtractionProperties(counter++,
                Long.parseLong(properties[0]), Integer.parseInt(properties[1]),
                Integer.parseInt(properties[2]), output);
        propertiesList.add(imageProperties);
    }
    Collections.sort(propertiesList, new Comparator<ImageExtractionProperties>() {
        @Override
        public int compare(ImageExtractionProperties o1, ImageExtractionProperties o2) {
            return (int) (o2.timeInSeconds - o1.timeInSeconds);
        }
    });
    return propertiesList;
}
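A standalone sketch of the same tokenizing step: useDelimiter(",") makes next() return one time:WIDTHxHEIGHT entry per call, which is then validated and split exactly as above. The sample configuration string is invented for illustration.

import java.util.Scanner;

public class ImageConfigDemo {
    public static void main(String[] args) {
        String configuration = "5:640x480, 10:1280x720";
        Scanner scanner = new Scanner(configuration);
        scanner.useDelimiter(",");
        while (scanner.hasNext()) {
            String token = scanner.next().trim();
            if (!token.matches("[0-9]+:[0-9]+[x|X][0-9]+")) {
                throw new IllegalArgumentException("Invalid token found: " + token);
            }
            String[] properties = token.split("[:|x|X]");
            System.out.println("time=" + properties[0]
                    + " width=" + properties[1] + " height=" + properties[2]);
        }
        scanner.close();
    }
}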
From source file:com.zero_x_baadf00d.partialize.Partialize.java
/**
 * Build a JSON object from data taken from the scanner and
 * the given class type and instance.
 *
 * @param depth         The current depth
 * @param fields        The field names to request
 * @param clazz         The class of the object to render
 * @param instance      The instance of the object to render
 * @param partialObject The partial JSON document
 * @return A JSON object
 * @since 16.01.18
 */
private ObjectNode buildPartialObject(final int depth, String fields, final Class<?> clazz,
        final Object instance, final ObjectNode partialObject) {
    if (depth <= this.maximumDepth) {
        if (clazz.isAnnotationPresent(com.zero_x_baadf00d.partialize.annotation.Partialize.class)) {
            final List<String> closedFields = new ArrayList<>();
            List<String> allowedFields = Arrays.asList(clazz
                    .getAnnotation(com.zero_x_baadf00d.partialize.annotation.Partialize.class).allowedFields());
            List<String> defaultFields = Arrays.asList(clazz
                    .getAnnotation(com.zero_x_baadf00d.partialize.annotation.Partialize.class).defaultFields());
            if (allowedFields.isEmpty()) {
                allowedFields = new ArrayList<>();
                for (final Method m : clazz.getDeclaredMethods()) {
                    final String methodName = m.getName();
                    if (methodName.startsWith("get") || methodName.startsWith("has")) {
                        final char[] c = methodName.substring(3).toCharArray();
                        c[0] = Character.toLowerCase(c[0]);
                        allowedFields.add(new String(c));
                    } else if (methodName.startsWith("is")) {
                        final char[] c = methodName.substring(2).toCharArray();
                        c[0] = Character.toLowerCase(c[0]);
                        allowedFields.add(new String(c));
                    }
                }
            }
            if (defaultFields.isEmpty()) {
                defaultFields = allowedFields.stream().map(f -> {
                    if (this.aliases != null && this.aliases.containsValue(f)) {
                        for (Map.Entry<String, String> e : this.aliases.entrySet()) {
                            if (e.getValue().compareToIgnoreCase(f) == 0) {
                                return e.getKey();
                            }
                        }
                    }
                    return f;
                }).collect(Collectors.toList());
            }
            if (fields == null || fields.length() == 0) {
                fields = defaultFields.stream().collect(Collectors.joining(","));
            }
            Scanner scanner = new Scanner(fields);
            scanner.useDelimiter(com.zero_x_baadf00d.partialize.Partialize.SCANNER_DELIMITER);
            while (scanner.hasNext()) {
                String word = scanner.next();
                String args = null;
                if (word.compareTo("*") == 0) {
                    final StringBuilder sb = new StringBuilder();
                    if (scanner.hasNext()) {
                        scanner.useDelimiter("\n");
                        sb.append(",");
                        sb.append(scanner.next());
                    }
                    final Scanner newScanner = new Scanner(
                            allowedFields.stream().filter(f -> !closedFields.contains(f)).map(f -> {
                                if (this.aliases != null && this.aliases.containsValue(f)) {
                                    for (Map.Entry<String, String> e : this.aliases.entrySet()) {
                                        if (e.getValue().compareToIgnoreCase(f) == 0) {
                                            return e.getKey();
                                        }
                                    }
                                }
                                return f;
                            }).collect(Collectors.joining(",")) + sb.toString());
                    newScanner.useDelimiter(com.zero_x_baadf00d.partialize.Partialize.SCANNER_DELIMITER);
                    scanner.close();
                    scanner = newScanner;
                }
                if (word.contains("(")) {
                    while (scanner.hasNext()
                            && (StringUtils.countMatches(word, "(") != StringUtils.countMatches(word, ")"))) {
                        word += "," + scanner.next();
                    }
                    final Matcher m = this.fieldArgsPattern.matcher(word);
                    if (m.find()) {
                        word = m.group(1);
                        args = m.group(2);
                    }
                }
                final String aliasField = word;
                final String field = this.aliases != null && this.aliases.containsKey(aliasField)
                        ? this.aliases.get(aliasField)
                        : aliasField;
                if (allowedFields.stream().anyMatch(
                        f -> f.toLowerCase(Locale.ENGLISH).compareTo(field.toLowerCase(Locale.ENGLISH)) == 0)) {
                    if (this.accessPolicyFunction != null
                            && !this.accessPolicyFunction.apply(new AccessPolicy(clazz, instance, field))) {
                        continue;
                    }
                    closedFields.add(aliasField);
                    try {
                        final Method method = clazz.getMethod("get" + WordUtils.capitalize(field));
                        final Object object = method.invoke(instance);
                        this.internalBuild(depth, aliasField, field, args, partialObject, clazz, object);
                    } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignore) {
                        try {
                            final Method method = clazz.getMethod(field);
                            final Object object = method.invoke(instance);
                            this.internalBuild(depth, aliasField, field, args, partialObject, clazz, object);
                        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) {
                            if (this.exceptionConsumer != null) {
                                this.exceptionConsumer.accept(ex);
                            }
                        }
                    }
                }
            }
            return partialObject;
        } else if (instance instanceof Map<?, ?>) {
            if (fields == null || fields.isEmpty() || fields.compareTo("*") == 0) {
                for (Map.Entry<?, ?> e : ((Map<?, ?>) instance).entrySet()) {
                    this.internalBuild(depth, String.valueOf(e.getKey()), String.valueOf(e.getKey()), null,
                            partialObject, e.getValue() == null ? Object.class : e.getValue().getClass(),
                            e.getValue());
                }
            } else {
                final Map<?, ?> tmpMap = (Map<?, ?>) instance;
                for (final String k : fields.split(",")) {
                    if (k.compareTo("*") != 0) {
                        final Object o = tmpMap.get(k);
                        this.internalBuild(depth, k, k, null, partialObject,
                                o == null ? Object.class : o.getClass(), o);
                    } else {
                        for (Map.Entry<?, ?> e : ((Map<?, ?>) instance).entrySet()) {
                            this.internalBuild(depth, String.valueOf(e.getKey()), String.valueOf(e.getKey()),
                                    null, partialObject,
                                    e.getValue() == null ? Object.class : e.getValue().getClass(),
                                    e.getValue());
                        }
                    }
                }
            }
        } else {
            throw new RuntimeException("Can't convert " + clazz.getCanonicalName());
        }
    }
    return partialObject;
}
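One detail worth isolating from the Partialize example: when a token contains "(", the code keeps appending further tokens until parentheses balance, so a nested request such as friends(id,name) survives the comma delimiter. SCANNER_DELIMITER's actual value is not shown above, so this sketch assumes a plain comma and replaces StringUtils.countMatches with a small local helper to stay self-contained.

import java.util.Scanner;

public class FieldListDemo {
    // Counts occurrences of a character; stands in for StringUtils.countMatches.
    private static int count(String s, char c) {
        int n = 0;
        for (int i = 0; i < s.length(); i++) {
            if (s.charAt(i) == c) {
                n++;
            }
        }
        return n;
    }

    public static void main(String[] args) {
        // Hypothetical field request; the real delimiter is Partialize.SCANNER_DELIMITER.
        Scanner scanner = new Scanner("id,name,friends(id,name)");
        scanner.useDelimiter(",");
        while (scanner.hasNext()) {
            String word = scanner.next();
            // Re-join tokens until parentheses balance, so "friends(id,name)" stays whole.
            while (scanner.hasNext() && count(word, '(') != count(word, ')')) {
                word += "," + scanner.next();
            }
            System.out.println(word); // id, name, friends(id,name)
        }
        scanner.close();
    }
}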
From source file:tajo.engine.planner.physical.TestPhysicalPlanner.java
@Test
public final void testPartitionedStorePlanWithEmptyGroupingSet() throws IOException {
    Fragment[] frags = sm.split("score");
    QueryUnitAttemptId id = TUtil.newQueryUnitAttemptId();
    File workDir = TajoTestingUtility.getTestDir("testPartitionedStorePlanWithEmptyGroupingSet");
    TaskAttemptContext ctx = new TaskAttemptContext(conf, id, new Fragment[] { frags[0] }, workDir);
    PlanningContext context = analyzer.parse(QUERIES[15]);
    LogicalNode plan = planner.createPlan(context);

    int numPartitions = 1;
    StoreTableNode storeNode = new StoreTableNode("emptyset");
    storeNode.setPartitions(SubQuery.PARTITION_TYPE.HASH, new Column[] {}, numPartitions);
    PlannerUtil.insertNode(plan, storeNode);
    plan = LogicalOptimizer.optimize(context, plan);

    TableMeta outputMeta = TCatUtil.newTableMeta(plan.getOutSchema(), StoreType.CSV);
    sm.initTableBase(outputMeta, "emptyset");

    PhysicalPlanner phyPlanner = new PhysicalPlannerImpl(conf, sm);
    PhysicalExec exec = phyPlanner.createPlan(ctx, plan);
    exec.init();
    exec.next();
    exec.close();

    Path path = StorageUtil.concatPath(workDir.getAbsolutePath(), "out");
    FileSystem fs = sm.getFileSystem();
    assertEquals(numPartitions, fs.listStatus(StorageUtil.concatPath(path, "data")).length);

    Scanner scanner = sm.getTableScanner(path);
    Tuple tuple;
    int i = 0;
    while ((tuple = scanner.next()) != null) {
        assertEquals(60, tuple.get(2).asInt()); // sum
        assertEquals(3, tuple.get(3).asInt());  // max
        assertEquals(1, tuple.get(4).asInt());  // min
        i++;
    }
    assertEquals(1, i);
    scanner.close();

    // Examine the statistics information
    assertEquals(1, ctx.getResultStats().getNumRows().longValue());
}
From source file:br.edu.ifes.bd2dao.cgt.Menu.java
private void atualizarAluno() {
    Scanner sc = new Scanner(System.in);
    listarAlunos();
    System.out.println("\nInforme um ID:");
    int id = sc.nextInt();
    Aluno a = new Aluno().selecionar(new Long(id));
    Field fields[] = Aluno.class.getDeclaredFields();
    for (Field field : fields) {
        if (!field.getName().equals("id")) {
            System.out.println("Deseja atualizar o campo (" + field.getName().toUpperCase() + ") [n/s]?");
            String opt = sc.next().toUpperCase();
            if (opt.contains("S")) {
                setValue(field.getName(), a);
            }
        }
    }
    try {
        a.atualizar(a);
    } catch (IdNotFoundException ex) {
        ex.printStackTrace();
    }
}
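A minimal sketch (with made-up English prompts) of the interactive pattern above: nextInt() reads the ID and next() reads each one-word yes/no answer from System.in.

import java.util.Scanner;

public class PromptDemo {
    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        System.out.println("Enter an ID:");
        int id = sc.nextInt();
        System.out.println("Update field NAME [n/s]?");
        String opt = sc.next().toUpperCase();
        if (opt.contains("S")) {
            System.out.println("Would update record " + id);
        }
        sc.close();
    }
}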
From source file:edu.uci.ics.asterix.transaction.management.service.locking.LockManagerDeterministicUnitTest.java
public void readRequest() throws IOException, ACIDException {
    int i = 0;
    LockRequest lockRequest = null;
    TransactionContext txnContext = null;
    HashMap<Integer, TransactionContext> jobMap = new HashMap<Integer, TransactionContext>();

    int threadId;
    String requestType;
    int jobId;
    int datasetId;
    int PKHashVal;
    int waitTime;
    ArrayList<Integer> list = null;
    String lockMode;

    Scanner scanner = new Scanner(new FileInputStream(requestFileName));
    while (scanner.hasNextLine()) {
        try {
            threadId = Integer.parseInt(scanner.next().substring(1));
            requestType = scanner.next();
            if (requestType.equals("CSQ") || requestType.equals("CST") || requestType.equals("END")) {
                log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType);
                lockRequest = new LockRequest("Thread-" + threadId, getRequestType(requestType));
                if (requestType.equals("CSQ") || requestType.equals("CST")) {
                    list = new ArrayList<Integer>();
                    while (scanner.hasNextInt()) {
                        threadId = scanner.nextInt();
                        if (threadId < 0) {
                            break;
                        }
                        list.add(threadId);
                    }
                    expectedResultList.add(list);
                }
            } else if (requestType.equals("DW")) {
                defaultWaitTime = scanner.nextInt();
                log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType + "," + defaultWaitTime);
                continue;
            } else if (requestType.equals("W")) {
                waitTime = scanner.nextInt();
                log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType);
                lockRequest = new LockRequest("Thread-" + threadId, getRequestType(requestType), waitTime);
            } else {
                jobId = Integer.parseInt(scanner.next().substring(1));
                datasetId = Integer.parseInt(scanner.next().substring(1));
                PKHashVal = Integer.parseInt(scanner.next().substring(1));
                lockMode = scanner.next();
                txnContext = jobMap.get(jobId);
                if (txnContext == null) {
                    txnContext = new TransactionContext(new JobId(jobId), txnProvider);
                    jobMap.put(jobId, txnContext);
                }
                log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType + ",J" + jobId + ",D" + datasetId
                        + ",E" + PKHashVal + "," + lockMode);
                lockRequest = new LockRequest("Thread-" + threadId, getRequestType(requestType),
                        new DatasetId(datasetId), PKHashVal, getLockMode(lockMode), txnContext);
            }
            requestList.add(lockRequest);
        } catch (NoSuchElementException e) {
            scanner.close();
            break;
        }
    }
}
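The lock-request reader strips a one-character prefix before parsing numbers (tokens such as "T1", "J5", "D3"). A self-contained sketch of that next().substring(1) idiom, with an invented request line:

import java.util.Scanner;

public class LockRequestDemo {
    public static void main(String[] args) {
        // Hypothetical request line: thread, type, job, dataset, entity, lock mode.
        Scanner scanner = new Scanner("T1 L J5 D3 E42 S");
        int threadId = Integer.parseInt(scanner.next().substring(1));
        String requestType = scanner.next();
        int jobId = Integer.parseInt(scanner.next().substring(1));
        int datasetId = Integer.parseInt(scanner.next().substring(1));
        int pkHashVal = Integer.parseInt(scanner.next().substring(1));
        String lockMode = scanner.next();
        scanner.close();
        System.out.println(threadId + " " + requestType + " " + jobId + " "
                + datasetId + " " + pkHashVal + " " + lockMode);
    }
}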
From source file:tajo.engine.planner.physical.TestPhysicalPlanner.java
@Test
public final void testPartitionedStorePlan() throws IOException {
    Fragment[] frags = sm.split("score");
    QueryUnitAttemptId id = TUtil.newQueryUnitAttemptId();
    File workDir = TajoTestingUtility.getTestDir("testPartitionedStorePlan");
    TaskAttemptContext ctx = new TaskAttemptContext(conf, id, new Fragment[] { frags[0] }, workDir);
    PlanningContext context = analyzer.parse(QUERIES[7]);
    LogicalNode plan = planner.createPlan(context);

    int numPartitions = 3;
    Column key1 = new Column("score.deptName", DataType.STRING);
    Column key2 = new Column("score.class", DataType.STRING);
    StoreTableNode storeNode = new StoreTableNode("partition");
    storeNode.setPartitions(SubQuery.PARTITION_TYPE.HASH, new Column[] { key1, key2 }, numPartitions);
    PlannerUtil.insertNode(plan, storeNode);
    plan = LogicalOptimizer.optimize(context, plan);

    TableMeta outputMeta = TCatUtil.newTableMeta(plan.getOutSchema(), StoreType.CSV);
    sm.initTableBase(outputMeta, "partition");

    PhysicalPlanner phyPlanner = new PhysicalPlannerImpl(conf, sm);
    PhysicalExec exec = phyPlanner.createPlan(ctx, plan);
    exec.init();
    exec.next();
    exec.close();

    Path path = StorageUtil.concatPath(workDir.getAbsolutePath(), "out");
    FileSystem fs = sm.getFileSystem();
    FileStatus[] list = fs.listStatus(StorageUtil.concatPath(path, "data"));
    for (FileStatus status : list) {
        System.out.println(status.getPath() + ", " + status.getLen());
    }
    assertEquals(numPartitions, fs.listStatus(StorageUtil.concatPath(path, "data")).length);

    Scanner scanner = sm.getTableScanner(path);
    Tuple tuple;
    int i = 0;
    while ((tuple = scanner.next()) != null) {
        assertEquals(6, tuple.get(2).asInt()); // sum
        assertEquals(3, tuple.get(3).asInt()); // max
        assertEquals(1, tuple.get(4).asInt()); // min
        i++;
    }
    assertEquals(10, i);
    scanner.close();

    // Examine the statistics information
    assertEquals(10, ctx.getResultStats().getNumRows().longValue());
}
From source file:org.apache.fop.fonts.type1.Type1SubsetFile.java
protected String getEntryPart(String entry, int part) {
    Scanner s = new Scanner(entry).useDelimiter(" ");
    for (int i = 1; i < part; i++) {
        s.next();
    }
    return s.next();
}
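A usage sketch of the same skip-ahead idea: with a space delimiter, calling next() part-1 times discards the leading tokens and the following next() returns the requested part (1-based). The entry string and class name are invented.

import java.util.Scanner;

public class EntryPartDemo {
    static String getEntryPart(String entry, int part) {
        Scanner s = new Scanner(entry).useDelimiter(" ");
        for (int i = 1; i < part; i++) {
            s.next(); // skip tokens before the requested part
        }
        String result = s.next();
        s.close();
        return result;
    }

    public static void main(String[] args) {
        System.out.println(getEntryPart("/FontName /Example def", 2)); // prints "/Example"
    }
}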
From source file:tajo.engine.planner.physical.TestPhysicalPlanner.java
@Test
public final void testStorePlan() throws IOException {
    Fragment[] frags = sm.split("score");
    File workDir = TajoTestingUtility.getTestDir("testStorePlan");
    TaskAttemptContext ctx = new TaskAttemptContext(conf, TUtil.newQueryUnitAttemptId(),
            new Fragment[] { frags[0] }, workDir);
    PlanningContext context = analyzer.parse(QUERIES[8]);
    LogicalNode plan = planner.createPlan(context);
    plan = LogicalOptimizer.optimize(context, plan);

    TableMeta outputMeta = TCatUtil.newTableMeta(plan.getOutSchema(), StoreType.CSV);
    sm.initTableBase(outputMeta, "grouped");

    PhysicalPlanner phyPlanner = new PhysicalPlannerImpl(conf, sm);
    PhysicalExec exec = phyPlanner.createPlan(ctx, plan);
    exec.init();
    exec.next();
    exec.close();

    Scanner scanner = sm.getScanner("grouped", ctx.getTaskId().toString());
    Tuple tuple;
    int i = 0;
    while ((tuple = scanner.next()) != null) {
        assertEquals(6, tuple.get(2).asInt()); // sum
        assertEquals(3, tuple.get(3).asInt()); // max
        assertEquals(1, tuple.get(4).asInt()); // min
        i++;
    }
    assertEquals(10, i);
    scanner.close();

    // Examine the statistics information
    assertEquals(10, ctx.getResultStats().getNumRows().longValue());
}
From source file:gov.nih.nci.caarray.magetab.idf.IdfDocument.java
private void handlePersonRole(String value, int valueIndex) {
    List<String> roles = new ArrayList<String>();
    java.util.Scanner scanner = new java.util.Scanner(value).useDelimiter(";");
    while (scanner.hasNext()) {
        roles.add(scanner.next());
    }
    Iterator<String> rolesIter = roles.iterator();
    while (rolesIter.hasNext()) {
        investigation.getOrCreatePerson(valueIndex).getRoles()
                .add(addOntologyTerm(MageTabOntologyCategory.ROLES, rolesIter.next()));
    }
}
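A self-contained sketch of the delimiter-driven split above: useDelimiter(";") makes next() return one role per call. The role values are invented.

import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;

public class RolesDemo {
    public static void main(String[] args) {
        List<String> roles = new ArrayList<String>();
        Scanner scanner = new Scanner("investigator;submitter;curator").useDelimiter(";");
        while (scanner.hasNext()) {
            roles.add(scanner.next());
        }
        scanner.close();
        System.out.println(roles); // [investigator, submitter, curator]
    }
}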