List of usage examples for java.util.Deque.size()
int size();
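size() returns the number of elements currently in this deque. In the real-world examples that follow it is mostly used to bound loops (while (paths.size() > 0)), to enforce a capacity limit, or to guard calls such as getLast() against an empty deque. As a quick orientation, here is a minimal self-contained sketch (the class name DequeSizeExample is ours, not taken from any project below):

import java.util.ArrayDeque;
import java.util.Deque;

public class DequeSizeExample {
    public static void main(String[] args) {
        Deque<String> deque = new ArrayDeque<>();
        System.out.println(deque.size()); // 0 -- freshly created, empty

        deque.push("a");     // insert at the head
        deque.addLast("b");  // insert at the tail
        System.out.println(deque.size()); // 2

        deque.poll();        // remove from the head
        System.out.println(deque.size()); // 1

        // A common idiom from the examples below: drain the deque.
        while (deque.size() > 0) {
            deque.pop();
        }
        System.out.println(deque.isEmpty()); // true
    }
}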
From source file:com.cloudbees.jenkins.plugins.amazonecs.ECSService.java
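This Jenkins ECS plugin method pages every task definition ARN registered for a family into a Deque<String>; taskDefinitions.size() > 0 then guards the call to getLast(), which fetches the most recent definition for comparison against the desired template.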
/**
 * Looks whether the latest task definition matches the desired one. If yes, returns the ARN of the existing one.
 * If not, registers a new task definition with the desired parameters and returns the new ARN.
 */
String registerTemplate(final ECSCloud cloud, final ECSTaskTemplate template, String clusterArn) {
    final AmazonECSClient client = getAmazonECSClient();

    String familyName = fullQualifiedTemplateName(cloud, template);
    final ContainerDefinition def = new ContainerDefinition().withName(familyName)
            .withImage(template.getImage()).withEnvironment(template.getEnvironmentKeyValuePairs())
            .withExtraHosts(template.getExtraHostEntries()).withMountPoints(template.getMountPointEntries())
            .withCpu(template.getCpu()).withPrivileged(template.getPrivileged()).withEssential(true);

    /*
     * At least one of memory or memoryReservation has to be set;
     * the form validation will highlight if the settings are inappropriate.
     */
    if (template.getMemoryReservation() > 0) /* this is the soft limit */
        def.withMemoryReservation(template.getMemoryReservation());

    if (template.getMemory() > 0) /* this is the hard limit */
        def.withMemory(template.getMemory());

    if (template.getEntrypoint() != null)
        def.withEntryPoint(StringUtils.split(template.getEntrypoint()));

    if (template.getJvmArgs() != null)
        def.withEnvironment(new KeyValuePair().withName("JAVA_OPTS").withValue(template.getJvmArgs()))
                .withEssential(true);

    if (template.getLogDriver() != null) {
        LogConfiguration logConfig = new LogConfiguration();
        logConfig.setLogDriver(template.getLogDriver());
        logConfig.setOptions(template.getLogDriverOptionsMap());
        def.withLogConfiguration(logConfig);
    }

    String lastToken = null;
    Deque<String> taskDefinitions = new LinkedList<String>();
    do {
        ListTaskDefinitionsResult listTaskDefinitions = client
                .listTaskDefinitions(new ListTaskDefinitionsRequest().withFamilyPrefix(familyName)
                        .withMaxResults(100).withNextToken(lastToken));
        taskDefinitions.addAll(listTaskDefinitions.getTaskDefinitionArns());
        lastToken = listTaskDefinitions.getNextToken();
    } while (lastToken != null);

    boolean templateMatchesExistingContainerDefinition = false;
    boolean templateMatchesExistingVolumes = false;
    boolean templateMatchesExistingTaskRole = false;

    DescribeTaskDefinitionResult describeTaskDefinition = null;

    if (taskDefinitions.size() > 0) {
        describeTaskDefinition = client.describeTaskDefinition(
                new DescribeTaskDefinitionRequest().withTaskDefinition(taskDefinitions.getLast()));

        templateMatchesExistingContainerDefinition = def
                .equals(describeTaskDefinition.getTaskDefinition().getContainerDefinitions().get(0));
        LOGGER.log(Level.INFO, "Match on container definition: {0}",
                new Object[] { templateMatchesExistingContainerDefinition });
        LOGGER.log(Level.FINE, "Match on container definition: {0}; template={1}; last={2}",
                new Object[] { templateMatchesExistingContainerDefinition, def,
                        describeTaskDefinition.getTaskDefinition().getContainerDefinitions().get(0) });

        templateMatchesExistingVolumes = ObjectUtils.equals(template.getVolumeEntries(),
                describeTaskDefinition.getTaskDefinition().getVolumes());
        LOGGER.log(Level.INFO, "Match on volumes: {0}", new Object[] { templateMatchesExistingVolumes });
        LOGGER.log(Level.FINE, "Match on volumes: {0}; template={1}; last={2}",
                new Object[] { templateMatchesExistingVolumes, template.getVolumeEntries(),
                        describeTaskDefinition.getTaskDefinition().getVolumes() });

        templateMatchesExistingTaskRole = template.getTaskrole() == null
                || template.getTaskrole().equals(describeTaskDefinition.getTaskDefinition().getTaskRoleArn());
        LOGGER.log(Level.INFO, "Match on task role: {0}", new Object[] { templateMatchesExistingTaskRole });
        LOGGER.log(Level.FINE, "Match on task role: {0}; template={1}; last={2}",
                new Object[] { templateMatchesExistingTaskRole, template.getTaskrole(),
                        describeTaskDefinition.getTaskDefinition().getTaskRoleArn() });
    }

    if (templateMatchesExistingContainerDefinition && templateMatchesExistingVolumes
            && templateMatchesExistingTaskRole) {
        LOGGER.log(Level.FINE, "Task Definition already exists: {0}",
                new Object[] { describeTaskDefinition.getTaskDefinition().getTaskDefinitionArn() });
        return describeTaskDefinition.getTaskDefinition().getTaskDefinitionArn();
    } else {
        final RegisterTaskDefinitionRequest request = new RegisterTaskDefinitionRequest().withFamily(familyName)
                .withVolumes(template.getVolumeEntries()).withContainerDefinitions(def);

        if (template.getTaskrole() != null) {
            request.withTaskRoleArn(template.getTaskrole());
        }

        final RegisterTaskDefinitionResult result = client.registerTaskDefinition(request);
        String taskDefinitionArn = result.getTaskDefinition().getTaskDefinitionArn();
        LOGGER.log(Level.FINE, "Created Task Definition {0}: {1}", new Object[] { taskDefinitionArn, request });
        LOGGER.log(Level.INFO, "Created Task Definition: {0}", new Object[] { taskDefinitionArn });
        return taskDefinitionArn;
    }
}
From source file:edu.stanford.cfuller.colocalization3d.correction.PositionCorrector.java
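Here the Deque acts as a bounded pool of worker threads: new TREThread instances are only started while startedThreads.size() < maxThreads, and the drain loop at the end runs until size() reaches zero.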
/**
 * Determines the target registration error for a correction by successively leaving out each ImageObject
 * in a set used to make a correction, calculating a correction from the remaining objects, and assessing
 * the error in correcting the object left out.
 *
 * @param imageObjects A Vector containing all the ImageObjects to be used for the correction,
 *                     in the order they appear in a multiwavelength image file.
 * @return The average value of the error over all objects.
 */
public double determineTRE(java.util.List<ImageObject> imageObjects) {

    int referenceChannel = this.parameters.getIntValueForKey(REF_CH_PARAM);
    int channelToCorrect = this.parameters.getIntValueForKey(CORR_CH_PARAM);

    RealVector treVector = new ArrayRealVector(imageObjects.size(), 0.0);
    RealVector treXYVector = new ArrayRealVector(imageObjects.size(), 0.0);

    java.util.Deque<TREThread> startedThreads = new java.util.LinkedList<TREThread>();
    int maxThreads = 1;
    if (this.parameters.hasKey(THREAD_COUNT_PARAM)) {
        maxThreads = this.parameters.getIntValueForKey(THREAD_COUNT_PARAM);
    }
    final int threadWaitTime_ms = 1000;

    for (int removeIndex = 0; removeIndex < imageObjects.size(); removeIndex++) {

        if (removeIndex % 10 == 0) {
            java.util.logging.Logger
                    .getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME)
                    .finer("calculating TRE: point " + (removeIndex + 1) + " of " + imageObjects.size());
        }

        TREThread nextFit = new TREThread(imageObjects, referenceChannel, channelToCorrect, removeIndex, this);

        if (startedThreads.size() < maxThreads) {
            startedThreads.add(nextFit);
            nextFit.start();
        } else {
            TREThread next = startedThreads.poll();
            try {
                next.join(threadWaitTime_ms);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }

            while (next.isAlive()) {
                startedThreads.add(next);
                next = startedThreads.poll();
                try {
                    next.join(threadWaitTime_ms);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }

            treVector.setEntry(next.getRemoveIndex(), next.getTre());
            treXYVector.setEntry(next.getRemoveIndex(), next.getTreXY());

            startedThreads.add(nextFit);
            nextFit.start();
        }
    }

    java.util.List<Integer> unsuccessful_TRE = new java.util.ArrayList<Integer>();

    while (startedThreads.size() > 0) {
        TREThread next = startedThreads.poll();
        try {
            next.join();
            if (next.getSuccess()) {
                treVector.setEntry(next.getRemoveIndex(), next.getTre());
            } else {
                unsuccessful_TRE.add(next.getRemoveIndex());
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    RealVector treVector_mod = new ArrayRealVector(treVector.getDimension() - unsuccessful_TRE.size());
    RealVector treXYVector_mod = new ArrayRealVector(treVector_mod.getDimension());

    int c = 0;

    // Unsuccessful TRE calculation results when there is incomplete coverage in the correction dataset.
    for (int i = 0; i < treVector.getDimension(); ++i) {
        if (!unsuccessful_TRE.contains(i)) {
            treVector_mod.setEntry(c, treVector.getEntry(i));
            treXYVector_mod.setEntry(c, treXYVector.getEntry(i));
            ++c;
        }
    }

    treVector = treVector_mod;
    treXYVector = treXYVector_mod;

    double tre = treVector.getL1Norm() / treVector.getDimension();
    double xy_tre = (treXYVector.getL1Norm() / treXYVector.getDimension());

    java.util.logging.Logger.getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME)
            .info("TRE: " + tre);
    java.util.logging.Logger.getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME)
            .info("x-y TRE: " + xy_tre);

    return tre;
}
From source file:com.blm.orc.OrcRawRecordMerger.java
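virtualColumns records the indices of virtual columns as they are encountered; the while (virtualColumns.size() > 0) loop then removes the corresponding entries from the field-type list, working from the back so earlier indices stay valid.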
@Override
public ObjectInspector getObjectInspector() {
    // Read the configuration parameters
    String columnNameProperty = conf.get(serdeConstants.LIST_COLUMNS);
    // NOTE: if "columns.types" is missing, all columns will be of String type
    String columnTypeProperty = conf.get(serdeConstants.LIST_COLUMN_TYPES);

    // Parse the configuration parameters
    ArrayList<String> columnNames = new ArrayList<String>();
    Deque<Integer> virtualColumns = new ArrayDeque<Integer>();
    if (columnNameProperty != null && columnNameProperty.length() > 0) {
        String[] colNames = columnNameProperty.split(",");
        for (int i = 0; i < colNames.length; i++) {
            if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(colNames[i])) {
                virtualColumns.addLast(i);
            } else {
                columnNames.add(colNames[i]);
            }
        }
    }
    if (columnTypeProperty == null) {
        // Default type: all string
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < columnNames.size(); i++) {
            if (i > 0) {
                sb.append(":");
            }
            sb.append("string");
        }
        columnTypeProperty = sb.toString();
    }

    ArrayList<TypeInfo> fieldTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    while (virtualColumns.size() > 0) {
        fieldTypes.remove(virtualColumns.removeLast());
    }
    StructTypeInfo rowType = new StructTypeInfo();
    rowType.setAllStructFieldNames(columnNames);
    rowType.setAllStructFieldTypeInfos(fieldTypes);
    return OrcRecordUpdater.createEventSchema(OrcStruct.createObjectInspector(rowType));
}
From source file:edu.upenn.cis.orchestra.workloadgenerator.Generator.java
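The Deque serves as an explicit stack for a depth-first search over candidate paths; while (0 != paths.size()) keeps expanding paths until no candidates remain.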
public void findSimpleCycles(List<List<Integer>> cycles, List<List<Object>> mappings) {
    // First, index the edges
    List<List<Integer>> edges = new ArrayList<List<Integer>>();
    for (int i = 0; i < _peers.size(); i++) {
        edges.add(new ArrayList<Integer>());
    }
    for (List<Object> thisMapping : mappings) {
        edges.get((Integer) thisMapping.get(0)).add((Integer) thisMapping.get(1));
    }
    for (List<Integer> thisEdge : edges) {
        Collections.sort(thisEdge);
    }
    // Find simple cycles as follows:
    // - Handle the peers in order
    // - Find simple cycles where the smallest node in the cycle
    //   is the peer
    cycles.clear();
    for (int i = 0; i < _peers.size(); i++) {
        Deque<List<Integer>> paths = new ArrayDeque<List<Integer>>();
        paths.push(new ArrayList<Integer>());
        paths.peek().add(i);
        while (0 != paths.size()) {
            List<Integer> path = paths.pop();
            for (Integer j : edges.get(path.get(path.size() - 1))) {
                if (j.equals(i)) {
                    List<Integer> cycle = new ArrayList<Integer>();
                    cycle.addAll(path);
                    cycle.add(j);
                    cycles.add(cycle);
                } else if (j > i && !path.contains(j)) {
                    List<Integer> newPath = new ArrayList<Integer>();
                    newPath.addAll(path);
                    newPath.add(j);
                    paths.push(newPath);
                }
            }
        }
    }
}
From source file:com.skelril.skree.content.world.wilderness.WildernessWorldWrapper.java
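Endermites spawned around an overly aggressive player are collected in a Deque<Entity>, and spawned.size() determines how many iterations the follow-up TimedRunnable gets, one teleport per spawned mob.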
private PlayerCombatParser createFor(Cancellable event, int level) {
    return new PlayerCombatParser() {
        @Override
        public void processPvP(Player attacker, Player defender) {
            if (allowsPvP(level)) {
                return;
            }

            Optional<PvPService> optService = Sponge.getServiceManager().provide(PvPService.class);
            if (optService.isPresent()) {
                PvPService service = optService.get();
                if (service.getPvPState(attacker).allowByDefault()
                        && service.getPvPState(defender).allowByDefault()) {
                    return;
                }
            }

            attacker.sendMessage(Text.of(TextColors.RED, "PvP is opt-in only in this part of the Wilderness!"));
            attacker.sendMessage(
                    Text.of(TextColors.RED, "Mandatory PvP is from level ", getFirstPvPLevel(), " and on."));
            event.setCancelled(true);
        }

        @Override
        public void processMonsterAttack(Living attacker, Player defender) {
            if (!(event instanceof DamageEntityEvent)) {
                return;
            }

            DamageEntityEvent dEvent = (DamageEntityEvent) event;

            // If they're endermites they hit through armor, otherwise they get a damage boost
            if (attacker.getType() == EntityTypes.ENDERMITE) {
                for (Tuple<DamageModifier, Function<? super Double, Double>> modifier : dEvent.getModifiers()) {
                    dEvent.setDamage(modifier.getFirst(), (a) -> 0D);
                }
                dEvent.setBaseDamage(1);
            } else {
                dEvent.setBaseDamage(dEvent.getBaseDamage() + getDamageMod(level));
            }

            // Only apply scoring while in survival mode
            if (defender.get(Keys.GAME_MODE).orElse(GameModes.SURVIVAL) != GameModes.SURVIVAL) {
                return;
            }

            WildernessPlayerMeta meta = playerMetaMap.get(defender.getUniqueId());
            if (meta != null) {
                meta.hit();
            }
        }

        @Override
        public void processPlayerAttack(Player attacker, Living defender) {
            Task.builder().delayTicks(1)
                    .execute(() -> healthPrinter.print(MessageChannel.fixed(attacker), defender))
                    .submit(SkreePlugin.inst());

            if (!(defender instanceof Monster) || defender instanceof Creeper) {
                return;
            }

            // Only apply scoring while in survival mode
            if (attacker.get(Keys.GAME_MODE).orElse(GameModes.SURVIVAL) != GameModes.SURVIVAL) {
                return;
            }

            WildernessPlayerMeta meta = playerMetaMap.get(attacker.getUniqueId());
            if (meta != null) {
                meta.attack();

                if (meta.getRatio() > 30 && meta.getFactors() > 35) {
                    Deque<Entity> spawned = new ArrayDeque<>();

                    for (int i = Probability.getRandom(5); i > 0; --i) {
                        Entity entity = attacker.getWorld().createEntity(EntityTypes.ENDERMITE,
                                defender.getLocation().getPosition());
                        entity.getWorld().spawnEntity(entity,
                                Cause.source(SpawnCause.builder().type(SpawnTypes.PLUGIN).build()).build());
                        spawned.add(entity);
                    }

                    IntegratedRunnable runnable = new IntegratedRunnable() {
                        @Override
                        public boolean run(int times) {
                            Entity mob = spawned.poll();
                            if (mob.isLoaded() && mob.getWorld().equals(attacker.getWorld())) {
                                mob.setLocation(attacker.getLocation());
                            }
                            return true;
                        }

                        @Override
                        public void end() {
                        }
                    };

                    TimedRunnable timedRunnable = new TimedRunnable<>(runnable, spawned.size());
                    timedRunnable.setTask(Task.builder().execute(timedRunnable).delayTicks(40).intervalTicks(20)
                            .submit(SkreePlugin.inst()));
                }

                if (System.currentTimeMillis() - meta.getLastReset() >= TimeUnit.MINUTES.toMillis(5)) {
                    meta.reset();
                }
            }
        }
    };
}
From source file:com.blackberry.logdriver.admin.LogMaintenance.java
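A Deque<Path> is used as the stack for a depth-first walk of an HDFS directory tree; the main maintenance loop runs while paths.size() > 0, pushing interesting child directories as it goes.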
@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf();
    // If run by Oozie, then load the Oozie conf too
    if (System.getProperty("oozie.action.conf.xml") != null) {
        conf.addResource(new URL("file://" + System.getProperty("oozie.action.conf.xml")));
    }

    // For some reason, Oozie needs some options to be set in system instead of
    // in the configuration. So copy the configs over.
    {
        Iterator<Entry<String, String>> i = conf.iterator();
        while (i.hasNext()) {
            Entry<String, String> next = i.next();
            System.setProperty(next.getKey(), next.getValue());
        }
    }

    if (args.length < 3) {
        printUsage();
        return 1;
    }

    String userName = args[0];
    String dcNumber = args[1];
    String service = args[2];
    String date = null;
    String hour = null;
    if (args.length >= 4) {
        date = args[3];
    }
    if (args.length >= 5) {
        hour = args[4];
    }

    // Set from environment variables
    String mergeJobPropertiesFile = getConfOrEnv(conf, "MERGEJOB_CONF");
    String filterJobPropertiesFile = getConfOrEnv(conf, "FILTERJOB_CONF");
    String daysBeforeArchive = getConfOrEnv(conf, "DAYS_BEFORE_ARCHIVE");
    String daysBeforeDelete = getConfOrEnv(conf, "DAYS_BEFORE_DELETE");
    String maxConcurrentMR = getConfOrEnv(conf, "MAX_CONCURRENT_MR", "-1");
    String zkConnectString = getConfOrEnv(conf, "ZK_CONNECT_STRING");
    String logdir = getConfOrEnv(conf, "logdriver.logdir.name");
    boolean resetOrphanedJobs = Boolean.parseBoolean(getConfOrEnv(conf, "reset.orphaned.jobs", "true"));
    String rootDir = getConfOrEnv(conf, "service.root.dir");
    String maxTotalMR = getConfOrEnv(conf, "MAX_TOTAL_MR", "-1");

    boolean doMerge = true;
    boolean doArchive = true;
    boolean doDelete = true;

    if (zkConnectString == null) {
        LOG.error("ZK_CONNECT_STRING is not set. Exiting.");
        return 1;
    }
    if (mergeJobPropertiesFile == null) {
        LOG.info("MERGEJOB_CONF is not set. Not merging.");
        doMerge = false;
    }
    if (filterJobPropertiesFile == null) {
        LOG.info("FILTERJOB_CONF is not set. Not archiving.");
        doArchive = false;
    }
    if (daysBeforeArchive == null) {
        LOG.info("DAYS_BEFORE_ARCHIVE is not set. Not archiving.");
        doArchive = false;
    }
    if (doArchive && Integer.parseInt(daysBeforeArchive) < 0) {
        LOG.info("DAYS_BEFORE_ARCHIVE is negative. Not archiving.");
        doArchive = false;
    }
    if (daysBeforeDelete == null) {
        LOG.info("DAYS_BEFORE_DELETE is not set. Not deleting.");
        doDelete = false;
    }
    if (doDelete && Integer.parseInt(daysBeforeDelete) < 0) {
        LOG.info("DAYS_BEFORE_DELETE is negative. Not deleting.");
        doDelete = false;
    }
    if (logdir == null) {
        LOG.info("LOGDRIVER_LOGDIR_NAME is not set. Using default value of 'logs'.");
        logdir = "logs";
    }
    if (rootDir == null) {
        LOG.info("SERVICE_ROOT_DIR is not set. Using default value of 'service'.");
        rootDir = "/service";
    }

    // We can hang if this fails. So make sure we abort if it fails.
    fs = null;
    try {
        fs = FileSystem.get(conf);
        fs.exists(new Path("/")); // Test if it works.
    } catch (IOException e) {
        LOG.error("Error getting filesystem.", e);
        return 1;
    }

    // Create the LockUtil instance
    lockUtil = new LockUtil(zkConnectString);

    // Now it's safe to create our Job Runner
    JobRunner jobRunner = new JobRunner(Integer.parseInt(maxConcurrentMR), Integer.parseInt(maxTotalMR));
    Thread jobRunnerThread = new Thread(jobRunner);
    jobRunnerThread.setName("JobRunner");
    jobRunnerThread.setDaemon(false);
    jobRunnerThread.start();

    // Figure out what date we start filters on.
    String filterCutoffDate = "";
    if (doArchive) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeArchive));
        filterCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
                (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
        LOG.info("Archiving logs from before {}", filterCutoffDate);
    }
    String deleteCutoffDate = "";
    if (doDelete) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeDelete));
        deleteCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
                (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
        LOG.info("Deleting logs from before {}", deleteCutoffDate);
    }

    long now = System.currentTimeMillis();

    // Various exceptions have been popping up here. So make sure I catch them all.
    try {
        // Patterns to recognize hour, day and incoming directories, so that they
        // can be processed.
        Pattern datePathPattern;
        Pattern hourPathPattern;
        Pattern incomingPathPattern;
        Pattern dataPathPattern;
        Pattern archivePathPattern;
        Pattern workingPathPattern;
        if (hour != null) {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
            hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")");
            incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/incoming");
            dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/data");
            archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/archive");
            workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/working/([^/]+)_(\\d+)");
        } else if (date != null) {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
            hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})");
            incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/incoming");
            dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/data");
            archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/archive");
            workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
        } else {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})");
            hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})");
            incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir)
                    + "/(\\d{8})/(\\d{2})/([^/]+)/incoming");
            dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/data");
            archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir)
                    + "/(\\d{8})/(\\d{2})/([^/]+)/archive");
            workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir)
                    + "/(\\d{8})/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
        }

        // Do a depth first search of the directory, processing anything that
        // looks interesting along the way
        Deque<Path> paths = new ArrayDeque<Path>();
        Path rootPath = new Path(rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/");
        paths.push(rootPath);

        while (paths.size() > 0) {
            Path p = paths.pop();
            LOG.debug("{}", p.toString());

            if (!fs.exists(p)) {
                continue;
            }

            FileStatus dirStatus = fs.getFileStatus(p);
            FileStatus[] children = fs.listStatus(p);
            boolean addChildren = true;

            boolean old = dirStatus.getModificationTime() < now - WAIT_TIME;
            LOG.debug("    Was last modified {}ms ago", now - dirStatus.getModificationTime());

            if (!old) {
                LOG.debug("    Skipping, since it's not old enough.");
            } else if ((!rootPath.equals(p)) && (children.length == 0
                    || (children.length == 1 && children[0].getPath().getName().equals(READY_MARKER)))) {
                // old and no children? Delete!
                LOG.info("    Deleting empty directory {}", p.toString());
                fs.delete(p, true);
            } else {
                Matcher matcher = datePathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking date directory");

                    // If this is already done, then skip it. So only process if it
                    // doesn't exist.
                    if (fs.exists(new Path(p, READY_MARKER)) == false) {
                        // Check each subdirectory. If they all have ready markers, then I
                        // guess we're ready.
                        boolean ready = true;
                        for (FileStatus c : children) {
                            if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                                ready = false;
                                break;
                            }
                        }

                        if (ready) {
                            fs.createNewFile(new Path(p, READY_MARKER));
                        }
                    }
                }

                matcher = hourPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking hour directory");

                    // If this is already done, then skip it. So only process if it
                    // doesn't exist.
                    if (fs.exists(new Path(p, READY_MARKER)) == false) {
                        // Check each subdirectory. If they all have ready markers, then I
                        // guess we're ready.
                        boolean ready = true;
                        for (FileStatus c : children) {
                            if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                                ready = false;
                                break;
                            }
                        }

                        if (ready) {
                            fs.createNewFile(new Path(p, READY_MARKER));
                        }
                    }
                }

                // Check to see if we have to run a merge
                matcher = incomingPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking incoming directory");
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);
                    String matchComponent = matcher.group(3);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    } else if (doMerge) {
                        // old, looks right, and has children? Run it!
                        boolean hasMatchingChildren = false;
                        boolean subdirTooYoung = false;

                        for (FileStatus child : children) {
                            if (!hasMatchingChildren) {
                                FileStatus[] grandchildren = fs.listStatus(child.getPath());
                                for (FileStatus gc : grandchildren) {
                                    if (VALID_FILE.matcher(gc.getPath().getName()).matches()) {
                                        hasMatchingChildren = true;
                                        break;
                                    }
                                }
                            }
                            if (!subdirTooYoung) {
                                if (child.getModificationTime() >= now - WAIT_TIME) {
                                    subdirTooYoung = true;
                                    LOG.debug("    Subdir {} is too young.", child.getPath());
                                }
                            }
                        }

                        if (!hasMatchingChildren) {
                            LOG.debug("    No files match the expected pattern ({})", VALID_FILE.pattern());
                        }

                        if (hasMatchingChildren && !subdirTooYoung) {
                            LOG.info("    Run Merge job {} :: {} {} {} {} {}", new Object[] { p.toString(),
                                    dcNumber, service, matchDate, matchHour, matchComponent });

                            Properties jobProps = new Properties();
                            jobProps.load(new FileInputStream(mergeJobPropertiesFile));

                            jobProps.setProperty("jobType", "merge");
                            jobProps.setProperty("rootDir", rootDir);
                            jobProps.setProperty("dcNumber", dcNumber);
                            jobProps.setProperty("service", service);
                            jobProps.setProperty("date", matchDate);
                            jobProps.setProperty("hour", matchHour);
                            jobProps.setProperty("component", matchComponent);
                            jobProps.setProperty("user.name", userName);
                            jobProps.setProperty("logdir", logdir);

                            jobRunner.submit(jobProps);

                            addChildren = false;
                        }
                    }
                }

                // Check to see if we need to run a filter and archive
                matcher = dataPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);
                    String matchComponent = matcher.group(3);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    } else if (doArchive && timestamp.compareTo(filterCutoffDate) < 0) {

                        Properties jobProps = new Properties();
                        jobProps.load(new FileInputStream(filterJobPropertiesFile));

                        jobProps.setProperty("jobType", "filter");
                        jobProps.setProperty("rootDir", rootDir);
                        jobProps.setProperty("dcNumber", dcNumber);
                        jobProps.setProperty("service", service);
                        jobProps.setProperty("date", matchDate);
                        jobProps.setProperty("hour", matchHour);
                        jobProps.setProperty("component", matchComponent);
                        jobProps.setProperty("user.name", userName);
                        jobProps.setProperty("logdir", logdir);

                        // Check to see if we should just keep all or delete all here.
                        // The filter file should be here.
                        String appPath = jobProps.getProperty("oozie.wf.application.path");
                        appPath = appPath.replaceFirst("\\$\\{.*?\\}", "");
                        Path filterFile = new Path(
                                appPath + "/" + conf.get("filter.definition.file", service + ".yaml"));
                        LOG.info("Filter file is {}", filterFile);
                        if (fs.exists(filterFile)) {
                            List<BoomFilterMapper.Filter> filters = BoomFilterMapper.loadFilters(matchComponent,
                                    fs.open(filterFile));

                            if (filters == null) {
                                LOG.warn("    Got null when getting filters. Not processing. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                            } else if (filters.size() == 0) {
                                LOG.warn("    Got no filters. Not processing. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                            } else if (filters.size() == 1
                                    && filters.get(0) instanceof BoomFilterMapper.KeepAllFilter) {
                                LOG.info("    Keeping everything. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });

                                // Move files from data to archive
                                String destination = rootDir + "/" + dcNumber + "/" + service + "/" + logdir
                                        + "/" + matchDate + "/" + matchHour + "/" + matchComponent
                                        + "/archive/";

                                PathInfo pathInfo = new PathInfo();
                                pathInfo.setDcNumber(dcNumber);
                                pathInfo.setService(service);
                                pathInfo.setLogdir(logdir);
                                pathInfo.setDate(matchDate);
                                pathInfo.setHour(matchHour);
                                pathInfo.setComponent(matchComponent);

                                try {
                                    lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                                    fs.mkdirs(new Path(destination));
                                    for (FileStatus f : fs.listStatus(p)) {
                                        fs.rename(f.getPath(), new Path(destination));
                                    }
                                } finally {
                                    lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                                }
                            } else if (filters.size() == 1
                                    && filters.get(0) instanceof BoomFilterMapper.DropAllFilter) {
                                LOG.info("    Dropping everything. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });

                                // Delete it all!
                                PathInfo pathInfo = new PathInfo();
                                pathInfo.setDcNumber(dcNumber);
                                pathInfo.setService(service);
                                pathInfo.setLogdir(logdir);
                                pathInfo.setDate(matchDate);
                                pathInfo.setHour(matchHour);
                                pathInfo.setComponent(matchComponent);

                                try {
                                    lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                                    fs.delete(p, true);
                                } finally {
                                    lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                                }
                            } else {
                                LOG.info("    Run Filter/Archive job {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                                jobRunner.submit(jobProps);
                            }
                        } else {
                            LOG.warn("Skipping filter job, since no filter file exists");
                        }

                        addChildren = false;
                    }
                }

                matcher = archivePathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    }
                }

                matcher = workingPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.info("  Matches working pattern ({})", p);
                    if (resetOrphanedJobs) {
                        String matchDate = matcher.group(1);
                        String matchHour = matcher.group(2);
                        String matchComponent = matcher.group(3);

                        // Move everything from working/xxx/incoming/ to incoming/
                        PathInfo lockPathInfo = new PathInfo(logdir, rootDir + "/" + dcNumber + "/" + service
                                + "/" + logdir + "/" + matchDate + "/" + matchHour + "/" + matchComponent);
                        lockUtil.acquireWriteLock(lockUtil.getLockPath(lockPathInfo));

                        FileStatus[] fileStatuses = fs.listStatus(new Path(p.toUri().getPath() + "/incoming/"));
                        if (fileStatuses != null) {
                            for (FileStatus fileStatus : fileStatuses) {
                                Path toPath = new Path(
                                        fileStatus.getPath().getParent().getParent().getParent().getParent(),
                                        "incoming/" + fileStatus.getPath().getName());

                                LOG.info("  Moving data from {} to {}", fileStatus.getPath(), toPath);
                                LOG.info("    mkdir {}", toPath);
                                fs.mkdirs(toPath);

                                Path fromDir = new Path(p.toUri().getPath(),
                                        "incoming/" + fileStatus.getPath().getName());
                                LOG.info("    moving from {}", fromDir);
                                FileStatus[] files = fs.listStatus(fromDir);
                                if (files == null || files.length == 0) {
                                    LOG.info("    Nothing to move from {}", fromDir);
                                } else {
                                    for (FileStatus f : files) {
                                        LOG.info("    rename {} {}", f.getPath(),
                                                new Path(toPath, f.getPath().getName()));
                                        fs.rename(f.getPath(), new Path(toPath, f.getPath().getName()));
                                    }
                                }

                                LOG.info("    rm {}", fileStatus.getPath());
                                fs.delete(fileStatus.getPath(), true);
                            }

                            lockUtil.releaseWriteLock(lockUtil.getLockPath(lockPathInfo));

                            fs.delete(new Path(p.toUri().getPath()), true);
                        }
                    }

                    addChildren = false;
                }
            }

            // Add any children which are directories to the stack.
            if (addChildren) {
                for (int i = children.length - 1; i >= 0; i--) {
                    FileStatus child = children[i];
                    if (child.isDirectory()) {
                        paths.push(child.getPath());
                    }
                }
            }
        }

        // Since we may have deleted a bunch of directories, delete any unused locks
        // from ZooKeeper.
        {
            LOG.info("Checking for unused locks in ZooKeeper");
            String scanPath = rootDir + "/" + dcNumber + "/" + service + "/" + logdir;
            if (date != null) {
                scanPath += "/" + date;
                if (hour != null) {
                    scanPath += "/" + hour;
                }
            }

            List<LockInfo> lockInfo = lockUtil.scan(scanPath);

            for (LockInfo li : lockInfo) {
                // Check if the lock path still exists in HDFS. If it doesn't, then
                // delete it from ZooKeeper.
                String path = li.getPath();
                String hdfsPath = path.substring(LockUtil.ROOT.length());
                if (!fs.exists(new Path(hdfsPath))) {
                    ZooKeeper zk = lockUtil.getZkClient();

                    while (!path.equals(LockUtil.ROOT)) {
                        try {
                            zk.delete(path, -1);
                        } catch (KeeperException.NotEmptyException e) {
                            // That's fine. Just stop trying then.
                            break;
                        } catch (Exception e) {
                            LOG.error("Caught exception trying to delete from ZooKeeper.", e);
                            break;
                        }
                        LOG.info("Deleted from ZooKeeper: {}", path);
                        path = path.substring(0, path.lastIndexOf('/'));
                    }
                }
            }
        }

        // Now that we're done, wait for the Oozie Runner to stop, and print the
        // results.
        LOG.info("Waiting for Oozie jobs to complete.");
        jobRunner.shutdown();
        jobRunnerThread.join();
        LOG.info("Job Stats : Started={} Succeeded={} failed={} errors={}", new Object[] {
                jobRunner.getStarted(), jobRunner.getSucceeded(), jobRunner.getFailed(), jobRunner.getErrors() });

        lockUtil.close();

    } catch (Exception e) {
        LOG.error("Unexpected exception caught.", e);
        return 1;
    }

    return 0;
}
From source file:de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java
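Pending simulation steps are stored in an ArrayDeque and consumed LIFO via removeLast(); steps.size() appears in the statistics logged once the deque has drained.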
public boolean decomposeAND(NetlistTerm term) {

    logger.info("Decomposition of " + term.toString());

    Set<Signal> signals = netlist.getDrivenSignalsTransitive(term);
    if (signals.isEmpty()) {
        logger.warn("No signal(s) for term " + term + " found");
        return false;
    } else if (signals.size() > 1) {
        logger.warn("Term " + term + " drives more than one signal. This is not supported yet");
        return false;
    }
    Signal origsig = signals.iterator().next();
    if (!isAOC(term, origsig)) {
        logger.warn("Algorithm not applicable for non-AOC architectures");
        return false;
    }

    int startgatesize = BDDHelper.numberOfVars(term.getBdd());

    BDD bdd = term.getBdd();
    Set<Signal> origrelevant = findRelevantSigs(bdd);
    if (origrelevant == null) {
        return false;
    }

    StateGraph sg2 = sghelper.getNewStateGraph(origrelevant, origsig);
    if (sg2 == null) {
        logger.warn("Failed to generate new SG. Using the original one.");
        sg2 = origsg;
    }

    BiMap<Signal, Signal> sigmap = HashBiMap.create();
    Set<Signal> relevant = new HashSet<>();
    boolean found;
    for (Signal oldSig : origrelevant) {
        found = false;
        for (Signal newSig : sg2.getAllSignals()) {
            if (oldSig.getName().equals(newSig.getName())) {
                sigmap.put(oldSig, newSig);
                found = true;
                break;
            }
        }
        if (!found) {
            logger.error("Signal " + oldSig.getName() + " not found");
            return false;
        }
        relevant.add(sigmap.get(oldSig));
    }
    found = false;
    for (Signal newSig : sg2.getAllSignals()) {
        if (origsig.getName().equals(newSig.getName())) {
            sigmap.put(origsig, newSig);
            found = true;
            break;
        }
    }
    if (!found) {
        logger.error("Signal " + origsig.getName() + " not found");
        return false;
    }
    Signal sig = sigmap.get(origsig);

    Map<Signal, Boolean> posnegmap = getInputsPosOrNeg(term, sigmap);
    BDD newbdd = factory.one();
    for (Entry<Signal, Boolean> entry : posnegmap.entrySet()) {
        if (entry.getValue()) {
            newbdd = newbdd.andWith(getPosBDD(entry.getKey()));
        } else {
            newbdd = newbdd.andWith(getNegBDD(entry.getKey()));
        }
        if (entry.getKey() instanceof QuasiSignal) {
            relevant.add(entry.getKey());
        }
    }

    Set<State> startStates = new HashSet<>();
    for (State s : sg2.getStates()) {
        for (Entry<Transition, State> entry2 : s.getNextStates().entrySet()) {
            if (entry2.getKey().getSignal() == sig) {
                startStates.add(entry2.getValue());
            }
        }
    }

    List<List<Signal>> fallingPartitions = new ArrayList<>();
    for (Signal sig2 : relevant) {
        List<Signal> tmp = new ArrayList<>();
        tmp.add(sig2);
        fallingPartitions.add(tmp);
    }

    SortedSet<IOBehaviour> sequencesFront = new TreeSet<>(new SequenceFrontCmp());
    SortedSet<IOBehaviour> sequencesBack = new TreeSet<>(new SequenceBackCmp());
    Set<IOBehaviour> newSequences = new HashSet<>();
    Set<IOBehaviour> rmSequences = new HashSet<>();
    Deque<IOBehaviourSimulationStep> steps = new ArrayDeque<>();

    pool = new IOBehaviourSimulationStepPool(new IOBehaviourSimulationStepFactory());
    pool.setMaxTotal(-1);

    try {
        root = pool.borrowObject();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error("Could not borrow object");
        return false;
    }

    IOBehaviourSimulationStep newStep;
    for (State s : startStates) {
        try {
            newStep = pool.borrowObject();
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("Could not borrow object");
            return false;
        }
        root.getNextSteps().add(newStep);
        newStep.setPrevStep(root);
        newStep.setStart(s);
        newStep.setNextState(s);
        steps.add(newStep);
    }

    if (steps.isEmpty()) {
        return false;
    }

    final long checkThreshold = 100;

    long stepsEvaledTotal = 0;
    IOBehaviourSimulationStep step = null;
    while (!steps.isEmpty()) {
        step = steps.removeLast();
        // System.out.println("#Step: " + step.toString());
        getNewSteps(step, sig, newSequences, steps, relevant);
        stepsEvaledTotal++;
        if (newSequences.size() >= checkThreshold) {
            removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
        }
    }
    removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
    logger.debug("Sequences: " + sequencesFront.size() + " - Tmp Sequences: " + newSequences.size()
            + " - Steps to evaluate: " + steps.size() + " - Steps evaluated: " + stepsEvaledTotal);
    logger.debug("Pool: " + "Created: " + pool.getCreatedCount() + ", Borrowed: " + pool.getBorrowedCount()
            + ", Returned: " + pool.getReturnedCount() + ", Active: " + pool.getNumActive() + ", Idle: "
            + pool.getNumIdle());
    logger.debug("RmSub: " + rmSub + " // RmFall: " + rmFall);

    SortedSet<IOBehaviour> sequences = new TreeSet<>(sequencesFront);
    sequencesFront.clear();
    sequencesBack.clear();
    // System.out.println(sequences.toString());

    List<IOBehaviour> falling = new ArrayList<>();
    List<IOBehaviour> rising = new ArrayList<>();
    List<IOBehaviour> constant = new ArrayList<>();
    if (!categoriseSequences(newbdd, sequences, falling, rising, constant)) {
        return false;
    }

    fallingPartitions = getPossiblePartitionsFromFalling(falling, relevant);
    // System.out.println("FallingPartitions: " + fallingPartitions.toString());

    Map<Integer, List<Partition>> partitions = getPartitions(relevant, startgatesize);
    if (partitions == null) {
        logger.error("There was a problem while creating partitions for signal " + sig.getName());
        return false;
    }

    filterPartitions(partitions, fallingPartitions);
    if (partitions.isEmpty()) {
        logger.error("No suitable partitions found");
        return false;
    }

    // System.out.println("posneg: " + posnegmap.toString());
    setPartitionBDDs(partitions, posnegmap);

    if (!checkRising(rising, partitions)) {
        logger.error("Check rising failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partitions found");
        return false;
    }

    if (!checkConstant(constant, partitions)) {
        logger.error("Check constant failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partitions found");
        return false;
    }

    applyDecoResult(term, partitions, posnegmap, sigmap);
    return true;
}
From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java
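getStableVersions() returns a Deque<Version>; stableVersions.size() > 0 distinguishes between warning that an older version is marked stable and warning that no version is marked stable at all.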
public String warnUserOnUnstableVersions() {
    String warnMessage = null;
    if (this.isAbstract) {
        Deque<Version> stableVersions = getStableVersions();
        Long latestVersion = getLatestVersion();
        if (stableVersions.size() > 0) {
            if (!this.versionStore.getVersion(latestVersion).getStability()) {
                warnMessage = Messages.InheritanceProject_OlderVersionMarkedAsStable();
            }
        } else {
            warnMessage = Messages.InheritanceProject_NoVersionMarkedAsStable();
        }
    }
    return warnMessage;
}
From source file:ninja.undertow.util.UndertowHelper.java
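formValues.size() determines how much array space the merged parameter values need before the existing entries and the new form values are copied in.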
static public void createOrMerge(Map<String, String[]> parameters, String name,
        Deque<FormData.FormValue> formValues) {
    String[] current = parameters.get(name);
    int index = 0;
    int size = formValues.size();

    // prepare for merge or allocate new
    if (current != null) {
        index = current.length;
        size += current.length;
        String[] future = new String[size];
        System.arraycopy(current, 0, future, 0, current.length);
        current = future;
    } else {
        current = new String[size];
    }

    // copy values!
    for (FormData.FormValue formValue : formValues) {
        // make sure to skip all file uploads
        if (formValue.isFile()) {
            continue;
        }

        // standard form parameter
        current[index] = formValue.getValue();
        index++;
    }

    parameters.put(name, current);
}
From source file:ocr.sapphire.image.EdgeBasedImagePreprocessor.java
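Each connected component of edge points is a Deque<Point>. Negating c.size() as the key of a TreeMap sorts components largest-first, and size() later dimensions the per-component output arrays.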
private Point[][] findEdgePoints(int[] edgeData) {
    List<Deque<Point>> components = new ArrayList<Deque<Point>>();

    // find close paths
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            if (edgeData[x + y * width] == BLACK && isBridge(edgeData, x, y)) {
                edgeData[x + y * width] = WHITE;
                Deque<Point> firstPart = null, secondPart = null;
                for (int k = 0; k < DX.length; k++) {
                    int x2 = x + DX[k];
                    int y2 = y + DY[k];
                    if (x2 < 0 || x2 >= width || y2 < 0 || y2 >= height) {
                        continue;
                    }
                    if (edgeData[x2 + y2 * width] == BLACK) {
                        Deque<Point> points = findConnectedComponent(edgeData, x2, y2);
                        if (firstPart == null) {
                            firstPart = points;
                        } else {
                            secondPart = points;
                        }
                    }
                }
                firstPart.addFirst(new Point(x, y));
                if (secondPart != null) { // the path is not closed
                    join(firstPart, true, secondPart, true);
                }
                components.add(firstPart);
            }
        }
    }

    // remove contained components
    for (int i = 0; i < components.size() - 1; i++) {
        Rectangle r1 = getBounds(components.get(i));
        for (int j = i + 1; j < components.size();) {
            Rectangle r2 = getBounds(components.get(j));
            if (r1.contains(r2)) {
                components.remove(j);
            } else if (r2.contains(r1)) {
                components.set(i, components.get(j));
                components.remove(j);
            } else {
                j++;
            }
        }
    }

    // try to connect some paths
    int connectedCount;
    do {
        connectedCount = 0;
        for (int i = 0; i < components.size() - 1; i++) {
            for (int j = i + 1; j < components.size(); j++) {
                Deque<Point> a = components.get(i);
                Deque<Point> b = components.get(j);
                double d0 = d(a.getFirst(), a.getLast()) + d(b.getFirst(), b.getLast());
                double d1 = d(a.getFirst(), b.getFirst()) + d(a.getLast(), b.getLast());
                double d2 = d(a.getFirst(), b.getLast()) + d(a.getLast(), b.getFirst());
                double d3 = d(a.getFirst(), b.getFirst());
                double d4 = d(a.getFirst(), b.getLast());
                double d5 = d(a.getLast(), b.getFirst());
                double d6 = d(a.getLast(), b.getLast());
                if (d3 <= CLOSE_THRESHOLD && d3 <= d4) {
                    join(a, true, b, true);
                    components.remove(j);
                    connectedCount++;
                } else if (d4 <= CLOSE_THRESHOLD && d4 <= d3) {
                    join(a, true, b, false);
                    components.remove(j);
                    connectedCount++;
                } else if (d5 <= CLOSE_THRESHOLD && d5 <= d6) {
                    join(a, false, b, true);
                    components.remove(j);
                    connectedCount++;
                } else if (d6 <= CLOSE_THRESHOLD && d6 <= d5) {
                    join(a, false, b, false);
                    components.remove(j);
                    connectedCount++;
                } else if (d1 <= d0 && d1 <= d2) {
                    if (d3 < d6) {
                        join(a, true, b, true);
                    } else {
                        join(a, false, b, false);
                    }
                    components.remove(j);
                    connectedCount++;
                } else if (d2 <= d0 && d2 <= d1) {
                    if (d4 < d5) {
                        join(a, true, b, false);
                    } else {
                        join(a, false, b, true);
                    }
                    components.remove(j);
                    connectedCount++;
                }
            } // end of for j
        } // end of for i
    } while (connectedCount > 0);

    // choose (componentCount) biggest components
    SortedMap<Integer, Deque<Point>> componentMap = new TreeMap<Integer, Deque<Point>>();
    for (Deque<Point> c : components) {
        componentMap.put(-c.size(), c);
    }

    // remove noise
    boolean firstPoint = true;
    for (Iterator<Entry<Integer, Deque<Point>>> iterator = componentMap.entrySet().iterator();
            iterator.hasNext();) {
        Entry<Integer, Deque<Point>> entry = iterator.next();
        Rectangle r = getBounds(entry.getValue());
        if (r.width <= 10 && r.height <= 10) {
            if (firstPoint) {
                firstPoint = false;
            } else {
                iterator.remove();
            }
        }
    }

    // convert components: normalize points, to array
    int foundComponentCount = Math.min(componentCount, componentMap.size());
    componentArr = new Point[foundComponentCount][];
    Rectangle r = getBounds(componentMap.get(componentMap.firstKey()));
    for (int c = 0; c < foundComponentCount; c++) {
        int key = componentMap.firstKey();
        componentArr[c] = new Point[componentMap.get(key).size()];
        normalize(componentMap.get(key)).toArray(componentArr[c]);
        componentMap.remove(key);
        for (int i = 0; i < componentArr[c].length; i++) {
            componentArr[c][i].x = (componentArr[c][i].x - r.x) / r.width;
            componentArr[c][i].y = (componentArr[c][i].y - r.y) / r.height;
        }
    }
    return componentArr;
}