List of usage examples for the java.util.concurrent ArrayBlockingQueue(int capacity) constructor
public ArrayBlockingQueue(int capacity)
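Before the real-world examples below, here is a minimal, self-contained sketch of the constructor's basic contract: the capacity is fixed at construction time, put(...) blocks when the queue is full, offer(...) returns false instead of blocking, and take() blocks when the queue is empty. All class and variable names in this sketch are illustrative only.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class BoundedQueueDemo {
    public static void main(String[] args) throws InterruptedException {
        // Fixed capacity of 2; the backing array is allocated up front.
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(2);

        queue.put("a");                          // succeeds immediately
        queue.put("b");                          // fills the queue
        boolean accepted = queue.offer("c");     // returns false instead of blocking
        System.out.println("offer on full queue: " + accepted); // false

        // A consumer thread drains the queue; take() blocks until data arrives.
        Thread consumer = new Thread(() -> {
            try {
                while (true) {
                    System.out.println("took: " + queue.take());
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // exit on interrupt
            }
        });
        consumer.start();
        queue.put("c"); // unblocks once the consumer has made room
        Thread.sleep(100);
        consumer.interrupt();
    }
}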
From source file:org.github.gitswarm.GitSwarm.java
/**
 * Initialization
 */
@Override
public void setup() {
    int maxBackgroundThreads = 4;
    backgroundExecutor = new ThreadPoolExecutor(1, maxBackgroundThreads, Long.MAX_VALUE, TimeUnit.NANOSECONDS,
            new ArrayBlockingQueue<>(4 * maxBackgroundThreads), new ThreadPoolExecutor.CallerRunsPolicy());
    showDebug = false;
    background = ColorUtil.toAwtColor(Config.getInstance().getBackground().getValue()).getRGB();
    fontColor = ColorUtil.toAwtColor(Config.getInstance().getFontColor().getValue()).getRGB();
    double framesperday = Config.getInstance().getFramesPerDay();
    UPDATE_DELTA = (long) (86400000 / framesperday);
    smooth();
    frameRate(24);
    // init data structures
    nodes = new HashMap<>();
    edges = new HashMap<>();
    people = new HashMap<>();
    history = new LinkedList<>();
    loadRepEvents();
    if (commits.isEmpty()) {
        return;
    }
    prevDate = commits.get(0).getDate();
    maxFramesSaved = (int) Math.pow(10,
            Config.getInstance().getScreenshotFileMask().getValue().replaceAll("[^#]", "").length());
    // Create fonts
    String fontName = Config.getInstance().getFont();
    String boldFontName = Config.getInstance().getBoldFont();
    Integer fontSize = Config.getInstance().getFontSize().getValue();
    Integer fontSizeBold = Config.getInstance().getBoldFontSize().getValue();
    font = createFont(fontName, fontSize);
    boldFont = createFont(boldFontName, fontSizeBold);
    textFont(font);
    // Create the file particle image
    sprite = loadImage("src/main/resources/particle.png");
    avatarMask = loadImage("src/main/resources/mask.png");
    avatarMask.resize(40, 40);
    // Add translucency (using itself in this case)
    sprite.mask(sprite);
}
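The pattern above, a ThreadPoolExecutor backed by a small ArrayBlockingQueue plus CallerRunsPolicy, gives natural backpressure: once the queue holds 4 * maxBackgroundThreads pending tasks, further submissions run on the submitting thread instead of being rejected. A minimal sketch of just that mechanism (all names here are illustrative, not taken from GitSwarm):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class CallerRunsDemo {
    public static void main(String[] args) throws InterruptedException {
        int maxThreads = 2;
        ThreadPoolExecutor executor = new ThreadPoolExecutor(
                1, maxThreads, 30, TimeUnit.SECONDS,
                new ArrayBlockingQueue<>(4),                 // bounded work queue
                new ThreadPoolExecutor.CallerRunsPolicy());  // overflow runs on the caller

        for (int i = 0; i < 20; i++) {
            final int id = i;
            // When the queue is full, execute() runs the task right here,
            // slowing the producer down to the pool's pace.
            executor.execute(() -> System.out.println(
                    "task " + id + " on " + Thread.currentThread().getName()));
        }
        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.MINUTES);
    }
}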
From source file:org.apache.hadoop.yarn.server.resourcemanager.security.X509SecurityHandler.java
public X509SecurityHandler(RMContext rmContext, RMAppSecurityManager rmAppSecurityManager) {
    this.rmContext = rmContext;
    this.rmAppSecurityManager = rmAppSecurityManager;
    rng = new SecureRandom();
    this.eventHandler = rmContext.getDispatcher().getEventHandler();
    revocationEvents = new ArrayBlockingQueue<CertificateRevocationEvent>(REVOCATION_QUEUE_SIZE);
    renewalTasks = new ConcurrentHashMap<>();
}
From source file:org.bimserver.tests.TestSimultaniousDownloadWithCaching.java
private void start() {
    BimServerConfig config = new BimServerConfig();
    Path homeDir = Paths.get("home");
    try {
        if (Files.isDirectory(homeDir)) {
            PathUtils.removeDirectoryWithContent(homeDir);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    config.setClassPath(System.getProperty("java.class.path"));
    config.setHomeDir(homeDir);
    config.setPort(8080);
    config.setStartEmbeddedWebServer(true);
    config.setResourceFetcher(new LocalDevelopmentResourceFetcher(Paths.get("../")));
    final BimServer bimServer = new BimServer(config);
    try {
        LocalDevPluginLoader.loadPlugins(bimServer.getPluginManager(), null);
        bimServer.start();
        if (bimServer.getServerInfo().getServerState() == ServerState.NOT_SETUP) {
            bimServer.getService(AdminInterface.class).setup("http://localhost", "Administrator",
                    "admin@bimserver.org", "admin", null, null, null);
        }
    } catch (PluginException e2) {
        e2.printStackTrace();
    } catch (ServerException e) {
        e.printStackTrace();
    } catch (DatabaseInitException e) {
        e.printStackTrace();
    } catch (BimserverDatabaseException e) {
        e.printStackTrace();
    } catch (DatabaseRestartRequiredException e) {
        e.printStackTrace();
    } catch (UserException e) {
        e.printStackTrace();
    }
    try {
        final ServiceMap serviceMap = bimServer.getServiceFactory().get(AccessMethod.INTERNAL);
        ServiceInterface serviceInterface = serviceMap.get(ServiceInterface.class);
        SettingsInterface settingsInterface = serviceMap.get(SettingsInterface.class);
        final AuthInterface authInterface = serviceMap.get(AuthInterface.class);
        serviceInterface = bimServer.getServiceFactory()
                .get(authInterface.login("admin@bimserver.org", "admin"), AccessMethod.INTERNAL)
                .get(ServiceInterface.class);
        settingsInterface.setCacheOutputFiles(true);
        settingsInterface.setGenerateGeometryOnCheckin(false);
        final SProject project = serviceMap.getServiceInterface().addProject("test", "ifc2x3tc1");
        SDeserializerPluginConfiguration deserializerByName = serviceMap.getServiceInterface()
                .getDeserializerByName("IfcStepDeserializer");
        Path file = Paths.get("../TestData/data/AC11-Institute-Var-2-IFC.ifc");
        serviceInterface.checkin(project.getOid(), "test", deserializerByName.getOid(),
                file.toFile().length(), file.getFileName().toString(),
                new DataHandler(new FileDataSource(file.toFile())), false, true);
        final SProject projectUpdate = serviceMap.getServiceInterface().getProjectByPoid(project.getOid());
        ThreadPoolExecutor executor = new ThreadPoolExecutor(20, 20, 1, TimeUnit.HOURS,
                new ArrayBlockingQueue<Runnable>(1000));
        for (int i = 0; i < 20; i++) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        ServiceMap serviceMap2 = bimServer.getServiceFactory().get(
                                authInterface.login("admin@bimserver.org", "admin"), AccessMethod.INTERNAL);
                        SSerializerPluginConfiguration serializerPluginConfiguration = serviceMap
                                .getServiceInterface().getSerializerByName("Ifc2x3");
                        Long download = serviceMap2.getServiceInterface().download(
                                Collections.singleton(projectUpdate.getLastRevisionId()),
                                DefaultQueries.allAsString(), serializerPluginConfiguration.getOid(), true);
                        SDownloadResult downloadData = serviceMap2.getServiceInterface()
                                .getDownloadData(download);
                        if (downloadData.getFile().getDataSource() instanceof CacheStoringEmfSerializerDataSource) {
                            CacheStoringEmfSerializerDataSource c =
                                    (CacheStoringEmfSerializerDataSource) downloadData.getFile().getDataSource();
                            try {
                                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                                c.writeToOutputStream(baos, null);
                                System.out.println(baos.size());
                            } catch (SerializerException e) {
                                e.printStackTrace();
                            }
                        } else {
                            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                            IOUtils.copy(downloadData.getFile().getInputStream(), baos);
                            System.out.println(baos.size());
                        }
                        serviceMap2.getServiceInterface().cleanupLongAction(download);
                    } catch (ServerException e) {
                        e.printStackTrace();
                    } catch (UserException e) {
                        e.printStackTrace();
                    } catch (FileNotFoundException e) {
                        e.printStackTrace();
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (PublicInterfaceNotFoundException e1) {
                        e1.printStackTrace();
                    }
                }
            });
        }
        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.HOURS);
        bimServer.stop();
    } catch (ServerException e1) {
        e1.printStackTrace();
    } catch (UserException e1) {
        e1.printStackTrace();
    } catch (InterruptedException e) {
        e.printStackTrace();
    } catch (PublicInterfaceNotFoundException e2) {
        e2.printStackTrace();
    }
}
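This executor uses a bounded queue but no explicit RejectedExecutionHandler, so the default AbortPolicy applies. That is safe here, since at most 20 tasks are submitted against a capacity-1000 queue, but a larger burst would throw RejectedExecutionException. A small sketch of that failure mode, with illustrative names and sizes:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class AbortPolicyDemo {
    public static void main(String[] args) {
        // 1 thread, queue capacity 1: at most one running plus one queued task.
        ThreadPoolExecutor executor = new ThreadPoolExecutor(1, 1, 0, TimeUnit.SECONDS,
                new ArrayBlockingQueue<Runnable>(1)); // default handler is AbortPolicy
        Runnable slow = () -> {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        };
        try {
            executor.execute(slow); // runs
            executor.execute(slow); // queued
            executor.execute(slow); // rejected: queue full, no spare threads
        } catch (RejectedExecutionException e) {
            System.out.println("third task rejected: " + e);
        } finally {
            executor.shutdownNow();
        }
    }
}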
From source file:net.yacy.document.importer.MediawikiImporter.java
@Override
public void run() {
    this.start = System.currentTimeMillis();
    final int threads = Math.max(2, Runtime.getRuntime().availableProcessors() - 1);
    // 'out' keeps an output file open until poisoned, to make sure the underlying thread gets the end condition;
    // regardless of any exception (e.g. out of memory) an add(poison) is done in the outermost finally block
    final BlockingQueue<wikiparserrecord> out = new ArrayBlockingQueue<wikiparserrecord>(threads * 10);
    final wikiparserrecord poison = newRecord();
    try {
        String targetstub = this.sourcefile.getName();
        int p = targetstub.lastIndexOf("\\.");
        if (p > 0) targetstub = targetstub.substring(0, p);
        InputStream is = new BufferedInputStream(new FileInputStream(this.sourcefile), 1024 * 1024);
        if (this.sourcefile.getName().endsWith(".bz2")) {
            is = new BZip2CompressorInputStream(is);
        } else if (this.sourcefile.getName().endsWith(".gz")) {
            is = new GZIPInputStream(is);
        }
        final BufferedReader r = new BufferedReader(
                new java.io.InputStreamReader(is, StandardCharsets.UTF_8), 4 * 1024 * 1024);
        String t;
        StringBuilder sb = new StringBuilder();
        boolean page = false, text = false;
        String title = null;
        final BlockingQueue<wikiparserrecord> in = new ArrayBlockingQueue<wikiparserrecord>(threads * 10);
        final ExecutorService service = Executors.newCachedThreadPool(
                new NamePrefixThreadFactory(MediawikiImporter.class.getSimpleName() + ".convertConsumer"));
        final convertConsumer[] consumers = new convertConsumer[threads];
        final Future<?>[] consumerResults = (Future<?>[]) Array.newInstance(Future.class, threads);
        for (int i = 0; i < threads; i++) {
            consumers[i] = new convertConsumer(in, out, poison);
            consumerResults[i] = service.submit(consumers[i]);
        }
        final convertWriter writer = new convertWriter(out, poison, this.targetdir, targetstub);
        final Future<Integer> writerResult = service.submit(writer);
        wikiparserrecord record;
        int q;
        while ((t = r.readLine()) != null) {
            if ((p = t.indexOf("<base>", 0)) >= 0 && (q = t.indexOf("</base>", p)) > 0) {
                // urlStub = "http://" + lang + ".wikipedia.org/wiki/";
                this.urlStub = t.substring(p + 6, q);
                if (!this.urlStub.endsWith("/")) {
                    q = this.urlStub.lastIndexOf('/');
                    if (q > 0) this.urlStub = this.urlStub.substring(0, q + 1);
                }
                final DigestURL uri = new DigestURL(this.urlStub);
                this.hostport = uri.getHost();
                if (uri.getPort() != 80) this.hostport += ":" + uri.getPort();
                continue;
            }
            if (t.indexOf(pagestart) >= 0) {
                page = true;
                continue;
            }
            if ((p = t.indexOf(textstart)) >= 0) {
                text = page;
                q = t.indexOf('>', p + textstart.length());
                if (q > 0) {
                    final int u = t.indexOf(textend, q + 1);
                    if (u > q) {
                        sb.append(t.substring(q + 1, u));
                        ConcurrentLog.info("WIKITRANSLATION", "[INJECT] Title: " + title);
                        if (sb.length() == 0) {
                            ConcurrentLog.info("WIKITRANSLATION", "ERROR: " + title + " has empty content");
                            continue;
                        }
                        record = newRecord(this.hostport, this.urlStub, title, sb);
                        try {
                            in.put(record);
                            this.count++;
                        } catch (final InterruptedException e1) {
                            ConcurrentLog.logException(e1);
                        }
                        sb = new StringBuilder(200);
                        continue;
                    }
                    sb.append(t.substring(q + 1));
                }
                continue;
            }
            if (t.indexOf(textend) >= 0) {
                text = false;
                ConcurrentLog.info("WIKITRANSLATION", "[INJECT] Title: " + title);
                if (sb.length() == 0) {
                    ConcurrentLog.info("WIKITRANSLATION", "ERROR: " + title + " has empty content");
                    continue;
                }
                record = newRecord(this.hostport, this.urlStub, title, sb);
                try {
                    in.put(record);
                    this.count++;
                } catch (final InterruptedException e1) {
                    ConcurrentLog.logException(e1);
                }
                sb = new StringBuilder(200);
                continue;
            }
            if (t.indexOf(pageend) >= 0) {
                page = false;
                continue;
            }
            if ((p = t.indexOf("<title>", 0)) >= 0) {
                title = t.substring(p + 7);
                q = title.indexOf("</title>", 0);
                if (q >= 0) title = title.substring(0, q);
                continue;
            }
            if (text) {
                sb.append(t);
                sb.append('\n');
            }
        }
        r.close();
        try {
            for (int i = 0; i < threads; i++) {
                in.put(poison);
            }
            for (int i = 0; i < threads; i++) {
                consumerResults[i].get(10000, TimeUnit.MILLISECONDS);
            }
        } catch (final InterruptedException e) {
            ConcurrentLog.logException(e);
        } catch (final ExecutionException e) {
            ConcurrentLog.logException(e);
        } catch (final TimeoutException e) {
            ConcurrentLog.logException(e);
        } catch (final Exception e) {
            ConcurrentLog.logException(e);
        } finally {
            out.put(poison); // output thread condition (for file.close)
            writerResult.get(10000, TimeUnit.MILLISECONDS);
        }
    } catch (final IOException e) {
        ConcurrentLog.logException(e);
    } catch (final Exception e) {
        ConcurrentLog.logException(e);
    } finally {
        try {
            // 'out' keeps the output file open until poisoned; close the file even if an exception happened in this block
            out.put(poison);
        } catch (InterruptedException ex) {
        }
    }
}
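The importer above coordinates shutdown with a poison-pill record: the producer puts a sentinel object into the bounded queue once per consumer, and each consumer exits when it takes the sentinel. A stripped-down sketch of that pattern (the types and names here are illustrative, not YaCy's):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class PoisonPillDemo {
    // A unique sentinel instance; consumers compare by reference, not equals().
    private static final String POISON = new String("POISON");

    public static void main(String[] args) throws InterruptedException {
        final BlockingQueue<String> queue = new ArrayBlockingQueue<>(10);
        final int consumers = 2;
        Thread[] workers = new Thread[consumers];
        for (int i = 0; i < consumers; i++) {
            workers[i] = new Thread(() -> {
                try {
                    while (true) {
                        String item = queue.take();
                        if (item == POISON) return; // end condition
                        System.out.println("processed " + item);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
            workers[i].start();
        }
        for (int i = 0; i < 100; i++) {
            queue.put("record-" + i); // blocks whenever consumers fall behind
        }
        for (int i = 0; i < consumers; i++) {
            queue.put(POISON); // one pill per consumer
        }
        for (Thread w : workers) {
            w.join();
        }
    }
}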
From source file:disko.flow.analyzers.FullRelexAnalyzer.java
/**
 * Initialize the pool of LinkParserClients, creating CLIENT_POOL_SIZE
 * instances, which connect to ports FIRST_PORT, FIRST_PORT+1, ...,
 * FIRST_PORT+(CLIENT_POOL_SIZE-1).
 */
private void initializePool() throws InterruptedException {
    sentenceAlgorithmApplier = new SentenceAlgorithmApplier();
    // phraseMarkup = new PhraseMarkup();
    if (morphy == null) morphy = MorphyFactory.getImplementation();
    if ((hosts == null) || (hosts.size() == 0)) {
        for (int i = 0; i < DEFAULT_CLIENT_COUNT; i++) {
            addHost(DEFAULT_HOST, DEFAULT_FIRST_PORT + i);
        }
    }
    final ClassLoader loader = Thread.currentThread().getContextClassLoader();
    // +1 submission thread
    exec = Executors.newFixedThreadPool(hosts.size() + 1, new ThreadFactory() {
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r);
            t.setContextClassLoader(loader);
            t.setDaemon(true);
            return t;
        }
    });
    pool = new ArrayBlockingQueue<RelexContext>(hosts.size() + inProcessParsers);
    for (HostPort hp : hosts) {
        RemoteLGParser parser = new RemoteLGParser();
        parser.getLinkGrammarClient().setHostname(hp.host);
        parser.getLinkGrammarClient().setPort(hp.port);
        configureParser(parser);
        RelexContext context = new RelexContext(parser, morphy);
        pool.put(context);
    }
    for (int i = hosts.size(); i < pool.size(); i++) {
        LocalLGParser parser = new LocalLGParser();
        configureParser(parser);
        RelexContext context = new RelexContext(parser, morphy);
        pool.put(context);
    }
}
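Here ArrayBlockingQueue serves as a fixed-size resource pool rather than a work queue: contexts are put in at startup, and (in code not shown above) each worker takes a context, uses it, and puts it back, blocking while all parsers are busy. A minimal sketch of a checkout/return pool in that style, with purely illustrative names:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class ResourcePoolDemo {
    // Stand-in for an expensive resource such as a parser or connection.
    static class Resource {
        final int id;
        Resource(int id) { this.id = id; }
    }

    private final BlockingQueue<Resource> pool;

    public ResourcePoolDemo(int size) throws InterruptedException {
        pool = new ArrayBlockingQueue<>(size);
        for (int i = 0; i < size; i++) {
            pool.put(new Resource(i)); // pre-fill to capacity
        }
    }

    public void withResource() throws InterruptedException {
        Resource r = pool.take(); // blocks while every resource is checked out
        try {
            System.out.println("using resource " + r.id);
        } finally {
            pool.put(r); // always return the resource to the pool
        }
    }

    public static void main(String[] args) throws InterruptedException {
        ResourcePoolDemo demo = new ResourcePoolDemo(3);
        for (int i = 0; i < 10; i++) {
            demo.withResource();
        }
    }
}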
From source file:org.lilyproject.indexer.batchbuild.IndexingMapper.java
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    try {
        Configuration jobConf = context.getConfiguration();
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", jobConf.get("hbase.zookeeper.quorum"));
        conf.set("hbase.zookeeper.property.clientPort", jobConf.get("hbase.zookeeper.property.clientPort"));
        idGenerator = new IdGeneratorImpl();
        String zkConnectString = jobConf.get("org.lilyproject.indexer.batchbuild.zooKeeperConnectString");
        int zkSessionTimeout = getIntProp("org.lilyproject.indexer.batchbuild.zooKeeperSessionTimeout", null, jobConf);
        zk = ZkUtil.connect(zkConnectString, zkSessionTimeout);
        hbaseTableFactory = new HBaseTableFactoryImpl(conf, null, null);
        TypeManager typeManager = new HBaseTypeManager(idGenerator, conf, zk, hbaseTableFactory);
        BlobStoreAccessFactory blobStoreAccessFactory = LilyClient.getBlobStoreAccess(zk);
        RowLog wal = new DummyRowLog("The write ahead log should not be called from within MapReduce jobs.");
        repository = new HBaseRepository(typeManager, idGenerator, blobStoreAccessFactory, wal, conf,
                hbaseTableFactory);
        byte[] indexerConfBytes = Base64.decode(jobConf.get("org.lilyproject.indexer.batchbuild.indexerconf"));
        IndexerConf indexerConf = IndexerConfBuilder.build(new ByteArrayInputStream(indexerConfBytes), repository);
        Map<String, String> solrShards = new HashMap<String, String>();
        for (int i = 1; true; i++) {
            String shardName = jobConf.get("org.lilyproject.indexer.batchbuild.solrshard.name." + i);
            String shardAddress = jobConf.get("org.lilyproject.indexer.batchbuild.solrshard.address." + i);
            if (shardName == null) break;
            solrShards.put(shardName, shardAddress);
        }
        ShardSelector shardSelector;
        String shardingConf = jobConf.get("org.lilyproject.indexer.batchbuild.shardingconf");
        if (shardingConf != null) {
            byte[] shardingConfBytes = Base64.decode(shardingConf);
            shardSelector = JsonShardSelectorBuilder.build(shardingConfBytes);
        } else {
            shardSelector = DefaultShardSelectorBuilder.createDefaultSelector(solrShards);
        }
        connectionManager = new MultiThreadedHttpConnectionManager();
        connectionManager.getParams().setDefaultMaxConnectionsPerHost(5);
        connectionManager.getParams().setMaxTotalConnections(50);
        HttpClient httpClient = new HttpClient(connectionManager);
        SolrServers solrServers = new SolrServers(solrShards, shardSelector, httpClient);
        indexLocker = new IndexLocker(zk);
        indexer = new Indexer(indexerConf, repository, solrServers, indexLocker, new IndexerMetrics("dummy"));
        int workers = getIntProp("org.lilyproject.indexer.batchbuild.threads", 5, jobConf);
        executor = new ThreadPoolExecutor(workers, workers, 10, TimeUnit.SECONDS,
                new ArrayBlockingQueue<Runnable>(1000));
        executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
    } catch (Exception e) {
        throw new IOException("Error in index build map task setup.", e);
    }
}
From source file:com.quantiply.druid.HTTPTranquilityLoader.java
/**
 * Druid Tranquility HTTP Loader
 *
 * Methods in this class run in the client's thread
 *
 * Error handling:
 * - connection/protocol errors are handled here and considered fatal - they are detected on blocking operations
 *   - addAction (when cmd queue is full)
 *   - flush
 * - API errors are checked here and are also considered fatal
 * - No internal retry support - restart the process to retry
 */
public HTTPTranquilityLoader(String dataSource, WriterConfig config, Optional<Consumer<BulkReport>> onFlushOpt) {
    this.dataSource = dataSource;
    this.writerCmdQueue = new ArrayBlockingQueue<>(config.flushMaxRecords);
    final String name = config.name;
    this.writerExecSvc = Executors.newFixedThreadPool(1, r -> new Thread(r, name + " Tranquility Writer"));
    this.writer = new Writer(config, writerCmdQueue, onFlushOpt);
}
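The loader above uses the ArrayBlockingQueue as a command channel between client threads and one dedicated writer thread; sizing the queue to config.flushMaxRecords caps how far clients can run ahead of the writer. A minimal sketch of the single-writer pattern under those assumptions (the command type and names are illustrative):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class SingleWriterDemo {
    public static void main(String[] args) throws InterruptedException {
        // Capacity bounds how many unflushed commands clients may buffer.
        BlockingQueue<String> cmdQueue = new ArrayBlockingQueue<>(100);
        ExecutorService writerSvc = Executors.newFixedThreadPool(1, r -> new Thread(r, "writer"));
        writerSvc.submit(() -> {
            try {
                while (!Thread.currentThread().isInterrupted()) {
                    String cmd = cmdQueue.take(); // the only consumer
                    System.out.println("writer handled: " + cmd);
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // shutdownNow() ends the loop
            }
        });
        // Client threads block here when the writer falls behind.
        for (int i = 0; i < 10; i++) {
            cmdQueue.put("record-" + i);
        }
        writerSvc.shutdownNow();
        writerSvc.awaitTermination(5, TimeUnit.SECONDS);
    }
}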
From source file:org.eclipse.smila.connectivity.framework.compound.zip.ZipCompoundCrawler.java
/** {@inheritDoc} */
@Override
public void initialize(final DataSourceConnectionConfig config) throws CrawlerException, CrawlerCriticalException {
    if (config == null) {
        throw new CrawlerCriticalException("parameter config is null");
    }
    final Record record = getCompoundRecord();
    if (record == null) {
        throw new CrawlerCriticalException("the compound record was not set");
    }
    try {
        // get configured compound attributes and attachments
        final CompoundHandling.CompoundAttributes attributes = config.getCompoundHandling()
                .getCompoundAttributes();
        final List<CompoundHandling.CompoundAttribute> attrs = attributes.getCompoundAttributes();
        _compoundAttributes = attrs.toArray(new CompoundHandling.CompoundAttribute[attrs.size()]);
        final List<String> attachmentsNames = new ArrayList<String>();
        for (final CompoundHandling.CompoundAttribute a : _compoundAttributes) {
            if (a.isAttachment()) {
                attachmentsNames.add(a.getName());
            }
        }
        _attachmentNames = attachmentsNames.toArray(new String[attachmentsNames.size()]);
        // get the content of the compound object
        final String contentAttachmentName = config.getCompoundHandling().getContentAttachment();
        byte[] content = record.getAttachment(contentAttachmentName);
        if (content == null) {
            content = new byte[0];
        }
        final File workingDir = WorkspaceHelper.createWorkingDir(BUNDLE_ID);
        final File file = new File(workingDir,
                createTempFileNape(new ConnectivityId(record.getSource(), record.getId())));
        IOUtils.copy(new ByteArrayInputStream(content), new FileOutputStream(file));
        _zipFile = new ZipFile(file);
        _queue = new ArrayBlockingQueue<DataReference>(CAPACITY);
        _entryMap = new HashMap<ConnectivityId, ZipEntry>();
        _producerThread = new ZipEntryProducerThread(this);
        _producerThread.start();
        _initialized = true;
    } catch (Throwable e) {
        final String msg = "Error during initialization";
        if (_log.isErrorEnabled()) {
            _log.error(msg, e);
        }
        try {
            close();
        } catch (Exception ex) {
            if (_log.isErrorEnabled()) {
                _log.error("Error during close in initialization", ex);
            }
        }
        throw new CrawlerCriticalException(msg, e);
    }
}
From source file:codeswarm.code_swarm.java
/**
 * Initialisation
 */
public void setup() {
    width = cfg.getIntProperty(CodeSwarmConfig.WIDTH_KEY, 640);
    if (width <= 0) {
        width = 640;
    }
    height = cfg.getIntProperty(CodeSwarmConfig.HEIGHT_KEY, 480);
    if (height <= 0) {
        height = 480;
    }
    maxBackgroundThreads = cfg.getIntProperty(CodeSwarmConfig.MAX_THREADS_KEY, 4);
    if (maxBackgroundThreads <= 0) {
        maxBackgroundThreads = 4;
    }
    backgroundExecutor = new ThreadPoolExecutor(1, maxBackgroundThreads, Long.MAX_VALUE, TimeUnit.NANOSECONDS,
            new ArrayBlockingQueue<Runnable>(4 * maxBackgroundThreads),
            new ThreadPoolExecutor.CallerRunsPolicy());
    if (cfg.getBooleanProperty(CodeSwarmConfig.USE_OPEN_GL, false)) {
        size(width, height, OPENGL);
    } else {
        size(width, height);
    }
    showLegend = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_LEGEND, false);
    showHistogram = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_HISTORY, false);
    showDate = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_DATE, false);
    showEdges = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_EDGES, false);
    showDebug = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_DEBUG, false);
    takeSnapshots = cfg.getBooleanProperty(CodeSwarmConfig.TAKE_SNAPSHOTS_KEY, false);
    drawNamesSharp = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_NAMES_SHARP, true);
    drawNamesHalos = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_NAMES_HALOS, false);
    drawFilesSharp = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_SHARP, false);
    drawFilesFuzzy = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_FUZZY, true);
    drawFilesJelly = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_JELLY, false);
    background = cfg.getBackground().getRGB();
    UPDATE_DELTA = cfg.getIntProperty(CodeSwarmConfig.MSEC_PER_FRAME_KEY, -1);
    if (UPDATE_DELTA == -1) {
        int framesperday = cfg.getIntProperty(CodeSwarmConfig.FRAMES_PER_DAY_KEY, 4);
        if (framesperday > 0) {
            UPDATE_DELTA = (86400000 / framesperday);
        }
    }
    if (UPDATE_DELTA <= 0) {
        // Default to 4 frames per day.
        UPDATE_DELTA = 21600000;
    }
    isInputSorted = cfg.getBooleanProperty(CodeSwarmConfig.IS_INPUT_SORTED_KEY, false);

    /**
     * This section loads config files and calls the setup method for all physics engines.
     */
    physicsEngineConfigDir = cfg.getStringProperty(CodeSwarmConfig.PHYSICS_ENGINE_CONF_DIR, "physics_engine");
    File f = new File(physicsEngineConfigDir);
    String[] configFiles = null;
    if (f.exists() && f.isDirectory()) {
        configFiles = f.list();
    }
    for (int i = 0; configFiles != null && i < configFiles.length; i++) {
        if (configFiles[i].endsWith(".config")) {
            Properties p = new Properties();
            String ConfigPath = physicsEngineConfigDir + System.getProperty("file.separator") + configFiles[i];
            try {
                p.load(new FileInputStream(ConfigPath));
            } catch (FileNotFoundException e) {
                e.printStackTrace();
                System.exit(1);
            } catch (IOException e) {
                e.printStackTrace();
                System.exit(1);
            }
            String ClassName = p.getProperty("name", "__DEFAULT__");
            if (!ClassName.equals("__DEFAULT__")) {
                PhysicsEngine pe = getPhysicsEngine(ClassName);
                pe.setup(p);
                mPhysicsEngineChoices.add(pe);
            } else {
                logger.error("Skipping config file '" + ConfigPath
                        + "'. Must specify class name via the 'name' parameter.");
                System.exit(1);
            }
        }
    }
    if (mPhysicsEngineChoices.size() == 0) {
        logger.error("No physics engine config files found in '" + physicsEngineConfigDir + "'.");
        System.exit(1);
    }
    // Physics engine configuration and instantiation
    physicsEngineSelection = cfg.getStringProperty(CodeSwarmConfig.PHYSICS_ENGINE_SELECTION,
            PHYSICS_ENGINE_LEGACY);
    for (PhysicsEngine p : mPhysicsEngineChoices) {
        if (physicsEngineSelection.equals(p.getClass().getName())) {
            mPhysicsEngine = p;
        }
    }
    if (mPhysicsEngine == null) {
        logger.error("No physics engine matches your choice of '" + physicsEngineSelection + "'. Check '"
                + physicsEngineConfigDir + "' for options.");
        System.exit(1);
    }
    smooth();
    frameRate(FRAME_RATE);
    // init data structures
    nodes = new CopyOnWriteArrayList<FileNode>();
    edges = new CopyOnWriteArrayList<Edge>();
    people = new CopyOnWriteArrayList<PersonNode>();
    history = new LinkedList<ColorBins>();
    if (isInputSorted) {
        // If the input is sorted, we only need to store the next few events
        eventsQueue = new ArrayBlockingQueue<FileEvent>(5000);
    } else {
        // Otherwise we need to store them all at once in a data structure that will sort them
        eventsQueue = new PriorityBlockingQueue<FileEvent>();
    }
    // Init color map
    initColors();
    loadRepEvents(cfg.getStringProperty(CodeSwarmConfig.INPUT_FILE_KEY)); // event formatted (this is the standard)
    synchronized (this) {
        while (!finishedLoading && eventsQueue.isEmpty()) {
            try {
                wait();
            } catch (InterruptedException e) {
                logger.error("The ready-check thread was interrupted", e);
            }
        }
    }
    prevDate = eventsQueue.peek().getDate();
    SCREENSHOT_FILE = cfg.getStringProperty(CodeSwarmConfig.SNAPSHOT_LOCATION_KEY);
    maxFramesSaved = (int) Math.pow(10, SCREENSHOT_FILE.replaceAll("[^#]", "").length());
    // Create fonts
    String fontName = cfg.getStringProperty(CodeSwarmConfig.FONT_KEY, "SansSerif");
    String fontNameBold = cfg.getStringProperty(CodeSwarmConfig.FONT_KEY_BOLD, "SansSerif");
    Integer fontSize = cfg.getIntProperty(CodeSwarmConfig.FONT_SIZE, 10);
    Integer fontSizeBold = cfg.getIntProperty(CodeSwarmConfig.FONT_SIZE_BOLD, 14);
    font = createFont(fontName, fontSize);
    boldFont = createFont(fontNameBold, fontSizeBold);
    textFont(font);
    // Create the file particle image
    sprite = loadImage(cfg.getStringProperty(CodeSwarmConfig.SPRITE_FILE_KEY, "particle.png"));
    // Add translucency (using itself in this case)
    sprite.mask(sprite);
}
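Note the data-structure choice near the end of the example above: when events arrive already sorted, a bounded ArrayBlockingQueue(5000) holding just the next few events suffices and gives backpressure, whereas unsorted input falls back to an unbounded PriorityBlockingQueue that orders events on the fly. A small sketch of that decision (the Event stand-in and all names are illustrative, not code_swarm's):

import java.util.Comparator;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;

public class EventQueueChoiceDemo {
    // Stand-in for a timestamped file event.
    static class Event {
        final long date;
        Event(long date) { this.date = date; }
    }

    static BlockingQueue<Event> createEventsQueue(boolean inputSorted) {
        if (inputSorted) {
            // Sorted input: a bounded window of upcoming events is enough,
            // and put() blocks the loader when the consumer falls behind.
            return new ArrayBlockingQueue<Event>(5000);
        }
        // Unsorted input: keep everything and let the queue sort by date.
        // PriorityBlockingQueue is unbounded, so put() never blocks.
        return new PriorityBlockingQueue<Event>(11, Comparator.comparingLong((Event e) -> e.date));
    }

    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<Event> q = createEventsQueue(false);
        q.put(new Event(30));
        q.put(new Event(10));
        q.put(new Event(20));
        System.out.println(q.take().date); // 10: earliest event first
    }
}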
From source file:org.eclipse.smila.connectivity.framework.crawler.filesystem.FileSystemCrawler.java
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.smila.connectivity.framework.Crawler#
 *      initialize(org.eclipse.smila.connectivity.framework.schema.config.DataSourceConnectionConfig)
 */
@Override
public void initialize(final DataSourceConnectionConfig config) throws CrawlerException, CrawlerCriticalException {
    _log.info("Initializing FileSystemCrawler...");
    synchronized (_openedMonitor) {
        if (_opened) {
            throw new CrawlerCriticalException(
                    "Crawler is busy (it should not happen because new instances are created by ComponentFactories)");
        }
        checkFolders(config);
        _opened = true;
    }
    _forceClosing = false;
    _isProducerRunning = true;
    _queue = new ArrayBlockingQueue<DataReference>(CAPACITY);
    _idToPath = new HashMap<ConnectivityId, File>();
    final Attributes attributes = config.getAttributes();
    final List<IAttribute> attrs = attributes.getAttribute();
    _performanceCounters = new CrawlerPerformanceCounterHelper<FileSystemCrawlerPerformanceAgent>(config,
            hashCode(), FileSystemCrawlerPerformanceAgent.class);
    _attributes = attrs.toArray(new Attribute[attrs.size()]);
    final List<String> attachmentsNames = new ArrayList<String>();
    for (final Attribute a : _attributes) {
        if (a.isAttachment()) {
            attachmentsNames.add(a.getName());
        }
    }
    _attachmentNames = attachmentsNames.toArray(new String[attachmentsNames.size()]);
    _crawlThread = new CrawlingProducerThread(this, config);
    _crawlThread.start();
}