Example usage for the java.util.concurrent.ThreadPoolExecutor constructor

Introduction

On this page you can find example usages of the java.util.concurrent.ThreadPoolExecutor constructor.

Prototype

public ThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit,
        BlockingQueue<Runnable> workQueue) 

Document

Creates a new ThreadPoolExecutor with the given initial parameters, the default thread factory and the default rejected execution handler.
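
A minimal sketch of this constructor in use (the pool sizes, keep-alive, and queue capacity are arbitrary illustrative values, not taken from any example below):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ThreadPoolExecutorSketch {
    public static void main(String[] args) throws InterruptedException {
        // 2 core threads, up to 4 in total; surplus threads are reclaimed after
        // 30 seconds idle; at most 10 tasks may wait in the bounded queue.
        ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 4, 30L, TimeUnit.SECONDS,
                new ArrayBlockingQueue<Runnable>(10));
        for (int i = 0; i < 5; i++) {
            final int id = i;
            executor.execute(() -> System.out.println("task " + id));
        }
        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.MINUTES);
    }
}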

Usage

From source file:com.amazonaws.services.simpleworkflow.flow.worker.GenericWorker.java

@Override
public void start() {
    if (log.isInfoEnabled()) {
        log.info("start: " + toString());
    }
    checkStarted();
    checkRequiredProperty(service, "service");
    checkRequiredProperty(domain, "domain");
    checkRequiredProperty(taskListToPoll, "taskListToPoll");
    checkRequredProperties();

    if (registerDomain) {
        registerDomain();
    }

    if (!disableTypeRegitrationOnStart) {
        registerTypesToPoll();
    }

    if (maximumPollRatePerSecond > 0.0) {
        pollRateThrottler = new Throttler("pollRateThrottler " + taskListToPoll, maximumPollRatePerSecond,
                maximumPollRateIntervalMilliseconds);
    }

    pollExecutor = new ThreadPoolExecutor(pollThreadCount, pollThreadCount, 1, TimeUnit.MINUTES,
            new LinkedBlockingQueue<Runnable>(pollThreadCount));
    ExecutorThreadFactory pollExecutorThreadFactory = getExecutorThreadFactory();
    pollExecutor.setThreadFactory(pollExecutorThreadFactory);

    pollBackoffThrottler = new BackoffThrottler(pollBackoffInitialInterval, pollBackoffMaximumInterval,
            pollBackoffCoefficient);
    poller = createPoller();
    for (int i = 0; i < pollThreadCount; i++) {
        pollExecutor.execute(new PollServiceTask(poller));
    }
}

From source file:com.xerox.amazonws.sdb.Domain.java

public ThreadPoolExecutor getThreadPoolExecutor() {
    if (executor != null) {
        return executor;
    } else {
        return new ThreadPoolExecutor(maxThreads, maxThreads, 5, TimeUnit.SECONDS,
                new ArrayBlockingQueue<Runnable>(maxThreads));
    }
}
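
Note: because this pool pairs a queue bounded at maxThreads with the five-argument constructor, it inherits the default AbortPolicy handler, so once all maxThreads workers are busy and the queue is full, further execute() calls throw RejectedExecutionException. A hedged variant (assuming the same surrounding fields) that degrades gracefully by running overflow tasks on the submitting thread:

return new ThreadPoolExecutor(maxThreads, maxThreads, 5, TimeUnit.SECONDS,
        new ArrayBlockingQueue<Runnable>(maxThreads),
        new ThreadPoolExecutor.CallerRunsPolicy()); // overflow runs on the caller's thread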

From source file:metlos.executors.batch.BatchExecutorTest.java

private long rapidFireSimpleExecutorTime(final int taskDurationMillis, int nofJobs, int nofThreads)
        throws Exception {

    ThreadPoolExecutor ex = new ThreadPoolExecutor(nofThreads, nofThreads, 0, TimeUnit.NANOSECONDS,
            new LinkedBlockingQueue<Runnable>());
    List<Callable<Void>> payload = getCallables(taskDurationMillis, nofJobs);

    return measureExecutionTime(System.currentTimeMillis(), ex.invokeAll(payload));
}

From source file:eu.edisonproject.training.wsd.Wikidata.java

private Map<CharSequence, List<CharSequence>> getCategories(Set<Term> terms)
        throws MalformedURLException, InterruptedException, ExecutionException {
    Map<CharSequence, List<CharSequence>> cats = new HashMap<>();

    if (terms.size() > 0) {
        int maxT = 2;
        BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue<>(maxT);
        ExecutorService pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);

        //            ExecutorService pool = new ThreadPoolExecutor(maxT, maxT,
        //                    5000L, TimeUnit.MILLISECONDS,
        //                    new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());
        Set<Future<Map<CharSequence, List<CharSequence>>>> set1 = new HashSet<>();
        String prop = "P910";
        for (Term t : terms) {
            URL url = new URL(
                    PAGE + "?action=wbgetclaims&format=json&props=&property=" + prop + "&entity=" + t.getUid());
            Logger.getLogger(Wikidata.class.getName()).log(Level.FINE, url.toString());
            WikiRequestor req = new WikiRequestor(url, t.getUid().toString(), 1);
            Future<Map<CharSequence, List<CharSequence>>> future = pool.submit(req);
            set1.add(future);
        }
        pool.shutdown();

        Map<CharSequence, List<CharSequence>> map = new HashMap<>();
        for (Future<Map<CharSequence, List<CharSequence>>> future : set1) {
            while (!future.isDone()) {
                //                Logger.getLogger(Wikipedia.class.getName()).log(Level.INFO, "Task is not completed yet....");
                Thread.sleep(10);
            }
            Map<CharSequence, List<CharSequence>> c = future.get();
            if (c != null) {
                map.putAll(c);
            }
        }
        workQueue = new ArrayBlockingQueue<>(maxT);
        pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);

        //            pool = new ThreadPoolExecutor(maxT, maxT,
        //                    5000L, TimeUnit.MILLISECONDS,
        //                    new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());
        Set<Future<Map<CharSequence, List<CharSequence>>>> set2 = new HashSet<>();
        for (Term t : terms) {
            List<CharSequence> catIDs = map.get(t.getUid());
            for (CharSequence catID : catIDs) {
                URL url = new URL(
                        PAGE + "?action=wbgetentities&format=json&props=labels&languages=en&ids=" + catID);
                Logger.getLogger(Wikidata.class.getName()).log(Level.FINE, url.toString());
                WikiRequestor req = new WikiRequestor(url, t.getUid().toString(), 2);
                Future<Map<CharSequence, List<CharSequence>>> future = pool.submit(req);
                set2.add(future);
            }
        }
        pool.shutdown();

        for (Future<Map<CharSequence, List<CharSequence>>> future : set2) {
            while (!future.isDone()) {
                //                Logger.getLogger(Wikipedia.class.getName()).log(Level.INFO, "Task is not completed yet....");
                Thread.sleep(10);
            }
            Map<CharSequence, List<CharSequence>> c = future.get();
            if (c != null) {
                cats.putAll(c);
            }
        }
    }

    return cats;
}
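
Two details of this example deserve a caution. First, the isDone() polling loops are redundant: Future.get() already blocks until its task completes, so each loop collapses to a plain get(), as in this sketch of the first collection pass:

for (Future<Map<CharSequence, List<CharSequence>>> future : set1) {
    Map<CharSequence, List<CharSequence>> c = future.get(); // blocks until the task is done
    if (c != null) {
        map.putAll(c);
    }
}

Second, the keep-alive of 500L with TimeUnit.MICROSECONDS is half a millisecond; given that the commented-out alternative uses 5000L milliseconds, TimeUnit.MILLISECONDS was presumably intended.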

From source file:org.apache.hadoop.mapreduce.jobhistory.JobHistory.java

private void startFileMoverThreads() {
    executor = new ThreadPoolExecutor(1, 3, 1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>());
}
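
One caveat with this configuration: ThreadPoolExecutor only creates threads beyond corePoolSize when the work queue rejects an offer, and an unbounded LinkedBlockingQueue never rejects, so this pool runs at most one thread and the maximumPoolSize of 3 is never reached. A small sketch that makes the behavior visible:

ThreadPoolExecutor ex = new ThreadPoolExecutor(1, 3, 1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>());
for (int i = 0; i < 10; i++) {
    ex.execute(() -> {
        try {
            Thread.sleep(100);
        } catch (InterruptedException ignored) {
        }
    });
}
System.out.println(ex.getPoolSize()); // prints 1: excess tasks queue up instead of spawning threads
ex.shutdown();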

From source file:org.scenarioo.api.ScenarioDocuWriter.java

/**
 * Creates an executor that queues the passed tasks for execution by a single additional thread. The executor will
 * start to block further submissions as soon as more than the configured number of write tasks are waiting for execution.
 */
private static ExecutorService newAsyncWriteExecutor() {
    return new ThreadPoolExecutor(1, 1, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(
            ScenarioDocuGeneratorConfiguration.INSTANCE.getAsyncWriteBufferSize()));
}
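
Note that the blocking described in the Javadoc is not something the five-argument constructor provides by itself: its default handler is AbortPolicy, so a task submitted while the LinkedBlockingQueue is at capacity triggers a RejectedExecutionException rather than blocking. Truly blocking submission requires a custom RejectedExecutionHandler or a queue whose offer() blocks, as the ImportRunner example further down this page demonstrates.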

From source file:org.bimserver.geometry.StreamingGeometryGenerator.java

@SuppressWarnings("unchecked")
public GenerateGeometryResult generateGeometry(long uoid, final DatabaseSession databaseSession,
        QueryContext queryContext, long nrObjects)
        throws BimserverDatabaseException, GeometryGeneratingException {
    GenerateGeometryResult generateGeometryResult = new GenerateGeometryResult();
    packageMetaData = queryContext.getPackageMetaData();
    productClass = packageMetaData.getEClass("IfcProduct");
    geometryFeature = productClass.getEStructuralFeature("geometry");
    representationFeature = productClass.getEStructuralFeature("Representation");
    representationsFeature = packageMetaData.getEClass("IfcProductDefinitionShape")
            .getEStructuralFeature("Representations");
    itemsFeature = packageMetaData.getEClass("IfcShapeRepresentation").getEStructuralFeature("Items");
    mappingSourceFeature = packageMetaData.getEClass("IfcMappedItem").getEStructuralFeature("MappingSource");

    GregorianCalendar now = new GregorianCalendar();
    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
    debugIdentifier = dateFormat.format(now.getTime()) + " (" + report.getOriginalIfcFileName() + ")";

    long start = System.nanoTime();
    String pluginName = "";
    if (queryContext.getPackageMetaData().getSchema() == Schema.IFC4) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc4StepStreamingSerializerPlugin";
    } else if (queryContext.getPackageMetaData().getSchema() == Schema.IFC2X3TC1) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc2x3tc1StepStreamingSerializerPlugin";
    } else {
        throw new GeometryGeneratingException(
                "Unknown schema " + queryContext.getPackageMetaData().getSchema());
    }

    reuseGeometry = bimServer.getServerSettingsCache().getServerSettings().isReuseGeometry();
    optimizeMappedItems = bimServer.getServerSettingsCache().getServerSettings().isOptimizeMappedItems();

    report.setStart(new GregorianCalendar());
    report.setIfcSchema(queryContext.getPackageMetaData().getSchema());
    report.setUseMappingOptimization(optimizeMappedItems);
    report.setReuseGeometry(reuseGeometry);

    try {
        final StreamingSerializerPlugin ifcSerializerPlugin = (StreamingSerializerPlugin) bimServer
                .getPluginManager().getPlugin(pluginName, true);
        if (ifcSerializerPlugin == null) {
            throw new UserException("No IFC serializer found");
        }

        User user = (User) databaseSession.get(uoid, org.bimserver.database.OldQuery.getDefault());
        UserSettings userSettings = user.getUserSettings();

        report.setUserName(user.getName());
        report.setUserUserName(user.getUsername());

        RenderEnginePluginConfiguration renderEngine = null;
        if (eoid != -1) {
            renderEngine = databaseSession.get(eoid, OldQuery.getDefault());
        } else {
            renderEngine = userSettings.getDefaultRenderEngine();
        }
        if (renderEngine == null) {
            throw new UserException("No default render engine has been selected for this user");
        }
        renderEngineName = renderEngine.getName();

        int availableProcessors = Runtime.getRuntime().availableProcessors();
        report.setAvailableProcessors(availableProcessors);

        int maxSimultanousThreads = Math.min(
                bimServer.getServerSettingsCache().getServerSettings().getRenderEngineProcesses(),
                availableProcessors);
        if (maxSimultanousThreads < 1) {
            maxSimultanousThreads = 1;
        }

        final RenderEngineSettings settings = new RenderEngineSettings();
        settings.setPrecision(Precision.SINGLE);
        settings.setIndexFormat(IndexFormat.AUTO_DETECT);
        settings.setGenerateNormals(true);
        settings.setGenerateTriangles(true);
        settings.setGenerateWireFrame(false);

        final RenderEngineFilter renderEngineFilter = new RenderEngineFilter();

        RenderEnginePool renderEnginePool = bimServer.getRenderEnginePools().getRenderEnginePool(
                packageMetaData.getSchema(), renderEngine.getPluginDescriptor().getPluginClassName(),
                bimServer.getPluginSettingsCache().getPluginSettings(renderEngine.getOid()));

        report.setRenderEngineName(renderEngine.getName());
        report.setRenderEnginePluginVersion(
                renderEngine.getPluginDescriptor().getPluginBundleVersion().getVersion());

        try (RenderEngine engine = renderEnginePool.borrowObject()) {
            VersionInfo versionInfo = renderEnginePool.getRenderEngineFactory().getVersionInfo();
            report.setRenderEngineVersion(versionInfo);
            applyLayerSets = engine.isApplyLayerSets();
            report.setApplyLayersets(applyLayerSets);
            calculateQuantities = engine.isCalculateQuantities();
            report.setCalculateQuantities(calculateQuantities);
        }

        // TODO reuse, pool the pools :) Or something smarter
        // TODO reuse queue, or try to determine a realistic size, or don't use a fixed-size queue
        ThreadPoolExecutor executor = new ThreadPoolExecutor(maxSimultanousThreads, maxSimultanousThreads, 24,
                TimeUnit.HOURS, new ArrayBlockingQueue<Runnable>(10000000));

        JsonQueryObjectModelConverter jsonQueryObjectModelConverter = new JsonQueryObjectModelConverter(
                packageMetaData);
        String queryNameSpace = packageMetaData.getSchema().name().toLowerCase() + "-stdlib";

        // All references should already be direct, since this is now done in BimServer on startup, quite the hack...
        Include objectPlacement = jsonQueryObjectModelConverter
                .getDefineFromFile(queryNameSpace + ":ObjectPlacement", true);

        Set<EClass> classes = null;
        if (queryContext.getOidCounters() != null) {
            classes = queryContext.getOidCounters().keySet();
        } else {
            classes = packageMetaData.getEClasses();
        }

        float multiplierToMm = processUnits(databaseSession, queryContext);
        generateGeometryResult.setMultiplierToMm(multiplierToMm);

        // Phase 1 (mapped item detection) sometimes detects that mapped items have invalid (unsupported) RepresentationIdentifier values, this set keeps track of objects to skip in Phase 2 because of that
        Set<Long> toSkip = new HashSet<>();

        for (EClass eClass : classes) {
            if (packageMetaData.getEClass("IfcProduct").isSuperTypeOf(eClass)) {
                int nrObjectsForType = 0;

                Query query2 = new Query(eClass.getName() + "Main query", packageMetaData);
                QueryPart queryPart2 = query2.createQueryPart();
                queryPart2.addType(eClass, false);
                Include representationInclude = queryPart2.createInclude();
                representationInclude.addType(eClass, false);
                representationInclude.addFieldDirect("Representation");
                Include representationsInclude = representationInclude.createInclude();
                representationsInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
                representationsInclude.addFieldDirect("Representations");
                Include itemsInclude = representationsInclude.createInclude();
                itemsInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), false);
                itemsInclude.addFieldDirect("Items");
                itemsInclude.addFieldDirect("ContextOfItems");
                Include mappingSourceInclude = itemsInclude.createInclude();
                mappingSourceInclude.addType(packageMetaData.getEClass("IfcMappedItem"), false);
                mappingSourceInclude.addFieldDirect("MappingSource");
                mappingSourceInclude.addFieldDirect("MappingTarget");
                Include representationMap = mappingSourceInclude.createInclude();
                representationMap.addType(packageMetaData.getEClass("IfcRepresentationMap"), false);
                representationMap.addFieldDirect("MappedRepresentation");
                Include createInclude = representationMap.createInclude();
                createInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), true);

                Include targetInclude = mappingSourceInclude.createInclude();
                targetInclude.addType(packageMetaData.getEClass("IfcCartesianTransformationOperator3D"), false);
                targetInclude.addFieldDirect("Axis1");
                targetInclude.addFieldDirect("Axis2");
                targetInclude.addFieldDirect("Axis3");
                targetInclude.addFieldDirect("LocalOrigin");

                queryPart2.addInclude(objectPlacement);

                Map<Long, Map<Long, ProductDef>> representationMapToProduct = new HashMap<>();

                QueryObjectProvider queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer,
                        query2, Collections.singleton(queryContext.getRoid()), packageMetaData);
                HashMapVirtualObject next = queryObjectProvider2.next();
                int nrProductsWithRepresentation = 0;
                while (next != null) {
                    if (next.eClass() == eClass) {
                        AbstractHashMapVirtualObject representation = next
                                .getDirectFeature(representationFeature);
                        if (representation != null) {
                            Set<HashMapVirtualObject> representations = representation
                                    .getDirectListFeature(representationsFeature);
                            if (representations != null) {
                                boolean foundValidContext = false;
                                for (HashMapVirtualObject representationItem : representations) {
                                    if (usableContext(representationItem)) {
                                        foundValidContext = true;
                                    }
                                }
                                if (foundValidContext) {
                                    nrProductsWithRepresentation++;
                                }
                                for (HashMapVirtualObject representationItem : representations) {
                                    if (!usableContext(representationItem) && foundValidContext) {
                                        continue;
                                    }
                                    if (hasValidRepresentationIdentifier(representationItem)) {
                                        Set<HashMapVirtualObject> items = representationItem
                                                .getDirectListFeature(itemsFeature);
                                        if (items == null || items.size() > 1) {
                                            // Only if there is just one item, we'll store this for reuse
                                            continue;
                                        }
                                        // So this next loop runs at most once
                                        for (HashMapVirtualObject item : items) {
                                            report.addRepresentationItem(item.eClass().getName());
                                            if (!packageMetaData.getEClass("IfcMappedItem")
                                                    .isSuperTypeOf(item.eClass())) {
                                                nrObjectsForType++;
                                                continue; // All non IfcMappedItem objects will be done in phase 2
                                            }
                                            AbstractHashMapVirtualObject mappingTarget = item
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcMappedItem", "MappingTarget"));
                                            AbstractHashMapVirtualObject mappingSourceOfMappedItem = item
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcMappedItem", "MappingSource"));
                                            if (mappingSourceOfMappedItem == null) {
                                                LOGGER.info("No mapping source");
                                                continue;
                                            }
                                            AbstractHashMapVirtualObject mappedRepresentation = mappingSourceOfMappedItem
                                                    .getDirectFeature(packageMetaData.getEReference(
                                                            "IfcRepresentationMap", "MappedRepresentation"));

                                            if (!hasValidRepresentationIdentifier(mappedRepresentation)) {
                                                // Skip this mapping, we should store somewhere that this object should also be skipped in the normal way
                                                // TODO too many log statements, should log only 1 line for the complete model
                                                //                                       LOGGER.info("Skipping because of invalid RepresentationIdentifier in mapped item (" + (String) mappedRepresentation.get("RepresentationIdentifier") + ")");
                                                report.addSkippedBecauseOfInvalidRepresentationIdentifier(
                                                        (String) mappedRepresentation
                                                                .get("RepresentationIdentifier"));
                                                toSkip.add(next.getOid());
                                                continue;
                                            }
                                            double[] mappingMatrix = Matrix.identity();
                                            double[] productMatrix = Matrix.identity();
                                            if (mappingTarget != null) {
                                                AbstractHashMapVirtualObject axis1 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis1"));
                                                AbstractHashMapVirtualObject axis2 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis2"));
                                                AbstractHashMapVirtualObject axis3 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis3"));
                                                AbstractHashMapVirtualObject localOrigin = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator",
                                                                "LocalOrigin"));

                                                double[] a1 = null;
                                                double[] a2 = null;
                                                double[] a3 = null;

                                                if (axis3 != null) {
                                                    List<Double> list = (List<Double>) axis3
                                                            .get("DirectionRatios");
                                                    a3 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                } else {
                                                    a3 = new double[] { 0, 0, 1, 1 };
                                                    Vector.normalize(a3);
                                                }

                                                if (axis1 != null) {
                                                    List<Double> list = (List<Double>) axis1
                                                            .get("DirectionRatios");
                                                    a1 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                    Vector.normalize(a1);
                                                } else {
                                                    //                                          if (a3[0] == 1 && a3[1] == 0 && a3[2] == 0) {
                                                    a1 = new double[] { 1, 0, 0, 1 };
                                                    //                                          } else {
                                                    //                                             a1 = new double[]{0, 1, 0, 1};
                                                    //                                          }
                                                }

                                                double[] xVec = Vector.scalarProduct(Vector.dot(a1, a3), a3);
                                                double[] xAxis = Vector.subtract(a1, xVec);
                                                Vector.normalize(xAxis);

                                                if (axis2 != null) {
                                                    List<Double> list = (List<Double>) axis2
                                                            .get("DirectionRatios");
                                                    a2 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                    Vector.normalize(a2);
                                                } else {
                                                    a2 = new double[] { 0, 1, 0, 1 };
                                                }

                                                double[] tmp = Vector.scalarProduct(Vector.dot(a2, a3), a3);
                                                double[] yAxis = Vector.subtract(a2, tmp);
                                                tmp = Vector.scalarProduct(Vector.dot(a2, xAxis), xAxis);
                                                yAxis = Vector.subtract(yAxis, tmp);
                                                Vector.normalize(yAxis);

                                                a2 = yAxis;
                                                a1 = xAxis;

                                                List<Double> t = (List<Double>) localOrigin.get("Coordinates");
                                                mappingMatrix = new double[] { a1[0], a1[1], a1[2], 0, a2[0],
                                                        a2[1], a2[2], 0, a3[0], a3[1], a3[2], 0,
                                                        t.get(0).doubleValue(), t.get(1).doubleValue(),
                                                        t.get(2).doubleValue(), 1 };
                                            }

                                            AbstractHashMapVirtualObject placement = next
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcProduct", "ObjectPlacement"));
                                            if (placement != null) {
                                                productMatrix = placementToMatrix(placement);
                                            }

                                            AbstractHashMapVirtualObject mappingSource = item
                                                    .getDirectFeature(mappingSourceFeature);
                                            if (mappingSource != null) {
                                                Map<Long, ProductDef> map = representationMapToProduct
                                                        .get(((HashMapVirtualObject) mappingSource).getOid());
                                                if (map == null) {
                                                    map = new LinkedHashMap<>();
                                                    representationMapToProduct.put(
                                                            ((HashMapVirtualObject) mappingSource).getOid(),
                                                            map);
                                                }
                                                ProductDef pd = new ProductDef(next.getOid());
                                                pd.setMappedItemOid(item.getOid());
                                                pd.setObject(next);

                                                pd.setProductMatrix(productMatrix);
                                                pd.setMappingMatrix(mappingMatrix);
                                                map.put(next.getOid(), pd);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    next = queryObjectProvider2.next();
                }

                Set<Long> done = new HashSet<>();

                for (Long repMapId : representationMapToProduct.keySet()) {
                    Map<Long, ProductDef> map = representationMapToProduct.get(repMapId);

                    // When there is more than one instance using this mapping
                    if (map.size() > 1) {
                        Query query = new Query("Reuse query " + eClass.getName(), packageMetaData);
                        QueryPart queryPart = query.createQueryPart();
                        //                     QueryPart queryPart3 = query.createQueryPart();
                        queryPart.addType(eClass, false);
                        //                     queryPart3.addType(packageMetaData.getEClass("IfcMappedItem"), false);

                        long masterOid = map.values().iterator().next().getOid();

                        double[] inverted = Matrix.identity();
                        ProductDef masterProductDef = map.get(masterOid);
                        if (!Matrix.invertM(inverted, 0, masterProductDef.getMappingMatrix(), 0)) {
                            LOGGER.debug("No inverse, this mapping will be skipped and processed as normal");
                            // This is probably because of mirroring of something funky

                            // TODO we should however be able to squeeze out a little more reuse by finding another master...
                            continue;
                        }

                        for (ProductDef pd : map.values()) {
                            done.add(pd.getOid());
                            if (!optimizeMappedItems) {
                                queryPart.addOid(pd.getOid());

                                // In theory these should be fused together during querying
                                //                           queryPart3.addOid(pd.getMappedItemOid());
                            } else {
                                pd.setMasterOid(masterOid);
                            }
                        }
                        if (optimizeMappedItems) {
                            queryPart.addOid(masterOid);
                        }

                        LOGGER.debug("Running " + map.size()
                                + " objects in one batch because of reused geometry " + (eClass.getName()));

                        //                     queryPart3.addInclude(jsonQueryObjectModelConverter.getDefineFromFile("ifc2x3tc1-stdlib:IfcMappedItem"));

                        processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin,
                                settings, renderEngineFilter, renderEnginePool, executor, eClass, query,
                                queryPart, true, map, map.size());
                    }
                }

                Query query3 = new Query("Remaining " + eClass.getName(), packageMetaData);
                QueryPart queryPart3 = query3.createQueryPart();
                queryPart3.addType(eClass, false);
                Include include3 = queryPart3.createInclude();
                include3.addType(eClass, false);
                include3.addFieldDirect("Representation");
                Include rInclude = include3.createInclude();
                rInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
                rInclude.addFieldDirect("Representations");
                Include representationsInclude2 = rInclude.createInclude();
                representationsInclude2.addType(packageMetaData.getEClass("IfcShapeModel"), true);
                representationsInclude2.addFieldDirect("ContextOfItems");

                queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer, query3,
                        Collections.singleton(queryContext.getRoid()), packageMetaData);
                next = queryObjectProvider2.next();

                Query query = new Query("Main " + eClass.getName(), packageMetaData);
                QueryPart queryPart = query.createQueryPart();
                int written = 0;

                int maxObjectsPerFile = 0;
                if (nrProductsWithRepresentation <= 100) {
                    maxObjectsPerFile = 1;
                } else if (nrProductsWithRepresentation < 10000) {
                    maxObjectsPerFile = (int) (nrProductsWithRepresentation / 100);
                } else {
                    maxObjectsPerFile = 100;
                }

                //               LOGGER.info(report.getOriginalIfcFileName());
                //               LOGGER.info("Max objects per file: " + maxObjectsPerFile + " (" + eClass.getName() + ": " + nrProductsWithRepresentation + ")");

                report.setMaxPerFile(maxObjectsPerFile);

                while (next != null) {
                    if (next.eClass() == eClass && !done.contains(next.getOid())
                            && !toSkip.contains(next.getOid())) {
                        AbstractHashMapVirtualObject representation = next
                                .getDirectFeature(representationFeature);
                        if (representation != null) {
                            Set<HashMapVirtualObject> list = representation.getDirectListFeature(packageMetaData
                                    .getEReference("IfcProductRepresentation", "Representations"));
                            boolean goForIt = goForIt(list);
                            if (goForIt) {
                                if (next.eClass() == eClass && !done.contains(next.getOid())) {
                                    representation = next.getDirectFeature(representationFeature);
                                    if (representation != null) {
                                        list = representation.getDirectListFeature(packageMetaData
                                                .getEReference("IfcProductRepresentation", "Representations"));
                                        boolean goForIt2 = goForIt(list);
                                        if (goForIt2) {
                                            queryPart.addOid(next.getOid());
                                            written++;
                                            if (written >= maxObjectsPerFile) {
                                                processQuery(databaseSession, queryContext,
                                                        generateGeometryResult, ifcSerializerPlugin, settings,
                                                        renderEngineFilter, renderEnginePool, executor, eClass,
                                                        query, queryPart, false, null, written);
                                                query = new Query("Main " + eClass.getName(), packageMetaData);
                                                queryPart = query.createQueryPart();
                                                written = 0;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    next = queryObjectProvider2.next();
                }
                if (written > 0) {
                    processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin,
                            settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart,
                            false, null, written);
                }
            }
        }

        allJobsPushed = true;

        executor.shutdown();
        executor.awaitTermination(24, TimeUnit.HOURS);

        // Need total bounds
        //         float[] quantizationMatrix = createQuantizationMatrixFromBounds(boundsMm);
        //         ByteBuffer verticesQuantized = quantizeVertices(vertices, quantizationMatrix, generateGeometryResult.getMultiplierToMm());
        //         geometryData.setAttribute(GeometryPackage.eINSTANCE.getGeometryData_VerticesQuantized(), verticesQuantized.array());

        LOGGER.debug("Generating quantized vertices");
        double[] quantizationMatrix = createQuantizationMatrixFromBounds(
                generateGeometryResult.getBoundsUntransformed(), multiplierToMm);
        for (Long id : geometryDataMap.keySet()) {
            Tuple<HashMapVirtualObject, ByteBuffer> tuple = geometryDataMap.get(id);

            HashMapVirtualObject buffer = new HashMapVirtualObject(queryContext,
                    GeometryPackage.eINSTANCE.getBuffer());
            //            Buffer buffer = databaseSession.create(Buffer.class);
            buffer.set("data",
                    quantizeVertices(tuple.getB().asDoubleBuffer(), quantizationMatrix, multiplierToMm)
                            .array());
            //            buffer.setData(quantizeVertices(tuple.getB(), quantizationMatrix, multiplierToMm).array());
            //            databaseSession.store(buffer);
            buffer.save();
            HashMapVirtualObject geometryData = tuple.getA();
            geometryData.set("verticesQuantized", buffer.getOid());
            int reused = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_Reused());
            int nrTriangles = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_NrIndices())
                    / 3;
            int saveableTriangles = Math.max(0, (reused - 1)) * nrTriangles;
            geometryData.set("saveableTriangles", saveableTriangles);
            //            if (saveableTriangles > 0) {
            //               System.out.println("Saveable triangles: " + saveableTriangles);
            //            }
            geometryData.saveOverwrite();
        }

        long end = System.nanoTime();
        long total = totalBytes.get()
                - (bytesSavedByHash.get() + bytesSavedByTransformation.get() + bytesSavedByMapping.get());
        LOGGER.info("Rendertime: " + Formatters.nanosToString(end - start) + ", " + "Reused (by hash): "
                + Formatters.bytesToString(bytesSavedByHash.get()) + ", Reused (by transformation): "
                + Formatters.bytesToString(bytesSavedByTransformation.get()) + ", Reused (by mapping): "
                + Formatters.bytesToString(bytesSavedByMapping.get()) + ", Total: "
                + Formatters.bytesToString(totalBytes.get()) + ", Final: " + Formatters.bytesToString(total));
        if (report.getNumberOfDebugFiles() > 0) {
            LOGGER.error("Number of erroneous files: " + report.getNumberOfDebugFiles());
        }
        Map<String, Integer> skipped = report.getSkippedBecauseOfInvalidRepresentationIdentifier();
        if (skipped.size() > 0) {
            LOGGER.error("Number of representations skipped:");
            for (String identifier : skipped.keySet()) {
                LOGGER.error("\t" + identifier + ": " + skipped.get(identifier));
            }
        }
        String dump = geometryGenerationDebugger.dump();
        if (dump != null) {
            LOGGER.info(dump);
        }
    } catch (Exception e) {
        running = false;
        LOGGER.error("", e);
        report.setEnd(new GregorianCalendar());
        throw new GeometryGeneratingException(e);
    }
    report.setEnd(new GregorianCalendar());
    try {
        if (report.getNumberOfDebugFiles() > 0) {
            writeDebugFile();
        }
    } catch (IOException e) {
        LOGGER.debug("", e);
    }
    return generateGeometryResult;
}

From source file:org.apache.hadoop.hdfs.server.namenode.SnapshotNode.java

/**
 * Tries to get the most up to date lengths of files under construction.
 */
void updateLeasedFiles(SnapshotStorage ssStore) throws IOException {
    FSNamesystem fsNamesys = ssStore.getFSNamesystem();
    List<Block> blocksForNN = new ArrayList<Block>();

    leaseUpdateThreadPool = new ThreadPoolExecutor(1, maxLeaseUpdateThreads, 60, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());
    ((ThreadPoolExecutor) leaseUpdateThreadPool).allowCoreThreadTimeOut(true);

    // Try to update lengths for leases from DN
    LightWeightLinkedSet<Lease> sortedLeases = fsNamesys.leaseManager.getSortedLeases();
    Iterator<Lease> itr = sortedLeases.iterator();
    while (itr.hasNext()) {
        Lease lease = itr.next();
        for (String path : lease.getPaths()) {
            // Update file lengths using worker threads to increase throughput
            leaseUpdateThreadPool.execute(new LeaseUpdateWorker(conf, path, fsNamesys, blocksForNN));
        }
    }

    try {
        leaseUpdateThreadPool.shutdown();
        // Wait till update tasks finish successfully (max 20 mins?)
        if (!leaseUpdateThreadPool.awaitTermination(1200, TimeUnit.SECONDS)) {
            throw new IOException("Updating lease files failed");
        }
    } catch (InterruptedException e) {
        throw new IOException("Snapshot creation interrupted while updating leased files");
    }

    // Fetch block lengths for renamed/deleted leases from NN
    long[] blockIds = new long[blocksForNN.size()];

    for (int i = 0; i < blocksForNN.size(); ++i) {
        blockIds[i] = blocksForNN.get(i).getBlockId();
    }

    long[] lengths = namenode.getBlockLengths(blockIds);

    for (int i = 0; i < blocksForNN.size(); ++i) {
        if (lengths[i] == -1) {
            // Couldn't update block length, keep preferred length
            LOG.error("Couldn't update length for block " + blocksForNN.get(i));
        } else {
            blocksForNN.get(i).setNumBytes(lengths[i]);
        }
    }
}
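
Despite the comment about using worker threads to increase throughput, this pool stays single-threaded for the same reason as the JobHistory example above: with corePoolSize 1 and an unbounded LinkedBlockingQueue, it never grows toward maxLeaseUpdateThreads. One hedged fix that keeps the idle-thread reclamation is to make the core size the intended maximum:

leaseUpdateThreadPool = new ThreadPoolExecutor(maxLeaseUpdateThreads, maxLeaseUpdateThreads, 60,
        TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
// Core threads may now time out, so the pool still shrinks to zero when idle.
((ThreadPoolExecutor) leaseUpdateThreadPool).allowCoreThreadTimeOut(true);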

From source file:org.opennms.newts.gsod.ImportRunner.java

private Observable<Boolean> parMap(Observable<List<Sample>> samples, MetricRegistry metrics,
        Func1<List<Sample>, Boolean> insert) {

    final Timer waitTime = metrics.timer("wait-time");

    @SuppressWarnings("serial")
    final BlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<Runnable>(
            m_maxThreadQueueSize == 0 ? m_threadCount * 3 : m_maxThreadQueueSize) {

        @Override
        public boolean offer(Runnable r) {
            try (Context time = waitTime.time()) {
                this.put(r);
                return true;
            } catch (InterruptedException e) {
                throw Exceptions.propagate(e);
            }
        }

        @Override
        public boolean add(Runnable r) {
            try (Context time = waitTime.time()) {
                this.put(r);
                return true;
            } catch (InterruptedException e) {
                throw Exceptions.propagate(e);
            }
        }

    };
    final ThreadPoolExecutor executor = new ThreadPoolExecutor(m_threadCount, m_threadCount, 0L,
            TimeUnit.MILLISECONDS, workQueue);

    metrics.register("active-threads", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return executor.getActiveCount();
        }

    });

    metrics.register("pool-size", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return executor.getPoolSize();
        }

    });
    metrics.register("largest-pool-size", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return executor.getLargestPoolSize();
        }

    });

    metrics.register("work-queue-size", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return workQueue.size();
        }

    });

    return parMap(samples, executor, metrics, insert);
}
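
The queue subclass above is the standard trick for adding backpressure to a ThreadPoolExecutor: execute() hands tasks to the queue via offer(), which fails fast when the queue is full, so overriding offer() (and add()) to delegate to the blocking put() makes producers wait instead of triggering rejection; the waitTime timer measures exactly that stall. This is only safe because corePoolSize equals maximumPoolSize here, since a blocking offer() never reports failure, and a failed offer() is the signal the executor uses to grow the pool past its core size.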

From source file:org.nuxeo.ecm.automation.server.jaxrs.batch.BatchManagerFixture.java

@Test
public void testBatchConcurrency() throws Exception {

    BatchManager bm = Framework.getService(BatchManager.class);

    // Initialize batches with one file concurrently
    int nbBatches = 100;
    String[] batchIds = new String[nbBatches];
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(nbBatches + 1));

    for (int i = 0; i < nbBatches; i++) {
        final int batchIndex = i;
        tpe.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    String batchId = bm.initBatch();
                    bm.addStream(batchId, "0",
                            new ByteArrayInputStream(
                                    ("SomeContent_" + batchId).getBytes(StandardCharsets.UTF_8)),
                            "MyBatchFile.txt", "text/plain");
                    batchIds[batchIndex] = batchId;
                } catch (IOException e) {
                    fail(e.getMessage());
                }
            }
        });
    }

    tpe.shutdown();
    boolean finish = tpe.awaitTermination(20, TimeUnit.SECONDS);
    assertTrue("timeout", finish);

    // Check batches
    for (String batchId : batchIds) {
        assertNotNull(batchId);
    }
    // Test indexes 0, 9, 99, ..., nbBatches - 1
    int nbDigits = (int) (Math.log10(nbBatches) + 1);
    int divisor = nbBatches;
    for (int i = 0; i < nbDigits; i++) {
        int batchIndex = nbBatches / divisor - 1;
        String batchId = batchIds[batchIndex];
        Blob blob = bm.getBlob(batchId, "0");
        assertNotNull(blob);
        assertEquals("MyBatchFile.txt", blob.getFilename());
        assertEquals("SomeContent_" + batchId, blob.getString());
        divisor = divisor / 10;
    }

    // Check storage size
    TransientStore ts = bm.getTransientStore();
    assertTrue(((AbstractTransientStore) ts).getStorageSize() > 12 * nbBatches);

    // Clean batches
    for (String batchId : batchIds) {
        bm.clean(batchId);
    }
    assertEquals(0, ts.getStorageSizeMB());
}