Example usage for java.util.concurrent ThreadPoolExecutor awaitTermination

Introduction

On this page you can find example usages of java.util.concurrent ThreadPoolExecutor awaitTermination, drawn from open-source projects.

Prototype

public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException 
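
Per the Javadoc, the call blocks until all tasks have completed execution after a shutdown request, the timeout elapses, or the current thread is interrupted, whichever happens first, and it returns true only if the executor actually terminated. A minimal sketch of the usual idiom (pool size and timeout are arbitrary):

import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationExample {
    public static void main(String[] args) throws InterruptedException {
        ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            executor.execute(() -> System.out.println("task " + taskId));
        }
        executor.shutdown(); // no new tasks are accepted; queued tasks still run
        // Wait up to 30 seconds; the return value says whether the pool terminated.
        if (!executor.awaitTermination(30, TimeUnit.SECONDS)) {
            executor.shutdownNow(); // timed out: interrupt the remaining tasks
        }
    }
}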

Usage

From source file: org.apache.hama.graph.GraphJobRunner.java

/**
 * Do the main logic of a superstep, namely checking if vertices are active,
 * feeding compute with messages and controlling combiners/aggregators. We
 * iterate over our messages and vertices in sorted order. That means that we
 * need to seek the first vertex that has the same ID as the iterated message.
 */
@SuppressWarnings("unchecked")
private void doSuperstep(GraphJobMessage currentMessage,
        BSPPeer<Writable, Writable, Writable, Writable, GraphJobMessage> peer) throws IOException {
    this.errorCount.set(0);
    long startTime = System.currentTimeMillis();

    this.changedVertexCnt = 0;
    vertices.startSuperstep();

    ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
    executor.setMaximumPoolSize(conf.getInt(DEFAULT_THREAD_POOL_SIZE, 64));
    executor.setRejectedExecutionHandler(retryHandler);

    long loopStartTime = System.currentTimeMillis();
    while (currentMessage != null) {
        executor.execute(new ComputeRunnable(currentMessage));

        currentMessage = peer.getCurrentMessage();
    }
    LOG.info("Total time spent for superstep-" + peer.getSuperstepCount() + " looping: "
            + (System.currentTimeMillis() - loopStartTime) + " ms");

    executor.shutdown();
    try {
        executor.awaitTermination(60, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }

    if (errorCount.get() > 0) {
        throw new IOException("there were " + errorCount + " exceptions during compute vertices.");
    }

    Iterator it = vertices.iterator();
    while (it.hasNext()) {
        Vertex<V, E, M> vertex = (Vertex<V, E, M>) it.next();
        if (!vertex.isHalted() && !vertex.isComputed()) {
            vertex.compute(Collections.<M>emptyList());
            vertices.finishVertexComputation(vertex);
        }
    }

    getAggregationRunner().sendAggregatorValues(peer, vertices.getActiveVerticesNum(), this.changedVertexCnt);
    this.iteration++;

    LOG.info("Total time spent for superstep-" + peer.getSuperstepCount() + " computing vertices: "
            + (System.currentTimeMillis() - startTime) + " ms");

    startTime = System.currentTimeMillis();
    finishSuperstep();
    LOG.info("Total time spent for superstep-" + peer.getSuperstepCount() + " synchronizing: "
            + (System.currentTimeMillis() - startTime) + " ms");
}
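
The retryHandler installed on the pool above is not part of this excerpt. A plausible sketch of such a handler (hypothetical, not the actual Hama class) re-queues rejected tasks so that work is not dropped when the pool is saturated:

import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;

// Hypothetical sketch of the retryHandler referenced above; the real
// implementation is not included in this excerpt.
public class RetryRejectedExecutionHandler implements RejectedExecutionHandler {
    @Override
    public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
        try {
            // Block until a worker is free to take the task again.
            executor.getQueue().put(r);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}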

From source file: org.apache.hama.graph.GraphJobRunner.java

/**
 * Seed the vertices first with their own values in compute. This is the first
 * superstep after the vertices have been loaded.
 */
private void doInitialSuperstep(BSPPeer<Writable, Writable, Writable, Writable, GraphJobMessage> peer)
        throws IOException {
    this.changedVertexCnt = 0;
    this.errorCount.set(0);
    vertices.startSuperstep();

    ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
    executor.setMaximumPoolSize(conf.getInt(DEFAULT_THREAD_POOL_SIZE, 64));
    executor.setRejectedExecutionHandler(retryHandler);

    for (V v : vertices.keySet()) {
        executor.execute(new ComputeRunnable(v));
    }

    executor.shutdown();
    try {
        executor.awaitTermination(60, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }

    if (errorCount.get() > 0) {
        throw new IOException("there were " + errorCount + " exceptions during compute vertices.");
    }

    getAggregationRunner().sendAggregatorValues(peer, 1, this.changedVertexCnt);
    iteration++;
    finishSuperstep();
}
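
Because execute() returns no Future, both supersteps report worker failures through the shared errorCount counter, which is only checked once awaitTermination has drained the pool. A standalone sketch of that pattern (task body and names are placeholders):

import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class ErrorCountingExample {
    public static void main(String[] args) throws InterruptedException {
        final AtomicInteger errorCount = new AtomicInteger();
        ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(4);
        for (int i = 0; i < 8; i++) {
            executor.execute(() -> {
                try {
                    // ... real work would go here ...
                } catch (Exception e) {
                    errorCount.incrementAndGet(); // record the failure for later
                }
            });
        }
        executor.shutdown();
        executor.awaitTermination(60, TimeUnit.SECONDS);
        if (errorCount.get() > 0) {
            throw new IllegalStateException(errorCount + " tasks failed");
        }
    }
}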

From source file: org.apache.hama.graph.GraphJobRunner.java

/**
 * Loads vertices into memory of each peer.
 */
@SuppressWarnings("unchecked")
private void loadVertices(BSPPeer<Writable, Writable, Writable, Writable, GraphJobMessage> peer)
        throws IOException, SyncException, InterruptedException {

    for (int i = 0; i < peer.getNumPeers(); i++) {
        partitionMessages.put(i, new GraphJobMessage());
    }

    VertexInputReader<Writable, Writable, V, E, M> reader = (VertexInputReader<Writable, Writable, V, E, M>) ReflectionUtils
            .newInstance(conf.getClass(Constants.RUNTIME_PARTITION_RECORDCONVERTER, VertexInputReader.class));

    ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
    executor.setMaximumPoolSize(conf.getInt(DEFAULT_THREAD_POOL_SIZE, 64));
    executor.setRejectedExecutionHandler(retryHandler);

    KeyValuePair<Writable, Writable> next = null;

    while ((next = peer.readNext()) != null) {
        Vertex<V, E, M> vertex = GraphJobRunner.<V, E, M>newVertexInstance(VERTEX_CLASS);

        boolean vertexFinished = false;
        try {
            vertexFinished = reader.parseVertex(next.getKey(), next.getValue(), vertex);
        } catch (Exception e) {
            throw new IOException("Parse exception occured: " + e);
        }

        if (!vertexFinished) {
            continue;
        }

        Runnable worker = new Parser(vertex);
        executor.execute(worker);

    }

    executor.shutdown();
    executor.awaitTermination(60, TimeUnit.SECONDS);

    Iterator<Entry<Integer, GraphJobMessage>> it = partitionMessages.entrySet().iterator();
    while (it.hasNext()) {
        Entry<Integer, GraphJobMessage> e = it.next();
        it.remove();
        GraphJobMessage msg = e.getValue();
        msg.setFlag(GraphJobMessage.PARTITION_FLAG);
        peer.send(getHostName(e.getKey()), msg);
    }

    peer.sync();

    executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
    executor.setMaximumPoolSize(conf.getInt(DEFAULT_THREAD_POOL_SIZE, 64));
    executor.setRejectedExecutionHandler(retryHandler);

    GraphJobMessage msg;
    while ((msg = peer.getCurrentMessage()) != null) {
        executor.execute(new AddVertex(msg));
    }

    executor.shutdown();
    executor.awaitTermination(60, TimeUnit.SECONDS);

    LOG.info(vertices.size() + " vertices are loaded into " + peer.getPeerName());
}
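
The shutdown-then-await sequence occurs three times in this class, and the boolean returned by awaitTermination is ignored each time. A small helper (a sketch, not part of the Hama source; class and method names are made up) factors the pattern out and turns a timeout into an error:

import java.io.IOException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

final class ExecutorShutdownUtil {
    // Sketch of a reusable helper for the repeated shutdown/await pattern.
    static void shutdownAndAwait(ThreadPoolExecutor executor, long timeoutSeconds) throws IOException {
        executor.shutdown();
        try {
            if (!executor.awaitTermination(timeoutSeconds, TimeUnit.SECONDS)) {
                executor.shutdownNow(); // interrupt tasks that are still running
                throw new IOException("executor did not terminate within " + timeoutSeconds + " seconds");
            }
        } catch (InterruptedException e) {
            throw new IOException(e);
        }
    }
}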

From source file: org.apache.jmeter.protocol.http.sampler.HTTPSamplerBaseClassifier.java

/**
 * Download the resources of an HTML page.
 *
 * @param res
 *            result of the initial request - must contain an HTML response
 * @param container
 *            for storing the results, if any
 * @param frameDepth
 *            Depth of this target in the frame structure. Used only to
 *            prevent infinite recursion.
 * @return res if no resources exist, otherwise the "Container" result with
 *         one subsample per request issued
 */
protected HTTPSampleResult downloadPageResources(HTTPSampleResult res, HTTPSampleResult container,
        int frameDepth) {
    Iterator<URL> urls = null;
    try {
        final byte[] responseData = res.getResponseData();
        if (responseData.length > 0) { // Bug 39205
            String parserName = getParserClass(res);
            if (parserName != null) {
                // If we have a parser name use it, otherwise fall back to the default parser.
                final HTMLParser parser = parserName.length() > 0 ? HTMLParser.getParser(parserName)
                        : HTMLParser.getParser();
                urls = parser.getEmbeddedResourceURLs(responseData, res.getURL(),
                        res.getDataEncodingWithDefault());
            }
        }
    } catch (HTMLParseException e) {
        // Don't break the world just because this failed:
        res.addSubResult(errorResult(e, new HTTPSampleResult(res)));
        setParentSampleSuccess(res, false);
    }

    // Iterate through the URLs and download each resource:
    if (urls != null && urls.hasNext()) {
        if (container == null) {
            // TODO needed here because currently done on sample completion
            // in JMeterThread,
            // but that only catches top-level samples.
            res.setThreadName(Thread.currentThread().getName());
            container = new HTTPSampleResult(res);
            container.addRawSubResult(res);
        }
        res = container;

        // Get the URL matcher
        String re = getEmbeddedUrlRE();
        Perl5Matcher localMatcher = null;
        Pattern pattern = null;
        if (re.length() > 0) {
            try {
                pattern = JMeterUtils.getPattern(re);
                localMatcher = JMeterUtils.getMatcher(); // don't fetch unless the pattern compiles
            } catch (MalformedCachePatternException e) {
                log.warn("Ignoring embedded URL match string: " + e.getMessage());
            }
        }

        // Tasks for fetching the embedded resources concurrently
        final List<Callable<AsynSamplerResultHolder>> liste = new ArrayList<Callable<AsynSamplerResultHolder>>();

        while (urls.hasNext()) {
            Object binURL = urls.next(); // See catch clause below
            try {
                URL url = (URL) binURL;
                if (url == null) {
                    log.warn("Null URL detected (should not happen)");
                } else {
                    String urlstr = url.toString();
                    String urlStrEnc = encodeSpaces(urlstr);
                    if (!urlstr.equals(urlStrEnc)) { // there were spaces in the URL
                        try {
                            url = new URL(urlStrEnc);
                        } catch (MalformedURLException e) {
                            res.addSubResult(errorResult(new Exception(urlStrEnc + " is not a correct URI"),
                                    new HTTPSampleResult(res)));
                            setParentSampleSuccess(res, false);
                            continue;
                        }
                    }
                    // I don't think localMatcher can be null here, but
                    // check just in case
                    if (pattern != null && localMatcher != null && !localMatcher.matches(urlStrEnc, pattern)) {
                        continue; // we have a pattern and the URL does not match, so skip it
                    }

                    if (isConcurrentDwn()) {
                        // concurrent download of embedded resources: queue for async execution later
                        liste.add(new ASyncSample(url, HTTPConstants.GET, false, frameDepth + 1,
                                getCookieManager(), this));
                    } else {
                        // default: serial download embedded resources
                        HTTPSampleResult binRes = sample(url, HTTPConstants.GET, false, frameDepth + 1);
                        res.addSubResult(binRes);
                        setParentSampleSuccess(res, res.isSuccessful() && binRes.isSuccessful());
                    }

                }
            } catch (ClassCastException e) { // TODO can this happen?
                res.addSubResult(errorResult(new Exception(binURL + " is not a correct URI"),
                        new HTTPSampleResult(res)));
                setParentSampleSuccess(res, false);
                continue;
            }
        }
        // Handle concurrent download of embedded resources
        if (isConcurrentDwn()) {
            int poolSize = CONCURRENT_POOL_SIZE; // init with default value
            try {
                poolSize = Integer.parseInt(getConcurrentPool());
            } catch (NumberFormatException nfe) {
                log.warn("Concurrent download resources selected, "// $NON-NLS-1$
                        + "but pool size value is bad. Use default value");// $NON-NLS-1$
            }
            // Thread pool executor to fetch the resources
            // Note: with an unbounded LinkedBlockingQueue the maximum pool size has no effect
            final ThreadPoolExecutor exec = new ThreadPoolExecutor(poolSize, poolSize, KEEPALIVETIME,
                    TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), new ThreadFactory() {

                        public Thread newThread(final Runnable r) {
                            Thread t = new CleanerThread(new Runnable() {

                                public void run() {
                                    try {
                                        r.run();
                                    } finally {
                                        ((CleanerThread) Thread.currentThread()).notifyThreadEnd();
                                    }
                                }
                            });
                            return t;
                        }
                    });

            boolean tasksCompleted = false;
            try {
                // sample all resources with threadpool
                final List<Future<AsynSamplerResultHolder>> retExec = exec.invokeAll(liste);
                // orderly shutdown: lets already-submitted tasks finish
                exec.shutdown();
                // bounded wait in case some tasks fail to terminate
                exec.awaitTermination(AWAIT_TERMINATION_TIMEOUT, TimeUnit.SECONDS);
                CookieManager cookieManager = getCookieManager();
                // add result to main sampleResult
                for (Future<AsynSamplerResultHolder> future : retExec) {
                    AsynSamplerResultHolder binRes;
                    try {
                        binRes = future.get(1, TimeUnit.MILLISECONDS);
                        if (cookieManager != null) {
                            CollectionProperty cookies = binRes.getCookies();
                            PropertyIterator iter = cookies.iterator();
                            while (iter.hasNext()) {
                                Cookie cookie = (Cookie) iter.next().getObjectValue();
                                cookieManager.add(cookie);
                            }
                        }
                        res.addSubResult(binRes.getResult());
                        setParentSampleSuccess(res, res.isSuccessful() && binRes.getResult().isSuccessful());
                    } catch (TimeoutException e) {
                        errorResult(e, res);
                    }
                }
                tasksCompleted = exec.awaitTermination(1, TimeUnit.MILLISECONDS); // did all the tasks finish?
            } catch (InterruptedException ie) {
                log.warn("Interruped fetching embedded resources", ie); // $NON-NLS-1$
            } catch (ExecutionException ee) {
                log.warn("Execution issue when fetching embedded resources", ee); // $NON-NLS-1$
            } finally {
                if (!tasksCompleted) {
                    exec.shutdownNow(); // kill any remaining tasks
                }
            }
        }
    }
    return res;
}
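
Worth noting: invokeAll already blocks until every submitted Callable has completed, so by the time shutdown() runs the pool is idle and awaitTermination mainly confirms that the worker threads have exited; the later awaitTermination(1, TimeUnit.MILLISECONDS) call is just a cheap "has it terminated yet" probe. A standalone sketch of those semantics (no JMeter classes involved):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class InvokeAllExample {
    public static void main(String[] args) throws Exception {
        ExecutorService exec = Executors.newFixedThreadPool(2);
        List<Callable<String>> tasks = Arrays.asList(() -> "a", () -> "b", () -> "c");
        // invokeAll blocks until every task has completed, so each Future is already done.
        List<Future<String>> results = exec.invokeAll(tasks);
        for (Future<String> f : results) {
            System.out.println(f.get()); // does not block; the task already finished
        }
        exec.shutdown();
        // With no work left, even a short timeout reports termination.
        System.out.println("terminated: " + exec.awaitTermination(1, TimeUnit.SECONDS));
    }
}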

From source file: org.batoo.jpa.benchmark.BenchmarkTest.java

private void waitUntilFinish(ThreadPoolExecutor executor) {
    final BlockingQueue<Runnable> workQueue = executor.getQueue();
    try {
        final long started = System.currentTimeMillis();

        int lastToGo = workQueue.size();

        final int total = workQueue.size();
        int performed = 0;

        int maxStatusMessageLength = 0;
        while (!workQueue.isEmpty()) {
            final float doneNow = lastToGo - workQueue.size();
            performed += doneNow;

            final float elapsed = (System.currentTimeMillis() - started) / 1000f;

            lastToGo = workQueue.size();

            if (performed > 0) {
                final float throughput = performed / elapsed;
                final float eta = ((elapsed * total) / performed) - elapsed;

                final float percentDone = (100 * (float) lastToGo) / total;
                final int gaugeDone = (int) ((100 - percentDone) / 5);
                final String gauge = "[" + StringUtils.repeat("=", gaugeDone)
                        + StringUtils.repeat("-", 20 - gaugeDone) + "]";

                final String sampling = this.profilingQueue.size() > 0
                        ? MessageFormat.format(" | Samples {0}", this.profilingQueue.size())
                        : "";

                if ((maxStatusMessageLength != 0) || (eta > 5)) {
                    String statusMessage = MessageFormat.format(
                            "\r{4} %{5,number,00.00} | ETA {2} | LAST TPS {0} ops / sec | AVG TPS {1,number,#.0} | LEFT {3}{6}", //
                            doneNow, throughput, this.etaToString((int) eta), workQueue.size(), gauge,
                            percentDone, sampling);

                    maxStatusMessageLength = Math.max(statusMessage.length(), maxStatusMessageLength);
                    statusMessage = StringUtils.rightPad(statusMessage, maxStatusMessageLength);
                    System.out.print(statusMessage);
                }
            }

            if (elapsed > BenchmarkTest.MAX_TEST_TIME) {
                throw new IllegalStateException("Max allowed test time exceeded");
            }

            Thread.sleep(1000);
        }

        if (maxStatusMessageLength > 0) {
            System.out.print("\r" + StringUtils.repeat(" ", maxStatusMessageLength) + "\r");
        }

        executor.shutdown();

        if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
            BenchmarkTest.LOG.warn("Forcefully shutting down the thread pool");

            executor.shutdownNow();
        }

        BenchmarkTest.LOG.warn("Iterations completed");
    } catch (final InterruptedException e) {
        throw new RuntimeException(e);
    }
}
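
The timeout-then-shutdownNow logic above closely follows the two-phase shutdown pattern recommended in the ExecutorService Javadoc; for comparison, a compact version of that pattern (timeouts arbitrary):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

final class PoolShutdown {
    // Two-phase shutdown, adapted from the ExecutorService Javadoc.
    static void shutdownAndAwaitTermination(ExecutorService pool) {
        pool.shutdown(); // disable submission of new tasks
        try {
            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
                pool.shutdownNow(); // cancel currently executing tasks
                if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
                    System.err.println("Pool did not terminate");
                }
            }
        } catch (InterruptedException ie) {
            pool.shutdownNow();
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }
}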

From source file: org.bimserver.geometry.StreamingGeometryGenerator.java

@SuppressWarnings("unchecked")
public GenerateGeometryResult generateGeometry(long uoid, final DatabaseSession databaseSession,
        QueryContext queryContext, long nrObjects)
        throws BimserverDatabaseException, GeometryGeneratingException {
    GenerateGeometryResult generateGeometryResult = new GenerateGeometryResult();
    packageMetaData = queryContext.getPackageMetaData();
    productClass = packageMetaData.getEClass("IfcProduct");
    geometryFeature = productClass.getEStructuralFeature("geometry");
    representationFeature = productClass.getEStructuralFeature("Representation");
    representationsFeature = packageMetaData.getEClass("IfcProductDefinitionShape")
            .getEStructuralFeature("Representations");
    itemsFeature = packageMetaData.getEClass("IfcShapeRepresentation").getEStructuralFeature("Items");
    mappingSourceFeature = packageMetaData.getEClass("IfcMappedItem").getEStructuralFeature("MappingSource");

    GregorianCalendar now = new GregorianCalendar();
    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
    debugIdentifier = dateFormat.format(now.getTime()) + " (" + report.getOriginalIfcFileName() + ")";

    long start = System.nanoTime();
    String pluginName = "";
    if (queryContext.getPackageMetaData().getSchema() == Schema.IFC4) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc4StepStreamingSerializerPlugin";
    } else if (queryContext.getPackageMetaData().getSchema() == Schema.IFC2X3TC1) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc2x3tc1StepStreamingSerializerPlugin";
    } else {
        throw new GeometryGeneratingException(
                "Unknown schema " + queryContext.getPackageMetaData().getSchema());
    }

    reuseGeometry = bimServer.getServerSettingsCache().getServerSettings().isReuseGeometry();
    optimizeMappedItems = bimServer.getServerSettingsCache().getServerSettings().isOptimizeMappedItems();

    report.setStart(new GregorianCalendar());
    report.setIfcSchema(queryContext.getPackageMetaData().getSchema());
    report.setUseMappingOptimization(optimizeMappedItems);
    report.setReuseGeometry(reuseGeometry);

    try {
        final StreamingSerializerPlugin ifcSerializerPlugin = (StreamingSerializerPlugin) bimServer
                .getPluginManager().getPlugin(pluginName, true);
        if (ifcSerializerPlugin == null) {
            throw new UserException("No IFC serializer found");
        }

        User user = (User) databaseSession.get(uoid, org.bimserver.database.OldQuery.getDefault());
        UserSettings userSettings = user.getUserSettings();

        report.setUserName(user.getName());
        report.setUserUserName(user.getUsername());

        RenderEnginePluginConfiguration renderEngine = null;
        if (eoid != -1) {
            renderEngine = databaseSession.get(eoid, OldQuery.getDefault());
        } else {
            renderEngine = userSettings.getDefaultRenderEngine();
        }
        if (renderEngine == null) {
            throw new UserException("No default render engine has been selected for this user");
        }
        renderEngineName = renderEngine.getName();

        int availableProcessors = Runtime.getRuntime().availableProcessors();
        report.setAvailableProcessors(availableProcessors);

        int maxSimultanousThreads = Math.min(
                bimServer.getServerSettingsCache().getServerSettings().getRenderEngineProcesses(),
                availableProcessors);
        if (maxSimultanousThreads < 1) {
            maxSimultanousThreads = 1;
        }

        final RenderEngineSettings settings = new RenderEngineSettings();
        settings.setPrecision(Precision.SINGLE);
        settings.setIndexFormat(IndexFormat.AUTO_DETECT);
        settings.setGenerateNormals(true);
        settings.setGenerateTriangles(true);
        settings.setGenerateWireFrame(false);

        final RenderEngineFilter renderEngineFilter = new RenderEngineFilter();

        RenderEnginePool renderEnginePool = bimServer.getRenderEnginePools().getRenderEnginePool(
                packageMetaData.getSchema(), renderEngine.getPluginDescriptor().getPluginClassName(),
                bimServer.getPluginSettingsCache().getPluginSettings(renderEngine.getOid()));

        report.setRenderEngineName(renderEngine.getName());
        report.setRenderEnginePluginVersion(
                renderEngine.getPluginDescriptor().getPluginBundleVersion().getVersion());

        try (RenderEngine engine = renderEnginePool.borrowObject()) {
            VersionInfo versionInfo = renderEnginePool.getRenderEngineFactory().getVersionInfo();
            report.setRenderEngineVersion(versionInfo);
            applyLayerSets = engine.isApplyLayerSets();
            report.setApplyLayersets(applyLayerSets);
            calculateQuantities = engine.isCalculateQuantities();
            report.setCalculateQuantities(calculateQuantities);
        }

        // TODO reuse, pool the pools :) Or something smarter
        // TODO reuse queue, or try to determine a realistic size, or don't use a fixed-size queue
        ThreadPoolExecutor executor = new ThreadPoolExecutor(maxSimultanousThreads, maxSimultanousThreads, 24,
                TimeUnit.HOURS, new ArrayBlockingQueue<Runnable>(10000000));

        JsonQueryObjectModelConverter jsonQueryObjectModelConverter = new JsonQueryObjectModelConverter(
                packageMetaData);
        String queryNameSpace = packageMetaData.getSchema().name().toLowerCase() + "-stdlib";

        // All references should already be direct, since this is now done in BimServer on startup, quite the hack...
        Include objectPlacement = jsonQueryObjectModelConverter
                .getDefineFromFile(queryNameSpace + ":ObjectPlacement", true);

        Set<EClass> classes = null;
        if (queryContext.getOidCounters() != null) {
            classes = queryContext.getOidCounters().keySet();
        } else {
            classes = packageMetaData.getEClasses();
        }

        float multiplierToMm = processUnits(databaseSession, queryContext);
        generateGeometryResult.setMultiplierToMm(multiplierToMm);

        // Phase 1 (mapped item detection) sometimes detects that mapped items have invalid (unsupported) RepresentationIdentifier values, this set keeps track of objects to skip in Phase 2 because of that
        Set<Long> toSkip = new HashSet<>();

        for (EClass eClass : classes) {
            if (packageMetaData.getEClass("IfcProduct").isSuperTypeOf(eClass)) {
                int nrObjectsForType = 0;

                Query query2 = new Query(eClass.getName() + "Main query", packageMetaData);
                QueryPart queryPart2 = query2.createQueryPart();
                queryPart2.addType(eClass, false);
                Include representationInclude = queryPart2.createInclude();
                representationInclude.addType(eClass, false);
                representationInclude.addFieldDirect("Representation");
                Include representationsInclude = representationInclude.createInclude();
                representationsInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
                representationsInclude.addFieldDirect("Representations");
                Include itemsInclude = representationsInclude.createInclude();
                itemsInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), false);
                itemsInclude.addFieldDirect("Items");
                itemsInclude.addFieldDirect("ContextOfItems");
                Include mappingSourceInclude = itemsInclude.createInclude();
                mappingSourceInclude.addType(packageMetaData.getEClass("IfcMappedItem"), false);
                mappingSourceInclude.addFieldDirect("MappingSource");
                mappingSourceInclude.addFieldDirect("MappingTarget");
                Include representationMap = mappingSourceInclude.createInclude();
                representationMap.addType(packageMetaData.getEClass("IfcRepresentationMap"), false);
                representationMap.addFieldDirect("MappedRepresentation");
                Include createInclude = representationMap.createInclude();
                createInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), true);

                Include targetInclude = mappingSourceInclude.createInclude();
                targetInclude.addType(packageMetaData.getEClass("IfcCartesianTransformationOperator3D"), false);
                targetInclude.addFieldDirect("Axis1");
                targetInclude.addFieldDirect("Axis2");
                targetInclude.addFieldDirect("Axis3");
                targetInclude.addFieldDirect("LocalOrigin");

                queryPart2.addInclude(objectPlacement);

                Map<Long, Map<Long, ProductDef>> representationMapToProduct = new HashMap<>();

                QueryObjectProvider queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer,
                        query2, Collections.singleton(queryContext.getRoid()), packageMetaData);
                HashMapVirtualObject next = queryObjectProvider2.next();
                int nrProductsWithRepresentation = 0;
                while (next != null) {
                    if (next.eClass() == eClass) {
                        AbstractHashMapVirtualObject representation = next
                                .getDirectFeature(representationFeature);
                        if (representation != null) {
                            Set<HashMapVirtualObject> representations = representation
                                    .getDirectListFeature(representationsFeature);
                            if (representations != null) {
                                boolean foundValidContext = false;
                                for (HashMapVirtualObject representationItem : representations) {
                                    if (usableContext(representationItem)) {
                                        foundValidContext = true;
                                    }
                                }
                                if (foundValidContext) {
                                    nrProductsWithRepresentation++;
                                }
                                for (HashMapVirtualObject representationItem : representations) {
                                    if (!usableContext(representationItem) && foundValidContext) {
                                        continue;
                                    }
                                    if (hasValidRepresentationIdentifier(representationItem)) {
                                        Set<HashMapVirtualObject> items = representationItem
                                                .getDirectListFeature(itemsFeature);
                                        if (items == null || items.size() > 1) {
                                            // Only if there is just one item, we'll store this for reuse
                                            continue;
                                        }
                                            // So this loop always runs at most once
                                        for (HashMapVirtualObject item : items) {
                                            report.addRepresentationItem(item.eClass().getName());
                                            if (!packageMetaData.getEClass("IfcMappedItem")
                                                    .isSuperTypeOf(item.eClass())) {
                                                nrObjectsForType++;
                                                continue; // All non IfcMappedItem objects will be done in phase 2
                                            }
                                            AbstractHashMapVirtualObject mappingTarget = item
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcMappedItem", "MappingTarget"));
                                            AbstractHashMapVirtualObject mappingSourceOfMappedItem = item
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcMappedItem", "MappingSource"));
                                            if (mappingSourceOfMappedItem == null) {
                                                LOGGER.info("No mapping source");
                                                continue;
                                            }
                                            AbstractHashMapVirtualObject mappedRepresentation = mappingSourceOfMappedItem
                                                    .getDirectFeature(packageMetaData.getEReference(
                                                            "IfcRepresentationMap", "MappedRepresentation"));

                                            if (!hasValidRepresentationIdentifier(mappedRepresentation)) {
                                                // Skip this mapping, we should store somewhere that this object should also be skipped in the normal way
                                                // TODO too many log statements, should log only 1 line for the complete model
                                                //                                       LOGGER.info("Skipping because of invalid RepresentationIdentifier in mapped item (" + (String) mappedRepresentation.get("RepresentationIdentifier") + ")");
                                                report.addSkippedBecauseOfInvalidRepresentationIdentifier(
                                                        (String) mappedRepresentation
                                                                .get("RepresentationIdentifier"));
                                                toSkip.add(next.getOid());
                                                continue;
                                            }
                                            double[] mappingMatrix = Matrix.identity();
                                            double[] productMatrix = Matrix.identity();
                                            if (mappingTarget != null) {
                                                AbstractHashMapVirtualObject axis1 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis1"));
                                                AbstractHashMapVirtualObject axis2 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis2"));
                                                AbstractHashMapVirtualObject axis3 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis3"));
                                                AbstractHashMapVirtualObject localOrigin = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator",
                                                                "LocalOrigin"));

                                                double[] a1 = null;
                                                double[] a2 = null;
                                                double[] a3 = null;

                                                if (axis3 != null) {
                                                    List<Double> list = (List<Double>) axis3
                                                            .get("DirectionRatios");
                                                    a3 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                } else {
                                                    a3 = new double[] { 0, 0, 1, 1 };
                                                    Vector.normalize(a3);
                                                }

                                                if (axis1 != null) {
                                                    List<Double> list = (List<Double>) axis1
                                                            .get("DirectionRatios");
                                                    a1 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                    Vector.normalize(a1);
                                                } else {
                                                    //                                          if (a3[0] == 1 && a3[1] == 0 && a3[2] == 0) {
                                                    a1 = new double[] { 1, 0, 0, 1 };
                                                    //                                          } else {
                                                    //                                             a1 = new double[]{0, 1, 0, 1};
                                                    //                                          }
                                                }

                                                double[] xVec = Vector.scalarProduct(Vector.dot(a1, a3), a3);
                                                double[] xAxis = Vector.subtract(a1, xVec);
                                                Vector.normalize(xAxis);

                                                if (axis2 != null) {
                                                    List<Double> list = (List<Double>) axis2
                                                            .get("DirectionRatios");
                                                    a2 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                    Vector.normalize(a2);
                                                } else {
                                                    a2 = new double[] { 0, 1, 0, 1 };
                                                }

                                                double[] tmp = Vector.scalarProduct(Vector.dot(a2, a3), a3);
                                                double[] yAxis = Vector.subtract(a2, tmp);
                                                tmp = Vector.scalarProduct(Vector.dot(a2, xAxis), xAxis);
                                                yAxis = Vector.subtract(yAxis, tmp);
                                                Vector.normalize(yAxis);

                                                a2 = yAxis;
                                                a1 = xAxis;

                                                List<Double> t = (List<Double>) localOrigin.get("Coordinates");
                                                mappingMatrix = new double[] { a1[0], a1[1], a1[2], 0, a2[0],
                                                        a2[1], a2[2], 0, a3[0], a3[1], a3[2], 0,
                                                        t.get(0).doubleValue(), t.get(1).doubleValue(),
                                                        t.get(2).doubleValue(), 1 };
                                            }

                                            AbstractHashMapVirtualObject placement = next
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcProduct", "ObjectPlacement"));
                                            if (placement != null) {
                                                productMatrix = placementToMatrix(placement);
                                            }

                                            AbstractHashMapVirtualObject mappingSource = item
                                                    .getDirectFeature(mappingSourceFeature);
                                            if (mappingSource != null) {
                                                Map<Long, ProductDef> map = representationMapToProduct
                                                        .get(((HashMapVirtualObject) mappingSource).getOid());
                                                if (map == null) {
                                                    map = new LinkedHashMap<>();
                                                    representationMapToProduct.put(
                                                            ((HashMapVirtualObject) mappingSource).getOid(),
                                                            map);
                                                }
                                                ProductDef pd = new ProductDef(next.getOid());
                                                pd.setMappedItemOid(item.getOid());
                                                pd.setObject(next);

                                                pd.setProductMatrix(productMatrix);
                                                pd.setMappingMatrix(mappingMatrix);
                                                map.put(next.getOid(), pd);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    next = queryObjectProvider2.next();
                }

                Set<Long> done = new HashSet<>();

                for (Long repMapId : representationMapToProduct.keySet()) {
                    Map<Long, ProductDef> map = representationMapToProduct.get(repMapId);

                    // When there is more than one instance using this mapping
                    if (map.size() > 1) {
                        Query query = new Query("Reuse query " + eClass.getName(), packageMetaData);
                        QueryPart queryPart = query.createQueryPart();
                        //                     QueryPart queryPart3 = query.createQueryPart();
                        queryPart.addType(eClass, false);
                        //                     queryPart3.addType(packageMetaData.getEClass("IfcMappedItem"), false);

                        long masterOid = map.values().iterator().next().getOid();

                        double[] inverted = Matrix.identity();
                        ProductDef masterProductDef = map.get(masterOid);
                        if (!Matrix.invertM(inverted, 0, masterProductDef.getMappingMatrix(), 0)) {
                            LOGGER.debug("No inverse, this mapping will be skipped and processed as normal");
                            // This is probably because of mirroring of something funky

                            // TODO we should however be able to squeeze out a little more reuse by finding another master...
                            continue;
                        }

                        for (ProductDef pd : map.values()) {
                            done.add(pd.getOid());
                            if (!optimizeMappedItems) {
                                queryPart.addOid(pd.getOid());

                                // In theory these should be fused together during querying
                                //                           queryPart3.addOid(pd.getMappedItemOid());
                            } else {
                                pd.setMasterOid(masterOid);
                            }
                        }
                        if (optimizeMappedItems) {
                            queryPart.addOid(masterOid);
                        }

                        LOGGER.debug("Running " + map.size()
                                + " objects in one batch because of reused geometry " + (eClass.getName()));

                        //                     queryPart3.addInclude(jsonQueryObjectModelConverter.getDefineFromFile("ifc2x3tc1-stdlib:IfcMappedItem"));

                        processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin,
                                settings, renderEngineFilter, renderEnginePool, executor, eClass, query,
                                queryPart, true, map, map.size());
                    }
                }

                Query query3 = new Query("Remaining " + eClass.getName(), packageMetaData);
                QueryPart queryPart3 = query3.createQueryPart();
                queryPart3.addType(eClass, false);
                Include include3 = queryPart3.createInclude();
                include3.addType(eClass, false);
                include3.addFieldDirect("Representation");
                Include rInclude = include3.createInclude();
                rInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
                rInclude.addFieldDirect("Representations");
                Include representationsInclude2 = rInclude.createInclude();
                representationsInclude2.addType(packageMetaData.getEClass("IfcShapeModel"), true);
                representationsInclude2.addFieldDirect("ContextOfItems");

                queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer, query3,
                        Collections.singleton(queryContext.getRoid()), packageMetaData);
                next = queryObjectProvider2.next();

                Query query = new Query("Main " + eClass.getName(), packageMetaData);
                QueryPart queryPart = query.createQueryPart();
                int written = 0;

                int maxObjectsPerFile = 0;
                if (nrProductsWithRepresentation <= 100) {
                    maxObjectsPerFile = 1;
                } else if (nrProductsWithRepresentation < 10000) {
                    maxObjectsPerFile = (int) (nrProductsWithRepresentation / 100);
                } else {
                    maxObjectsPerFile = 100;
                }

                //               LOGGER.info(report.getOriginalIfcFileName());
                //               LOGGER.info("Max objects per file: " + maxObjectsPerFile + " (" + eClass.getName() + ": " + nrProductsWithRepresentation + ")");

                report.setMaxPerFile(maxObjectsPerFile);

                while (next != null) {
                    if (next.eClass() == eClass && !done.contains(next.getOid())
                            && !toSkip.contains(next.getOid())) {
                        AbstractHashMapVirtualObject representation = next
                                .getDirectFeature(representationFeature);
                        if (representation != null) {
                            Set<HashMapVirtualObject> list = representation.getDirectListFeature(packageMetaData
                                    .getEReference("IfcProductRepresentation", "Representations"));
                            boolean goForIt = goForIt(list);
                            if (goForIt) {
                                if (next.eClass() == eClass && !done.contains(next.getOid())) {
                                    representation = next.getDirectFeature(representationFeature);
                                    if (representation != null) {
                                        list = representation.getDirectListFeature(packageMetaData
                                                .getEReference("IfcProductRepresentation", "Representations"));
                                        boolean goForIt2 = goForIt(list);
                                        if (goForIt2) {
                                            queryPart.addOid(next.getOid());
                                            written++;
                                            if (written >= maxObjectsPerFile) {
                                                processQuery(databaseSession, queryContext,
                                                        generateGeometryResult, ifcSerializerPlugin, settings,
                                                        renderEngineFilter, renderEnginePool, executor, eClass,
                                                        query, queryPart, false, null, written);
                                                query = new Query("Main " + eClass.getName(), packageMetaData);
                                                queryPart = query.createQueryPart();
                                                written = 0;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    next = queryObjectProvider2.next();
                }
                if (written > 0) {
                    processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin,
                            settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart,
                            false, null, written);
                }
            }
        }

        allJobsPushed = true;

        executor.shutdown();
        executor.awaitTermination(24, TimeUnit.HOURS);

        // Need total bounds
        //         float[] quantizationMatrix = createQuantizationMatrixFromBounds(boundsMm);
        //         ByteBuffer verticesQuantized = quantizeVertices(vertices, quantizationMatrix, generateGeometryResult.getMultiplierToMm());
        //         geometryData.setAttribute(GeometryPackage.eINSTANCE.getGeometryData_VerticesQuantized(), verticesQuantized.array());

        LOGGER.debug("Generating quantized vertices");
        double[] quantizationMatrix = createQuantizationMatrixFromBounds(
                generateGeometryResult.getBoundsUntransformed(), multiplierToMm);
        for (Long id : geometryDataMap.keySet()) {
            Tuple<HashMapVirtualObject, ByteBuffer> tuple = geometryDataMap.get(id);

            HashMapVirtualObject buffer = new HashMapVirtualObject(queryContext,
                    GeometryPackage.eINSTANCE.getBuffer());
            //            Buffer buffer = databaseSession.create(Buffer.class);
            buffer.set("data",
                    quantizeVertices(tuple.getB().asDoubleBuffer(), quantizationMatrix, multiplierToMm)
                            .array());
            //            buffer.setData(quantizeVertices(tuple.getB(), quantizationMatrix, multiplierToMm).array());
            //            databaseSession.store(buffer);
            buffer.save();
            HashMapVirtualObject geometryData = tuple.getA();
            geometryData.set("verticesQuantized", buffer.getOid());
            int reused = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_Reused());
            int nrTriangles = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_NrIndices())
                    / 3;
            int saveableTriangles = Math.max(0, (reused - 1)) * nrTriangles;
            geometryData.set("saveableTriangles", saveableTriangles);
            //            if (saveableTriangles > 0) {
            //               System.out.println("Saveable triangles: " + saveableTriangles);
            //            }
            geometryData.saveOverwrite();
        }

        long end = System.nanoTime();
        long total = totalBytes.get()
                - (bytesSavedByHash.get() + bytesSavedByTransformation.get() + bytesSavedByMapping.get());
        LOGGER.info("Rendertime: " + Formatters.nanosToString(end - start) + ", " + "Reused (by hash): "
                + Formatters.bytesToString(bytesSavedByHash.get()) + ", Reused (by transformation): "
                + Formatters.bytesToString(bytesSavedByTransformation.get()) + ", Reused (by mapping): "
                + Formatters.bytesToString(bytesSavedByMapping.get()) + ", Total: "
                + Formatters.bytesToString(totalBytes.get()) + ", Final: " + Formatters.bytesToString(total));
        if (report.getNumberOfDebugFiles() > 0) {
            LOGGER.error("Number of erroneous files: " + report.getNumberOfDebugFiles());
        }
        Map<String, Integer> skipped = report.getSkippedBecauseOfInvalidRepresentationIdentifier();
        if (skipped.size() > 0) {
            LOGGER.error("Number of representations skipped:");
            for (String identifier : skipped.keySet()) {
                LOGGER.error("\t" + identifier + ": " + skipped.get(identifier));
            }
        }
        String dump = geometryGenerationDebugger.dump();
        if (dump != null) {
            LOGGER.info(dump);
        }
    } catch (Exception e) {
        running = false;
        LOGGER.error("", e);
        report.setEnd(new GregorianCalendar());
        throw new GeometryGeneratingException(e);
    }
    report.setEnd(new GregorianCalendar());
    try {
        if (report.getNumberOfDebugFiles() > 0) {
            writeDebugFile();
        }
    } catch (IOException e) {
        LOGGER.debug("", e);
    }
    return generateGeometryResult;
}
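
The 24-hour timeout passed to awaitTermination here effectively means "wait as long as the job takes". When that is the intent, looping on awaitTermination makes it explicit and gives a natural hook for progress logging; a sketch (class name and log message are made up):

import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

final class LongRunningShutdown {
    // Sketch: poll in a loop instead of relying on a single very large timeout.
    static void awaitIndefinitely(ThreadPoolExecutor executor) throws InterruptedException {
        executor.shutdown();
        while (!executor.awaitTermination(1, TimeUnit.HOURS)) {
            System.out.println("still running; queued=" + executor.getQueue().size()
                    + ", active=" + executor.getActiveCount());
        }
    }
}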

From source file: org.bimserver.GeometryGenerator.java

@SuppressWarnings("unchecked")
public GenerateGeometryResult generateGeometry(long uoid, final PluginManager pluginManager,
        final DatabaseSession databaseSession, final IfcModelInterface model, final int pid, final int rid,
        final boolean store, GeometryCache geometryCache)
        throws BimserverDatabaseException, GeometryGeneratingException {
    GenerateGeometryResult generateGeometryResult = new GenerateGeometryResult();
    packageMetaData = model.getPackageMetaData();
    productClass = packageMetaData.getEClass("IfcProduct");
    productRepresentationClass = packageMetaData.getEClass("IfcProductRepresentation");
    geometryFeature = productClass.getEStructuralFeature("geometry");
    representationFeature = productClass.getEStructuralFeature("Representation");
    representationsFeature = productRepresentationClass.getEStructuralFeature("Representations");

    if (geometryCache != null && !geometryCache.isEmpty()) {
        returnCachedData(model, geometryCache, databaseSession, pid, rid);
        return null;
    }
    long start = System.nanoTime();
    String pluginName = "";
    if (model.getPackageMetaData().getSchema() == Schema.IFC4) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc4StepSerializerPlugin";
    } else if (model.getPackageMetaData().getSchema() == Schema.IFC2X3TC1) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc2x3tc1StepSerializerPlugin";
    }

    try {
        final SerializerPlugin ifcSerializerPlugin = (SerializerPlugin) pluginManager.getPlugin(pluginName,
                true);
        if (ifcSerializerPlugin == null) {
            throw new UserException("No IFC serializer found");
        }

        User user = (User) databaseSession.get(uoid, OldQuery.getDefault());
        UserSettings userSettings = user.getUserSettings();
        RenderEnginePluginConfiguration defaultRenderEngine = userSettings.getDefaultRenderEngine();
        if (defaultRenderEngine == null) {
            throw new UserException("No default render engine has been selected for this user");
        }
        final RenderEnginePlugin renderEnginePlugin = pluginManager
                .getRenderEngine(defaultRenderEngine.getPluginDescriptor().getPluginClassName(), true);
        if (renderEnginePlugin == null) {
            throw new UserException("No (enabled) render engine found of type "
                    + defaultRenderEngine.getPluginDescriptor().getPluginClassName());
        }

        int maxSimultanousThreads = Math.min(
                bimServer.getServerSettingsCache().getServerSettings().getRenderEngineProcesses(),
                Runtime.getRuntime().availableProcessors());
        if (maxSimultanousThreads < 1) {
            maxSimultanousThreads = 1;
        }

        final RenderEngineSettings settings = new RenderEngineSettings();
        settings.setPrecision(Precision.SINGLE);
        settings.setIndexFormat(IndexFormat.AUTO_DETECT);
        settings.setGenerateNormals(true);
        settings.setGenerateTriangles(true);
        settings.setGenerateWireFrame(false);

        final RenderEngineFilter renderEngineFilter = new RenderEngineFilter();

        if (maxSimultanousThreads == 1) {
            Runner runner = new Runner(null, renderEnginePlugin, databaseSession, settings, store, model,
                    ifcSerializerPlugin, model, pid, rid, null, renderEngineFilter, generateGeometryResult);
            runner.run();
        } else {
            Set<EClass> classes = new HashSet<>();
            for (IdEObject object : model.getAllWithSubTypes(packageMetaData.getEClass("IfcProduct"))) {
                IdEObject representation = (IdEObject) object.eGet(representationFeature);
                if (representation != null
                        && ((List<?>) representation.eGet(representationsFeature)).size() > 0) {
                    classes.add(object.eClass());
                }
            }

            if (classes.size() == 0) {
                return null;
            }

            classes.remove(packageMetaData.getEClass("IfcAnnotation"));
            classes.remove(packageMetaData.getEClass("IfcOpeningElement"));

            LOGGER.debug("Using " + maxSimultanousThreads + " processes for geometry generation");
            ThreadPoolExecutor executor = new ThreadPoolExecutor(maxSimultanousThreads, maxSimultanousThreads,
                    24, TimeUnit.HOURS, new ArrayBlockingQueue<Runnable>(classes.size()));

            final Map<IdEObject, IdEObject> bigMap = new HashMap<IdEObject, IdEObject>();

            HideAllInversesObjectIDM idm = new HideAllInversesObjectIDM(
                    CollectionUtils.singleSet(packageMetaData.getEPackage()),
                    pluginManager.getMetaDataManager().getPackageMetaData("ifc2x3tc1").getSchemaDefinition());
            OidProvider oidProvider = new OidProvider() {
                @Override
                public long newOid(EClass eClass) {
                    return databaseSession.newOid(eClass);
                }
            };
            for (final EClass eClass : classes) {
                final BasicIfcModel targetModel = new BasicIfcModel(
                        pluginManager.getMetaDataManager().getPackageMetaData("ifc2x3tc1"), null);
                ModelHelper modelHelper = new ModelHelper(bimServer.getMetaDataManager(), targetModel);
                modelHelper.setOidProvider(oidProvider);
                modelHelper.setObjectIDM(idm);

                IdEObject newOwnerHistory = modelHelper.copyBasicObjects(model, bigMap);

                for (IdEObject idEObject : model.getAll(eClass)) {
                    IdEObject newObject = modelHelper.copy(idEObject, false,
                            ModelHelper.createObjectIdm(idEObject.eClass()));
                    modelHelper.copyDecomposes(idEObject, newOwnerHistory);
                    bigMap.put(newObject, idEObject);
                    if (eClass.getName().equals("IfcWallStandardCase")) {
                        EStructuralFeature hasOpeningsFeature = idEObject.eClass()
                                .getEStructuralFeature("HasOpenings");
                        for (IdEObject ifcRelVoidsElement : ((List<IdEObject>) idEObject
                                .eGet(hasOpeningsFeature))) {
                            bigMap.put(modelHelper.copy(ifcRelVoidsElement, false), ifcRelVoidsElement);
                            EStructuralFeature relatedOpeningElementFeature = ifcRelVoidsElement.eClass()
                                    .getEStructuralFeature("RelatedOpeningElement");
                            IdEObject relatedOpeningElement = (IdEObject) ifcRelVoidsElement
                                    .eGet(relatedOpeningElementFeature);
                            if (relatedOpeningElement != null) {
                                bigMap.put(modelHelper.copy(relatedOpeningElement, false),
                                        relatedOpeningElement);
                            }
                        }
                    }
                }

                executor.submit(new Runner(eClass, renderEnginePlugin, databaseSession, settings, store,
                        targetModel, ifcSerializerPlugin, model, pid, rid, bigMap, renderEngineFilter,
                        generateGeometryResult));
            }
            executor.shutdown();
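            // Note: awaitTermination's boolean result is ignored here; if the
            // 24-hour timeout elapses, execution simply continues.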
            executor.awaitTermination(24, TimeUnit.HOURS);
        }

        long end = System.nanoTime();
        LOGGER.info("Rendertime: " + ((end - start) / 1000000) + "ms, " + "Reused: "
                + Formatters.bytesToString(bytesSaved.get()) + ", Total: "
                + Formatters.bytesToString(totalBytes.get()) + ", Final: "
                + Formatters.bytesToString(totalBytes.get() - bytesSaved.get()));
    } catch (Exception e) {
        LOGGER.error("", e);
        throw new GeometryGeneratingException(e);
    }
    return generateGeometryResult;
}
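
The 24-hour wait above discards awaitTermination's boolean result. For comparison, here is a minimal sketch of the shutdown idiom from the ThreadPoolExecutor javadoc, which checks that result and escalates to shutdownNow(); the pool size and 60-second timeouts are illustrative assumptions, not values from the BimServer source:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownIdiom {

    static void shutdownAndAwait(ExecutorService executor) {
        executor.shutdown(); // no new tasks accepted; queued tasks still run
        try {
            // awaitTermination returns false if the timeout elapsed first
            if (!executor.awaitTermination(60, TimeUnit.SECONDS)) {
                executor.shutdownNow(); // interrupt workers, drop queued tasks
                if (!executor.awaitTermination(60, TimeUnit.SECONDS)) {
                    System.err.println("Pool did not terminate");
                }
            }
        } catch (InterruptedException e) {
            executor.shutdownNow();
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }

    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int n = i;
            executor.execute(() -> System.out.println("task " + n));
        }
        shutdownAndAwait(executor);
    }
}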

From source file:org.bimserver.tests.TestSimultaniousDownloadWithCaching.java

private void start() {
    BimServerConfig config = new BimServerConfig();
    Path homeDir = Paths.get("home");
    try {
        if (Files.isDirectory(homeDir)) {
            PathUtils.removeDirectoryWithContent(homeDir);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    config.setClassPath(System.getProperty("java.class.path"));
    config.setHomeDir(homeDir);
    config.setPort(8080);
    config.setStartEmbeddedWebServer(true);
    config.setResourceFetcher(new LocalDevelopmentResourceFetcher(Paths.get("../")));
    final BimServer bimServer = new BimServer(config);
    try {
        LocalDevPluginLoader.loadPlugins(bimServer.getPluginManager(), null);
        bimServer.start();
        if (bimServer.getServerInfo().getServerState() == ServerState.NOT_SETUP) {
            bimServer.getService(AdminInterface.class).setup("http://localhost", "Administrator",
                    "admin@bimserver.org", "admin", null, null, null);
        }
    } catch (PluginException e2) {
        e2.printStackTrace();
    } catch (ServerException e) {
        e.printStackTrace();
    } catch (DatabaseInitException e) {
        e.printStackTrace();
    } catch (BimserverDatabaseException e) {
        e.printStackTrace();
    } catch (DatabaseRestartRequiredException e) {
        e.printStackTrace();
    } catch (UserException e) {
        e.printStackTrace();
    }

    try {
        final ServiceMap serviceMap = bimServer.getServiceFactory().get(AccessMethod.INTERNAL);
        ServiceInterface serviceInterface = serviceMap.get(ServiceInterface.class);
        SettingsInterface settingsInterface = serviceMap.get(SettingsInterface.class);
        final AuthInterface authInterface = serviceMap.get(AuthInterface.class);
        serviceInterface = bimServer.getServiceFactory()
                .get(authInterface.login("admin@bimserver.org", "admin"), AccessMethod.INTERNAL)
                .get(ServiceInterface.class);
        settingsInterface.setCacheOutputFiles(true);
        settingsInterface.setGenerateGeometryOnCheckin(false);
        final SProject project = serviceMap.getServiceInterface().addProject("test", "ifc2x3tc1");
        SDeserializerPluginConfiguration deserializerByName = serviceMap.getServiceInterface()
                .getDeserializerByName("IfcStepDeserializer");
        Path file = Paths.get("../TestData/data/AC11-Institute-Var-2-IFC.ifc");
        serviceInterface.checkin(project.getOid(), "test", deserializerByName.getOid(), file.toFile().length(),
                file.getFileName().toString(), new DataHandler(new FileDataSource(file.toFile())), false, true);
        final SProject projectUpdate = serviceMap.getServiceInterface().getProjectByPoid(project.getOid());
        ThreadPoolExecutor executor = new ThreadPoolExecutor(20, 20, 1, TimeUnit.HOURS,
                new ArrayBlockingQueue<Runnable>(1000));
        for (int i = 0; i < 20; i++) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        ServiceMap serviceMap2 = bimServer.getServiceFactory().get(
                                authInterface.login("admin@bimserver.org", "admin"), AccessMethod.INTERNAL);
                        SSerializerPluginConfiguration serializerPluginConfiguration = serviceMap
                                .getServiceInterface().getSerializerByName("Ifc2x3");
                        Long download = serviceMap2.getServiceInterface().download(
                                Collections.singleton(projectUpdate.getLastRevisionId()),
                                DefaultQueries.allAsString(), serializerPluginConfiguration.getOid(), true);
                        SDownloadResult downloadData = serviceMap2.getServiceInterface()
                                .getDownloadData(download);
                        if (downloadData.getFile()
                                .getDataSource() instanceof CacheStoringEmfSerializerDataSource) {
                            CacheStoringEmfSerializerDataSource c = (CacheStoringEmfSerializerDataSource) downloadData
                                    .getFile().getDataSource();
                            try {
                                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                                c.writeToOutputStream(baos, null);
                                System.out.println(baos.size());
                            } catch (SerializerException e) {
                                e.printStackTrace();
                            }
                        } else {
                            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                            IOUtils.copy(downloadData.getFile().getInputStream(), baos);
                            System.out.println(baos.size());
                        }
                        serviceMap2.getServiceInterface().cleanupLongAction(download);
                    } catch (ServerException e) {
                        e.printStackTrace();
                    } catch (UserException e) {
                        e.printStackTrace();
                    } catch (FileNotFoundException e) {
                        e.printStackTrace();
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (PublicInterfaceNotFoundException e1) {
                        e1.printStackTrace();
                    }
                }
            });
        }
        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.HOURS);
        bimServer.stop();
    } catch (ServerException e1) {
        e1.printStackTrace();
    } catch (UserException e1) {
        e1.printStackTrace();
    } catch (InterruptedException e) {
        e.printStackTrace();
    } catch (PublicInterfaceNotFoundException e2) {
        e2.printStackTrace();
    }
}
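
The test above submits exactly 20 tasks into an ArrayBlockingQueue of 1000, so nothing is ever rejected; with a smaller queue, execute() would throw RejectedExecutionException once the queue and all worker threads were busy. A hedged sketch of one alternative, using CallerRunsPolicy for back-pressure (pool size, queue size, and timeout are illustrative):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BoundedQueueDemo {
    public static void main(String[] args) throws InterruptedException {
        // Two threads plus ten queue slots; overflow tasks run on the caller's
        // thread instead of being rejected, which throttles submission.
        ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 2, 1, TimeUnit.HOURS,
                new ArrayBlockingQueue<Runnable>(10),
                new ThreadPoolExecutor.CallerRunsPolicy());
        for (int i = 0; i < 100; i++) {
            final int n = i;
            executor.execute(() -> System.out.println("download " + n));
        }
        executor.shutdown();
        if (!executor.awaitTermination(1, TimeUnit.MINUTES)) {
            executor.shutdownNow(); // give up if tasks are stuck
        }
    }
}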

From source file:org.nuxeo.ecm.automation.server.jaxrs.batch.BatchManagerFixture.java

@Test
public void testBatchConcurrency() throws Exception {

    BatchManager bm = Framework.getService(BatchManager.class);

    // Initialize batches with one file concurrently
    int nbBatches = 100;
    String[] batchIds = new String[nbBatches];
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(nbBatches + 1));

    for (int i = 0; i < nbBatches; i++) {
        final int batchIndex = i;
        tpe.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    String batchId = bm.initBatch();
                    bm.addStream(batchId, "0",
                            new ByteArrayInputStream(
                                    ("SomeContent_" + batchId).getBytes(StandardCharsets.UTF_8)),
                            "MyBatchFile.txt", "text/plain");
                    batchIds[batchIndex] = batchId;
                } catch (IOException e) {
                    fail(e.getMessage());
                }
            }
        });
    }

    tpe.shutdown();
    boolean finish = tpe.awaitTermination(20, TimeUnit.SECONDS);
    assertTrue("timeout", finish);

    // Check batches
    for (String batchId : batchIds) {
        assertNotNull(batchId);
    }
    // Test indexes 0, 9, 99, ..., nbBatches - 1
    int nbDigits = (int) (Math.log10(nbBatches) + 1);
    int divisor = nbBatches;
    for (int i = 0; i < nbDigits; i++) {
        int batchIndex = nbBatches / divisor - 1;
        String batchId = batchIds[batchIndex];
        Blob blob = bm.getBlob(batchId, "0");
        assertNotNull(blob);
        assertEquals("MyBatchFile.txt", blob.getFilename());
        assertEquals("SomeContent_" + batchId, blob.getString());
        divisor = divisor / 10;
    }

    // Check storage size
    TransientStore ts = bm.getTransientStore();
    assertTrue(((AbstractTransientStore) ts).getStorageSize() > 12 * nbBatches);

    // Clean batches
    for (String batchId : batchIds) {
        bm.clean(batchId);
    }
    assertEquals(0, ts.getStorageSizeMB());
}
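
One caveat worth noting: submit() wraps each Runnable in a Future that captures anything it throws, so the fail() calls inside the workers above never reach the test thread directly (the assertNotNull loop is what actually detects a failed batch). A small sketch, under that assumption, of keeping the Futures and rethrowing worker exceptions after awaitTermination:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class SurfaceWorkerFailures {
    public static void main(String[] args) throws InterruptedException {
        ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<Runnable>());
        List<Future<?>> futures = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            final int n = i;
            futures.add(tpe.submit(() -> {
                if (n == 42) { // simulate one failing worker
                    throw new IllegalStateException("worker " + n + " failed");
                }
            }));
        }
        tpe.shutdown();
        if (!tpe.awaitTermination(20, TimeUnit.SECONDS)) {
            throw new IllegalStateException("timeout");
        }
        for (Future<?> f : futures) {
            try {
                f.get(); // rethrows whatever the worker threw
            } catch (ExecutionException e) {
                System.err.println("task failed: " + e.getCause());
            }
        }
    }
}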

From source file:org.nuxeo.ecm.automation.server.jaxrs.batch.BatchManagerFixture.java

@Test
public void testFileConcurrency() throws Exception {

    // Initialize a batch
    BatchManager bm = Framework.getService(BatchManager.class);
    String batchId = bm.initBatch();

    // Add files concurrently
    int nbFiles = 100;
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(nbFiles + 1));

    for (int i = 0; i < nbFiles; i++) {
        final String fileIndex = String.valueOf(i);
        tpe.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    bm.addStream(batchId, fileIndex,
                            new ByteArrayInputStream(
                                    ("SomeContent_" + fileIndex).getBytes(StandardCharsets.UTF_8)),
                            fileIndex + ".txt", "text/plain");
                } catch (IOException e) {
                    fail(e.getMessage());
                }
            }
        });
    }

    tpe.shutdown();
    boolean finish = tpe.awaitTermination(20, TimeUnit.SECONDS);
    assertTrue("timeout", finish);

    // Check blobs
    List<Blob> blobs = bm.getBlobs(batchId);
    assertEquals(nbFiles, blobs.size());
    // Test indexes 0, 9, 99, ..., nbFiles - 1
    int nbDigits = (int) (Math.log10(nbFiles) + 1);
    int divisor = nbFiles;
    for (int i = 0; i < nbDigits; i++) {
        int fileIndex = nbFiles / divisor - 1;
        assertEquals(fileIndex + ".txt", blobs.get(fileIndex).getFilename());
        assertEquals("SomeContent_" + fileIndex, blobs.get(fileIndex).getString());
        divisor = divisor / 10;
    }

    // Check storage size
    TransientStore ts = bm.getTransientStore();
    assertTrue(((AbstractTransientStore) ts).getStorageSize() > 12 * nbFiles);

    // Clean batch
    bm.clean(batchId);
    assertEquals(0, ts.getStorageSizeMB());
}
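
When the whole task set is known up front, invokeAll offers a compact alternative to the submit-loop plus shutdown()/awaitTermination() pair: it blocks until every task completes or the timeout expires, cancelling the stragglers. A minimal sketch (pool size, task count, and timeouts are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class InvokeAllDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(5);
        List<Callable<String>> tasks = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            final int n = i;
            tasks.add(() -> "SomeContent_" + n);
        }
        // Blocks until all tasks finish or 20 seconds pass; unfinished tasks
        // are cancelled and their Futures report isCancelled() == true.
        List<Future<String>> results = pool.invokeAll(tasks, 20, TimeUnit.SECONDS);
        long done = results.stream().filter(f -> !f.isCancelled()).count();
        System.out.println(done + " of " + tasks.size() + " tasks completed");
        pool.shutdown();
        if (!pool.awaitTermination(5, TimeUnit.SECONDS)) {
            pool.shutdownNow();
        }
    }
}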