List of usage examples for java.util.concurrent ExecutorService execute
void execute(Runnable command);
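The signature above takes any Runnable and schedules it for asynchronous execution; unlike submit, it returns no Future, so completion is normally observed by shutting the pool down and awaiting termination. Before the longer real-world examples below, here is a minimal, self-contained sketch of that pattern (class and task names are illustrative only, not taken from any of the source files listed here):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ExecuteExample {
    public static void main(String[] args) throws InterruptedException {
        // Create a small fixed-size pool and hand it Runnables via execute().
        ExecutorService executor = Executors.newFixedThreadPool(2);
        for (int i = 0; i < 4; i++) {
            final int taskId = i;
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    System.out.println("Task " + taskId + " on " + Thread.currentThread().getName());
                }
            });
        }
        // execute() returns no Future, so shut down and wait for completion explicitly.
        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.MINUTES);
    }
}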
From source file:org.apache.hive.service.cli.CLIServiceTest.java
@Test
public void testExecuteStatementParallel() throws Exception {
    Map<String, String> confOverlay = new HashMap<String, String>();
    String tableName = "TEST_EXEC_PARALLEL";
    String columnDefinitions = "(ID STRING)";

    // Open a session and set up the test data
    SessionHandle sessionHandle = setupTestData(tableName, columnDefinitions, confOverlay);
    assertNotNull(sessionHandle);

    long longPollingTimeout = HiveConf.getTimeVar(new HiveConf(),
            HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT, TimeUnit.MILLISECONDS);
    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT.varname, longPollingTimeout + "ms");

    int THREAD_COUNT = 10, QUERY_COUNT = 10;
    // TODO: refactor this into a utility, LLAP tests use this pattern a lot
    ExecutorService executor = Executors.newFixedThreadPool(THREAD_COUNT);
    CountDownLatch cdlIn = new CountDownLatch(THREAD_COUNT), cdlOut = new CountDownLatch(1);

    @SuppressWarnings("unchecked")
    Callable<Void>[] cs = (Callable<Void>[]) new Callable[3];
    // Create callables with different queries.
    String query = "SELECT ID + %1$d FROM " + tableName;
    cs[0] = createQueryCallable(query, confOverlay, longPollingTimeout, QUERY_COUNT, cdlIn, cdlOut);
    query = "SELECT t1.ID, SUM(t2.ID) + %1$d FROM " + tableName + " t1 CROSS JOIN " + tableName
            + " t2 GROUP BY t1.ID HAVING t1.ID > 1";
    cs[1] = createQueryCallable(query, confOverlay, longPollingTimeout, QUERY_COUNT, cdlIn, cdlOut);
    query = "SELECT b.a FROM (SELECT (t1.ID + %1$d) as a , t2.* FROM " + tableName + " t1 INNER JOIN "
            + tableName + " t2 ON t1.ID = t2.ID WHERE t2.ID > 2) b";
    cs[2] = createQueryCallable(query, confOverlay, longPollingTimeout, QUERY_COUNT, cdlIn, cdlOut);

    @SuppressWarnings("unchecked")
    FutureTask<Void>[] tasks = (FutureTask<Void>[]) new FutureTask[THREAD_COUNT];
    for (int i = 0; i < THREAD_COUNT; ++i) {
        tasks[i] = new FutureTask<Void>(cs[i % cs.length]);
        executor.execute(tasks[i]);
    }
    try {
        cdlIn.await(); // Wait for all threads to be ready.
        cdlOut.countDown(); // Release them at the same time.
        for (int i = 0; i < THREAD_COUNT; ++i) {
            tasks[i].get();
        }
    } catch (Throwable t) {
        throw new RuntimeException(t);
    }

    // Cleanup
    client.executeStatement(sessionHandle, "DROP TABLE " + tableName, confOverlay);
    client.closeSession(sessionHandle);
}
From source file:org.gbif.portal.harvest.taxonomy.TaxonomyUtils.java
/**
 * A utility that effectively ensures that the taxonomy from one data resource is fully represented in another.
 * For each concept that exists in the source, the target is checked for a concept representing the same
 * classification (note that the target may be a more complete classification than the source). If the concept
 * does not exist, it is created.
 *
 * Typically this method would be used to build a NUB taxonomy. Taxonomic data resources would be imported with
 * allowCreateKingdoms first, and then inferred taxonomies would be imported with unknownKingdoms collated.
 *
 * This will import accepted concepts and then non-accepted concepts in order of rank.
 *
 * @param sourceDataResourceId The resource holding the concepts that are to be imported into the target
 * @param targetDataResourceId The target resource to ensure encapsulates all concepts in the source
 * @param targetDataProviderId The data provider for the resource owning the taxonomy being built - this MUST own the targetDataResourceId
 * @param allowCreateUnknownKingdoms If this is set to false then the TaxonomyUtils.nameOfUnknownKingdom is used for any kingdom that
 *          is not represented in the target taxonomy. If set to true, then the kingdoms are imported from the source.
 * @param majorRanksOnly If this is set to true, then only major ranks will be imported
 * @param unpartneredOnly If this is set to true, then only concepts with no partner concept id will be imported
 * @throws InterruptedException
 */
public void importTaxonomyFromDataResource(long sourceDataResourceId, long targetDataResourceId,
        long targetDataProviderId, boolean allowCreateUnknownKingdoms, boolean majorRanksOnly,
        boolean unpartneredOnly) throws InterruptedException {
    List<Integer> ranksToImport = null;
    if (unpartneredOnly) {
        ranksToImport = taxonConceptDAO.getUnpartneredRanksWithinResource(sourceDataResourceId);
    } else {
        ranksToImport = taxonConceptDAO.getRanksWithinResource(sourceDataResourceId);
    }
    logger.debug("There are " + ranksToImport.size() + " ranks to import from data resource["
            + sourceDataResourceId + "]: " + ranksToImport);

    ExecutorService es = Executors.newCachedThreadPool();
    for (int i = 0; i < ranksToImport.size(); i++) {
        int rank = (ranksToImport.get(i));
        // Note: each task is wrapped in a Thread object, but the pool simply runs it as a plain Runnable.
        es.execute(new Thread(new TaxonomyThread(relationshipAssertionDAO, taxonConceptDAO,
                targetDataResourceId, targetDataProviderId, allowCreateUnknownKingdoms, majorRanksOnly,
                unpartneredOnly, sourceDataResourceId, rank)));
    }
    es.shutdown();
    // Poll until the pool has finished all submitted tasks.
    while (!es.isTerminated()) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    System.out.println("All threads finished for resource: " + sourceDataResourceId);
}
From source file:eu.esdihumboldt.hale.io.wfs.AbstractWFSWriter.java
@Override
public IOReport execute(ProgressIndicator progress) throws IOProviderConfigurationException, IOException {
    progress.begin("WFS Transaction", ProgressIndicator.UNKNOWN);

    // configure internal provider
    internalProvider.setDocumentWrapper(createTransaction());

    final PipedInputStream pIn = new PipedInputStream();
    PipedOutputStream pOut = new PipedOutputStream(pIn);
    currentExecuteStream = pOut;

    Future<Response> futureResponse = null;
    IOReporter reporter = createReporter();
    ExecutorService executor = Executors.newSingleThreadExecutor();
    try {
        // read the stream (in another thread)
        futureResponse = executor.submit(new Callable<Response>() {

            @Override
            public Response call() throws Exception {
                Proxy proxy = ProxyUtil.findProxy(targetWfs.getLocation());
                Request request = Request.Post(targetWfs.getLocation()).bodyStream(pIn,
                        ContentType.APPLICATION_XML);
                Executor executor = FluentProxyUtil.setProxy(request, proxy);

                // authentication
                String user = getParameter(PARAM_USER).as(String.class);
                String password = getParameter(PARAM_PASSWORD).as(String.class);

                if (user != null) {
                    // target host
                    int port = targetWfs.getLocation().getPort();
                    String hostName = targetWfs.getLocation().getHost();
                    String scheme = targetWfs.getLocation().getScheme();
                    HttpHost host = new HttpHost(hostName, port, scheme);

                    // add credentials
                    Credentials cred = ClientProxyUtil.createCredentials(user, password);
                    executor.auth(new AuthScope(host), cred);
                    executor.authPreemptive(host);
                }

                try {
                    return executor.execute(request);
                } finally {
                    pIn.close();
                }
            }
        });

        // write the stream
        SubtaskProgressIndicator subprogress = new SubtaskProgressIndicator(progress);
        reporter = (IOReporter) super.execute(subprogress);
    } finally {
        executor.shutdown();
    }

    try {
        Response response = futureResponse.get();
        HttpResponse res = response.returnResponse();
        int statusCode = res.getStatusLine().getStatusCode();
        XPathFactory xPathfactory = XPathFactory.newInstance();
        XPath xpath = xPathfactory.newXPath();
        if (statusCode >= 200 && statusCode < 300) {
            // success
            reporter.setSuccess(reporter.isSuccess());

            // construct summary from response
            try {
                Document responseDoc = parseResponse(res.getEntity());

                // totalInserted
                String inserted = xpath.compile("//TransactionSummary/totalInserted").evaluate(responseDoc);
                // XXX totalUpdated
                // XXX totalReplaced
                // XXX totalDeleted
                reporter.setSummary("Inserted " + inserted + " features.");
            } catch (XPathExpressionException e) {
                log.error("Error in XPath used to evaluate service response");
            } catch (ParserConfigurationException | SAXException e) {
                reporter.error(new IOMessageImpl(MessageFormat.format(
                        "Server returned status code {0}, but could not parse server response", statusCode),
                        e));
                reporter.setSuccess(false);
            }
        } else {
            // failure
            reporter.error(new IOMessageImpl("Server reported failure with code "
                    + res.getStatusLine().getStatusCode() + ": " + res.getStatusLine().getReasonPhrase(),
                    null));
            reporter.setSuccess(false);

            try {
                Document responseDoc = parseResponse(res.getEntity());
                String errorText = xpath.compile("//ExceptionText/text()").evaluate(responseDoc);
                reporter.setSummary("Request failed: " + errorText);
            } catch (XPathExpressionException e) {
                log.error("Error in XPath used to evaluate service response");
            } catch (ParserConfigurationException | SAXException e) {
                reporter.error(new IOMessageImpl("Could not parse server response", e));
                reporter.setSuccess(false);
            }
        }
    } catch (ExecutionException | InterruptedException e) {
        reporter.error(new IOMessageImpl("Failed to execute WFS-T request", e));
        reporter.setSuccess(false);
    }

    progress.end();
    return reporter;
}
From source file:MSUmpire.LCMSPeakStructure.LCMSPeakDIAMS2.java
private void PrepareMGF_MS1Cluster(LCMSPeakMS1 ms1lcms) throws IOException {
    ArrayList<PseudoMSMSProcessing> ScanList = new ArrayList<>();
    ExecutorService executorPool = Executors.newFixedThreadPool(NoCPUs);
    for (PeakCluster ms1cluster : ms1lcms.PeakClusters) {
        final ArrayList<PrecursorFragmentPairEdge> frags = FragmentsClu2Cur.get(ms1cluster.Index);
        if (frags != null && DIA_MZ_Range.getX() <= ms1cluster.GetMaxMz()
                && DIA_MZ_Range.getY() >= ms1cluster.TargetMz()) {
            // if (DIA_MZ_Range.getX() <= ms1cluster.GetMaxMz() && DIA_MZ_Range.getY() >= ms1cluster.TargetMz() && FragmentsClu2Cur.containsKey(ms1cluster.Index)) {
            // ArrayList<PrecursorFragmentPairEdge> frags = FragmentsClu2Cur.get(ms1cluster.Index);
            ms1cluster.GroupedFragmentPeaks.addAll(frags);
            if (Last_MZ_Range == null || Last_MZ_Range.getY() < ms1cluster.TargetMz()) {
                PseudoMSMSProcessing mSMSProcessing = new PseudoMSMSProcessing(ms1cluster, parameter);
                ScanList.add(mSMSProcessing);
            }
        }
    }

    for (PseudoMSMSProcessing proc : ScanList) {
        executorPool.execute(proc);
    }
    executorPool.shutdown();
    try {
        executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        Logger.getRootLogger().info("interrupted..");
    }

    String mgffile = FilenameUtils.getFullPath(ParentmzXMLName) + GetQ1Name() + ".mgf.temp";
    String mgffile2 = FilenameUtils.getFullPath(ParentmzXMLName) + GetQ2Name() + ".mgf.temp";
    // FileWriter mapwriter = new FileWriter(FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName) + ".ScanClusterMapping_Q1", true);
    // FileWriter mapwriter2 = new FileWriter(FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName) + ".ScanClusterMapping_Q2", true);
    // FileWriter mgfWriter = new FileWriter(mgffile, true);
    // FileWriter mgfWriter2 = new FileWriter(mgffile2, true);

    final BufferedWriter mapwriter = DIAPack.get_file(DIAPack.OutputFile.ScanClusterMapping_Q1,
            FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
                    + ".ScanClusterMapping_Q1"),
            mapwriter2 = DIAPack.get_file(DIAPack.OutputFile.ScanClusterMapping_Q2,
                    FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
                            + ".ScanClusterMapping_Q2"),
            mgfWriter = DIAPack.get_file(DIAPack.OutputFile.Mgf_Q1, mgffile),
            mgfWriter2 = DIAPack.get_file(DIAPack.OutputFile.Mgf_Q2, mgffile2);

    for (PseudoMSMSProcessing mSMSProcessing : ScanList) {
        if (MatchedFragmentMap.size() > 0) {
            mSMSProcessing.RemoveMatchedFrag(MatchedFragmentMap);
        }
        XYPointCollection Scan = mSMSProcessing.GetScan();
        if (Scan != null && Scan.PointCount() > parameter.MinFrag) {
            // StringBuilder mgfString = new StringBuilder();
            if (mSMSProcessing.Precursorcluster.IsotopeComplete(3)) {
                final BufferedWriter mgfString = mgfWriter;
                parentDIA.Q1Scan++;
                mgfString.append("BEGIN IONS\n");
                mgfString.append("PEPMASS=").append(String.valueOf(mSMSProcessing.Precursorcluster.TargetMz()))
                        .append("\n");
                mgfString.append("CHARGE=").append(String.valueOf(mSMSProcessing.Precursorcluster.Charge))
                        .append("+\n");
                mgfString.append("RTINSECONDS=")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f))
                        .append("\n");
                mgfString.append("TITLE=").append(GetQ1Name()).append(".")
                        .append(String.valueOf(parentDIA.Q1Scan)).append(".")
                        .append(String.valueOf(parentDIA.Q1Scan)).append(".")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.Charge)).append("\n");
                for (int i = 0; i < Scan.PointCount(); i++) {
                    mgfString.append(String.valueOf(Scan.Data.get(i).getX())).append(" ")
                            .append(String.valueOf(Scan.Data.get(i).getY())).append("\n");
                }
                mgfString.append("END IONS\n\n");
                mapwriter.write(parentDIA.Q1Scan + "_" + mSMSProcessing.Precursorcluster.Index + "\n");
                // mgfWriter.write(mgfString.toString());
            //} else if (mSMSProcessing.Precursorcluster.IsotopeComplete(2)) {
            } else {
                final BufferedWriter mgfString = mgfWriter2;
                parentDIA.Q2Scan++;
                mgfString.append("BEGIN IONS\n");
                mgfString.append("PEPMASS=").append(String.valueOf(mSMSProcessing.Precursorcluster.TargetMz()))
                        .append("\n");
                mgfString.append("CHARGE=").append(String.valueOf(mSMSProcessing.Precursorcluster.Charge))
                        .append("+\n");
                mgfString.append("RTINSECONDS=")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f))
                        .append("\n");
                mgfString.append("TITLE=").append(GetQ2Name()).append(".")
                        .append(String.valueOf(parentDIA.Q2Scan)).append(".")
                        .append(String.valueOf(parentDIA.Q2Scan)).append(".")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.Charge)).append("\n");
                for (int i = 0; i < Scan.PointCount(); i++) {
                    mgfString.append(String.valueOf(Scan.Data.get(i).getX())).append(" ")
                            .append(String.valueOf(Scan.Data.get(i).getY())).append("\n");
                }
                mgfString.append("END IONS\n\n");
                mapwriter2.write(parentDIA.Q2Scan + "_" + mSMSProcessing.Precursorcluster.Index + "\n");
                // mgfWriter2.write(mgfString.toString());
            }
        }
        mSMSProcessing.Precursorcluster.GroupedFragmentPeaks.clear();
    }
    // mgfWriter2.close();
    // mgfWriter.close();
    // mapwriter.close();
    // mapwriter2.close();
}
From source file:com.fluidops.iwb.HTMLProvider.HTMLProvider.java
/**
 * HINT: The gather(List<Statement> res) method collects the statements extracted by the provider. Use the
 * following guidelines:
 *
 * 1.) Make sure to have a clear documentation, structure, and modularization. Use helper methods wherever
 * possible to increase readability of the method.
 *
 * 2.) Whenever there is a need to create statements, use the helper methods in {@link ProviderUtils}. This
 * class helps you in generating "safe" URIs, replacing invalid characters etc. It also offers common
 * functionality for filtering statements, e.g. removing statements containing null values.
 *
 * 3.) Re-use existing ontologies! The {@link Vocabulary} class provides a mix of vocabulary from common
 * ontologies and can be easily extended. You should not define URIs inside the provider itself, except these
 * URIs are absolutely provider-specific.
 *
 * 4.) Concerning exception handling, it is best practice to throw exceptions whenever the provider run cannot
 * be finished in a regular way. Since these exceptions will be propagated to the UI, it is recommended to
 * catch Exceptions locally first, log them, and wrap them into (Runtime)Exceptions with a human-readable
 * description. When logging exceptions, the log level "warn" is appropriate.
 */
@Override
public void gather(List<Statement> res) throws Exception {
    URL registryUrl = new URL(config.location);
    HttpURLConnection registryConnection = (HttpURLConnection) registryUrl.openConnection();
    registryConnection.setRequestMethod("GET");

    // /////////////////////////////////////////////////////////////// STEP 1
    logger.info("Retrieving packages from CKAN...");

    if (registryConnection.getResponseCode() != HttpURLConnection.HTTP_OK) {
        String msg = "Connection with the registry could not be established. ("
                + registryConnection.getResponseCode() + ", " + registryConnection.getResponseMessage() + ")";
        logger.warn(msg);
        throw new RuntimeException(msg); // propagate to UI
    }

    String siteContent = GenUtil.readUrl(registryConnection.getInputStream());

    JSONObject groupAsJson = null;
    JSONArray packageListJsonArray = null;
    try {
        groupAsJson = new JSONObject(new JSONTokener(siteContent));
        packageListJsonArray = groupAsJson.getJSONArray("packages");
    } catch (JSONException e) {
        String msg = "Returned content " + siteContent + " is not valid JSON. Check if the registry URL is valid.";
        logger.warn(msg);
        throw new RuntimeException(msg); // propagate to UI
    }
    logger.info("-> found " + packageListJsonArray.length() + " packages");

    // /////////////////////////////////////////////////////////////// STEP 2
    logger.info("Registering LOD catalog in metadata repository");

    /**
     * HINT: the method createStatement allows to create statements if subject, predicate and object are all
     * known; use this method instead of opening a value factory
     */
    res.add(ProviderUtils.createStatement(CKANVocabulary.CKAN_CATALOG, RDF.TYPE, Vocabulary.DCAT.CATALOG));
    res.add(ProviderUtils.createStatement(CKANVocabulary.CKAN_CATALOG, RDFS.LABEL,
            CKANVocabulary.CKAN_CATALOG_LABEL));
    logger.info("-> done");

    // /////////////////////////////////////////////////////////////// STEP 3
    logger.info("Extracting metadata for the individual data sets listed in CKAN");

    /**
     * HINT: Set up an Apache HTTP client with a manager for multiple threads; as a general guideline, use
     * parallelization whenever crawling web sources!
     */
    MultiThreadedHttpConnectionManager connectionManager = new MultiThreadedHttpConnectionManager();
    HttpClient client = new HttpClient(connectionManager);
    ExecutorService pool = Executors.newFixedThreadPool(10);

    // we store the data in a temporary memory store, which allows us
    // to perform transformation on the result set
    Repository repository = null;
    RepositoryConnection connection = null;
    try {
        // initialize repository and connection
        repository = new SailRepository(new MemoryStore());
        repository.initialize();
        connection = repository.getConnection();

        // Fire up a thread for every package
        logger.info("-> Fire up threads for the individual packages...");
        for (int i = 0; i < packageListJsonArray.length(); i++) {
            // we use the JSON representation to get a base URI to resolve relative
            // URIs in the XML later on. (and a fallback solution)
            String host = "http://www.ckan.net/package/" + packageListJsonArray.get(i).toString();
            String baseUri = findBaseUri(
                    "http://www.ckan.net/api/rest/package/" + packageListJsonArray.get(i).toString());
            baseUri = (baseUri == null) ? host : baseUri;
            pool.execute(new MetadataReader(client, host, baseUri, CKANVocabulary.CKAN_CATALOG, connection));
        }

        logger.info("-> Waiting for all tasks to complete (" + packageListJsonArray.length()
                + "tasks/data sources)...");
        pool.shutdown();
        pool.awaitTermination(4, TimeUnit.HOURS);

        /**
         * Now the extraction has finished, all statements are available in our temporary repository. We apply
         * some conversions and transformations to align the extracted statements with our target ontology.
         *
         * NOTE: this code is /NOT/ best practice, we should eventually extend ProviderUtils to deal with at
         * least lightweight transformations (such as changing property names) or realize such tasks using an
         * integrated mapping framework.
         */

        // Extraction from temporary repository, phase 1:
        logger.info("-> Extract dcterms:title AS rdfs:label, dcterms:contributor AS dcterms:creator, and dcterms:rights AS dcterms:license");
        String mappingQuery = mappingQuery();
        GraphQuery mappingGraphQuery = connection.prepareGraphQuery(QueryLanguage.SPARQL, mappingQuery);
        GraphQueryResult result = mappingGraphQuery.evaluate();
        logger.info("-> Appending extracted result to statement list");
        ProviderUtils.appendGraphQueryResultToListAndClose(result, res);

        // Label the distribution nodes
        logger.info("-> Generate labels for distributions");
        String labelDistributionQuery = labelDistributionQuery();
        GraphQuery labelDistributionGraphQuery = connection.prepareGraphQuery(QueryLanguage.SPARQL,
                labelDistributionQuery);
        GraphQueryResult result2 = labelDistributionGraphQuery.evaluate();
        logger.info("-> Appending extracted result to statement list");
        ProviderUtils.appendGraphQueryResultToListAndClose(result2, res);

        // Extraction from temporary repository, phase 2:
        logger.info("-> Deleting previously extracted triples and additional, not required information...");
        String deleteQuery = deleteQuery();
        Update deleteGraphQuery = connection.prepareUpdate(QueryLanguage.SPARQL, deleteQuery);
        deleteGraphQuery.execute();

        // Extraction from temporary repository, phase 3:
        logger.info("-> Deleting dcat:distribution and dcat:accessUrl information from"
                + "temp repository for which format information is missing...");
        String cleanDistQuery = cleanDistQuery();
        Update cleanupGraphQuery = connection.prepareUpdate(QueryLanguage.SPARQL, cleanDistQuery);
        cleanupGraphQuery.execute();

        logger.info("-> Appending remaining statements to result...");
        connection.getStatements(null, null, null, false).addTo(res);

        logger.info("Provider run finished successfully");
    } catch (Exception e) {
        logger.warn(e.getMessage());
        throw new RuntimeException(e);
    } finally {
        if (connection != null)
            connection.close();
        if (repository != null)
            repository.shutDown();
    }

    // in the end, make sure there are no statements containing null in any position
    // (did not take special care when creating statements)
    logger.info("-> cleaning up null statements");
    res = ProviderUtils.filterNullStatements(res);
}
From source file:org.jactr.core.runtime.controller.OldController.java
/**
 * actually start the model
 *
 * @param model
 * @param suspendOnStart
 */
final protected void startModel(final IModel model, boolean suspendOnStart) {
    try {
        _lock.lock();

        /*
         * attach the model listener that lets us block at the start of a cycle..
         */
        model.addListener(getModelListener(), ExecutorServices.INLINE_EXECUTOR);

        /*
         * create the executor on which the model will run
         */
        ExecutorService executor = createExecutorService(model);
        _executors.put(model, executor);

        /*
         * create the suspender that will control the model execution
         */
        Suspender suspender = new Suspender(model);
        suspender.setShouldSuspend(suspendOnStart);
        _suspensionState.put(model, suspender);

        /*
         * now the actual runnable
         */
        final Runnable modelRunner = createModelRunner(model, executor);

        Runnable actual = new Runnable() {
            public void run() {
                try {
                    modelRunner.run();
                } finally {
                    destroyModelRunner(modelRunner, model);
                }
            }
        };

        /*
         * and start it up
         */
        executor.execute(actual);
    } finally {
        _lock.unlock();
    }
}
From source file:org.apache.metamodel.jdbc.JdbcDataContextTest.java
public void testReleaseConnectionsInCompiledQuery() throws Exception {
    final int connectionPoolSize = 2;
    final int threadCount = 4;
    final int noOfCallsPerThreads = 30;

    final BasicDataSource ds = new BasicDataSource();
    ds.setDriverClassName("org.hsqldb.jdbcDriver");
    ds.setUrl("jdbc:hsqldb:res:metamodel");
    ds.setInitialSize(connectionPoolSize);
    ds.setMaxActive(connectionPoolSize);
    ds.setMaxWait(10000);
    ds.setMinEvictableIdleTimeMillis(1800000);
    ds.setMinIdle(0);
    ds.setMaxIdle(connectionPoolSize);
    ds.setNumTestsPerEvictionRun(3);
    ds.setTimeBetweenEvictionRunsMillis(-1);
    ds.setDefaultTransactionIsolation(java.sql.Connection.TRANSACTION_READ_COMMITTED);

    final JdbcDataContext dataContext = new JdbcDataContext(ds,
            new TableType[] { TableType.TABLE, TableType.VIEW }, null);

    final JdbcCompiledQuery compiledQuery = (JdbcCompiledQuery) dataContext.query().from("CUSTOMERS")
            .select("CUSTOMERNAME").where("CUSTOMERNUMBER").eq(new QueryParameter()).compile();

    assertEquals(0, compiledQuery.getActiveLeases());
    assertEquals(0, compiledQuery.getIdleLeases());

    final String compliedQueryString = compiledQuery.toSql();

    assertEquals(
            "SELECT _CUSTOMERS_._CUSTOMERNAME_ FROM PUBLIC._CUSTOMERS_ WHERE _CUSTOMERS_._CUSTOMERNUMBER_ = ?",
            compliedQueryString.replace('\"', '_'));

    assertEquals(0, compiledQuery.getActiveLeases());
    assertEquals(0, compiledQuery.getIdleLeases());

    ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
    final CountDownLatch latch = new CountDownLatch(threadCount);
    final List<Throwable> errors = new ArrayList<Throwable>();
    final Runnable runnable = new Runnable() {
        @Override
        public void run() {
            try {
                for (int i = 0; i < noOfCallsPerThreads; i++) {
                    final DataSet dataSet = dataContext.executeQuery(compiledQuery, new Object[] { 103 });
                    try {
                        assertTrue(dataSet.next());
                        Row row = dataSet.getRow();
                        assertNotNull(row);
                        assertEquals("Atelier graphique", row.getValue(0).toString());
                        assertFalse(dataSet.next());
                    } finally {
                        dataSet.close();
                    }
                }
            } catch (Throwable e) {
                errors.add(e);
            } finally {
                latch.countDown();
            }
        }
    };

    for (int i = 0; i < threadCount; i++) {
        executorService.execute(runnable);
    }

    try {
        latch.await(60000, TimeUnit.MILLISECONDS);
        if (errors.size() > 0) {
            throw new IllegalStateException(errors.get(0));
        }
        assertTrue(true);
    } finally {
        executorService.shutdownNow();
    }

    assertEquals(0, compiledQuery.getActiveLeases());

    compiledQuery.close();

    assertEquals(0, compiledQuery.getActiveLeases());
    assertEquals(0, compiledQuery.getIdleLeases());
}
From source file:org.codehaus.mojo.nbm.CreateWebstartAppMojo.java
/** * * @throws org.apache.maven.plugin.MojoExecutionException * @throws org.apache.maven.plugin.MojoFailureException *//*from w ww .j a v a 2s . co m*/ @Override public void execute() throws MojoExecutionException, MojoFailureException { if ("none".equalsIgnoreCase(includeLocales)) { includeLocales = ""; } if (signingThreads < 1) { signingThreads = Runtime.getRuntime().availableProcessors(); } if ((signingMaximumThreads > 0) && (signingThreads > signingMaximumThreads)) { signingThreads = signingMaximumThreads; } getLog().info("Using " + signingThreads + " signing threads."); if (!"nbm-application".equals(project.getPackaging())) { throw new MojoExecutionException( "This goal only makes sense on project with nbm-application packaging."); } final Project antProject = antProject(); getLog().warn( "WARNING: Unsigned and self-signed WebStart applications are deprecated from JDK7u21 onwards. To ensure future correct functionality please use trusted certificate."); if (keystore != null && keystorealias != null && keystorepassword != null) { File ks = new File(keystore); if (!ks.exists()) { throw new MojoFailureException("Cannot find keystore file at " + ks.getAbsolutePath()); } else { //proceed.. } } else if (keystore != null || keystorepassword != null || keystorealias != null) { throw new MojoFailureException( "If you want to sign the jnlp application, you need to define all three keystore related parameters."); } else { File generatedKeystore = new File(outputDirectory, "generated.keystore"); if (!generatedKeystore.exists()) { getLog().warn("Keystore related parameters not set, generating a default keystore."); GenerateKey genTask = (GenerateKey) antProject.createTask("genkey"); genTask.setAlias("jnlp"); genTask.setStorepass("netbeans"); genTask.setDname("CN=" + System.getProperty("user.name")); genTask.setKeystore(generatedKeystore.getAbsolutePath()); genTask.execute(); } keystore = generatedKeystore.getAbsolutePath(); keystorepassword = "netbeans"; keystorealias = "jnlp"; } Taskdef taskdef = (Taskdef) antProject.createTask("taskdef"); taskdef.setClassname(MakeJnlp2.class.getName()); taskdef.setName("makejnlp"); taskdef.execute(); taskdef = (Taskdef) antProject.createTask("taskdef"); taskdef.setClassname(Jar.class.getName()); taskdef.setName("jar"); taskdef.execute(); taskdef = (Taskdef) antProject.createTask("taskdef"); taskdef.setClassname(VerifyJNLP.class.getName()); taskdef.setName("verifyjnlp"); taskdef.execute(); // +p try { final File webstartBuildDir = new File( outputDirectory + File.separator + "webstart" + File.separator + brandingToken); if (webstartBuildDir.exists()) { FileUtils.deleteDirectory(webstartBuildDir); } webstartBuildDir.mkdirs(); // P: copy webappResources --[ MavenResourcesExecution mavenResourcesExecution = new MavenResourcesExecution(webappResources, webstartBuildDir, project, encoding, Collections.EMPTY_LIST, Collections.EMPTY_LIST, session); mavenResourcesExecution.setEscapeWindowsPaths(true); mavenResourcesFiltering.filterResources(mavenResourcesExecution); // ]-- final String localCodebase = codebase != null ? 
codebase : webstartBuildDir.toURI().toString(); getLog().info("Generating webstartable binaries at " + webstartBuildDir.getAbsolutePath()); final File nbmBuildDirFile = new File(outputDirectory, brandingToken); // +p (needs to be before make jnlp) //TODO is it really netbeans/ if (masterJnlpFileName == null) { masterJnlpFileName = brandingToken; } Properties props = new Properties(); props.setProperty("jnlp.codebase", localCodebase); props.setProperty("app.name", brandingToken); props.setProperty("app.title", project.getName()); if (project.getOrganization() != null) { props.setProperty("app.vendor", project.getOrganization().getName()); } else { props.setProperty("app.vendor", "Nobody"); } String description = project.getDescription() != null ? project.getDescription() : "No Project Description"; props.setProperty("app.description", description); props.setProperty("branding.token", brandingToken); props.setProperty("master.jnlp.file.name", masterJnlpFileName); props.setProperty("netbeans.jnlp.fixPolicy", "false"); StringBuilder stBuilder = new StringBuilder(); if (additionalArguments != null) { StringTokenizer st = new StringTokenizer(additionalArguments); while (st.hasMoreTokens()) { String arg = st.nextToken(); if (arg.startsWith("-J")) { if (stBuilder.length() > 0) { stBuilder.append(' '); } stBuilder.append(arg.substring(2)); } } } props.setProperty("netbeans.run.params", stBuilder.toString()); final File masterJnlp = new File(webstartBuildDir, masterJnlpFileName + ".jnlp"); filterCopy(masterJnlpFile, "master.jnlp", masterJnlp, props); if (generateJnlpTimestamp) // \/\/\/\/ bad bad bad \/\/\/\/ { final File masterJnlpFileTmp = File.createTempFile(masterJnlpFileName + "_", ""); Files.append(JnlpUtils.getCurrentJnlpTimestamp() + "\n", masterJnlpFileTmp, Charset.forName("UTF-8")); ByteSink sink = Files.asByteSink(masterJnlpFileTmp, FileWriteMode.APPEND); sink.write(Files.toByteArray(masterJnlp)); Files.copy(masterJnlpFileTmp, masterJnlp); } File startup = copyLauncher(outputDirectory, nbmBuildDirFile); String masterJnlpStr = FileUtils.fileRead(masterJnlp); // P: JNLP-INF/APPLICATION_TEMPLATE.JNLP support --[ // this can be done better and will // ashamed if (generateJnlpApplicationTemplate) { File jnlpInfDir = new File(outputDirectory, "JNLP-INF"); getLog().info("Generate JNLP application template under: " + jnlpInfDir); jnlpInfDir.mkdirs(); File jnlpTemplate = new File(jnlpInfDir, "APPLICATION_TEMPLATE.JNLP"); masterJnlpStr = masterJnlpStr.replaceAll("(<jnlp.*codebase\\ *=\\ *)\"((?!\").)*", "$1\"*") .replaceAll("(<jnlp.*href\\ *=\\ *)\"((?!\").)*", "$1\"*"); FileUtils.fileWrite(jnlpTemplate, masterJnlpStr); File startupMerged = new File(outputDirectory, "startup-jnlpinf.jar"); Jar jar = (Jar) antProject.createTask("jar"); jar.setDestFile(startupMerged); jar.setFilesetmanifest((FilesetManifestConfig) EnumeratedAttribute .getInstance(FilesetManifestConfig.class, "merge")); FileSet jnlpInfDirectoryFileSet = new FileSet(); jnlpInfDirectoryFileSet.setDir(outputDirectory); jnlpInfDirectoryFileSet.setIncludes("JNLP-INF/**"); jar.addFileset(jnlpInfDirectoryFileSet); ZipFileSet startupJar = new ZipFileSet(); startupJar.setSrc(startup); jar.addZipfileset(startupJar); jar.execute(); startup = startupMerged; getLog().info("APPLICATION_TEMPLATE.JNLP generated - startup.jar: " + startup); } final JarsConfig startupConfig = new JarsConfig(); ManifestEntries startupManifestEntries = new ManifestEntries(); startupConfig.setManifestEntries(startupManifestEntries); DocumentBuilderFactory factory = 
DocumentBuilderFactory.newInstance(); if (!validateJnlpDtd) { factory.setValidating(false); factory.setNamespaceAware(true); factory.setFeature("http://xml.org/sax/features/namespaces", false); factory.setFeature("http://xml.org/sax/features/validation", false); factory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false); factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); } DocumentBuilder builder = factory.newDocumentBuilder(); final BufferedReader masterJnlpStrReader = new BufferedReader(new StringReader(masterJnlpStr)); if (generateJnlpTimestamp) { masterJnlpStrReader.readLine(); } Document doc = builder.parse(new InputSource(masterJnlpStrReader)); Element jnlpRoot = doc.getDocumentElement(); jarCodebase = jnlpRoot.getAttribute("codebase"); if (jarCodebase.isEmpty()) { jarCodebase = "*"; } startupManifestEntries.setCodebase(jarCodebase); XPath xpath = XPathFactory.newInstance().newXPath(); Node jnlpSecurityPermission = (Node) xpath.evaluate( "(/jnlp/security/all-permissions | /jnlp/security/j2ee-application-client-permissions)[1]", doc, XPathConstants.NODE); if (jnlpSecurityPermission == null) { jarPermissions = "sandbox"; jnlpSecurity = ""; } else { jarPermissions = "all-permissions"; jnlpSecurity = "<security><" + jnlpSecurityPermission.getNodeName() + "/></security>"; } startupManifestEntries.setPermissions(jarPermissions); if (applicationName == null) { String jnlpApplicationTitle = (String) xpath.evaluate("/jnlp/information/title", doc, XPathConstants.STRING); applicationName = jnlpApplicationTitle == null ? brandingToken : jnlpApplicationTitle; } startupManifestEntries.setApplicationName(applicationName); // +p if (autoManifestSecurityEntries) { if (jarsConfigs == null) { jarsConfigs = new ArrayList<JarsConfig>(); } jarsConfigs.add(0, startupConfig); } final List<SignJar.JarsConfig> signJarJarsConfigs = buildSignJarJarsConfigs(jarsConfigs); File jnlpDestination = new File(webstartBuildDir.getAbsolutePath() + File.separator + "startup.jar"); SignJar signTask = (SignJar) antProject.createTask("signjar"); signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); signTask.setAlias(keystorealias); if (keystoretype != null) { signTask.setStoretype(keystoretype); } signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setUnsignFirst(signingRemoveExistingSignatures); signTask.setJarsConfigs(buildSignJarJarsConfigs(Collections.singletonList(startupConfig))); signTask.setBasedir(nbmBuildDirFile); signTask.setSignedjar(jnlpDestination); signTask.setJar(startup); signTask.setPack200(pack200); signTask.setPack200Effort(pack200Effort); signTask.execute(); // <-- all of this will be refactored soon ]-- // FileUtils.copyDirectoryStructureIfModified( nbmBuildDirFile, webstartBuildDir ); MakeJnlp2 jnlpTask = (MakeJnlp2) antProject.createTask("makejnlp"); jnlpTask.setOptimize(optimize); jnlpTask.setIncludelocales(includeLocales); jnlpTask.setDir(webstartBuildDir); jnlpTask.setCodebase(localCodebase); //TODO, how to figure verify excludes.. 
jnlpTask.setVerify(false); jnlpTask.setPermissions(jnlpSecurity); jnlpTask.setSignJars(true); jnlpTask.setAlias(keystorealias); jnlpTask.setKeystore(keystore); jnlpTask.setStorePass(keystorepassword); if (keystoretype != null) { jnlpTask.setStoreType(keystoretype); } jnlpTask.setSigningForce(signingForce); jnlpTask.setSigningTsaCert(signingTsaCert); jnlpTask.setSigningTsaUrl(signingTsaUrl); jnlpTask.setUnsignFirst(signingRemoveExistingSignatures); jnlpTask.setJarsConfigs(signJarJarsConfigs); jnlpTask.setSigningMaxMemory(signingMaxMemory); jnlpTask.setSigningRetryCount(signingRetryCount); jnlpTask.setBasedir(nbmBuildDirFile); jnlpTask.setNbThreads(signingThreads); jnlpTask.setProcessJarVersions(processJarVersions); jnlpTask.setPack200(pack200); jnlpTask.setPack200Effort(pack200Effort); FileSet fs = jnlpTask.createModules(); fs.setDir(nbmBuildDirFile); OrSelector or = new OrSelector(); AndSelector and = new AndSelector(); FilenameSelector inc = new FilenameSelector(); inc.setName("*/modules/**/*.jar"); or.addFilename(inc); inc = new FilenameSelector(); inc.setName("*/lib/**/*.jar"); or.addFilename(inc); inc = new FilenameSelector(); inc.setName("*/core/**/*.jar"); or.addFilename(inc); ModuleSelector ms = new ModuleSelector(); Parameter included = new Parameter(); included.setName("includeClusters"); included.setValue(""); Parameter excluded = new Parameter(); excluded.setName("excludeClusters"); excluded.setValue(""); Parameter exModules = new Parameter(); exModules.setName("excludeModules"); exModules.setValue(""); ms.setParameters(new Parameter[] { included, excluded, exModules }); and.add(or); and.add(ms); fs.addAnd(and); jnlpTask.execute(); Set<String> locales = jnlpTask.getExecutedLocales(); String extSnippet = generateExtensions(fs, antProject, ""); // "netbeans/" //branding DirectoryScanner ds = new DirectoryScanner(); ds.setBasedir(nbmBuildDirFile); final List<String> localeIncludes = new ArrayList<String>(); final List<String> localeExcludes = new ArrayList<String>(); localeIncludes.add("**/locale/*.jar"); if (includeLocales != null) { List<String> excludes = Splitter.on(',').trimResults().omitEmptyStrings() .splitToList(includeLocales); for (String exclude : (Collection<String>) CollectionUtils.subtract(locales, excludes)) { localeExcludes.add("**/locale/*_" + exclude + ".jar"); } } ds.setIncludes(localeIncludes.toArray(new String[localeIncludes.size()])); ds.setExcludes(localeExcludes.toArray(new String[localeExcludes.size()])); ds.scan(); String[] includes = ds.getIncludedFiles(); StringBuilder brandRefs = new StringBuilder( "<property name=\"jnlp.packEnabled\" value=\"" + String.valueOf(pack200) + "\"/>\n"); if (includes != null && includes.length > 0) { final File brandingDir = new File(webstartBuildDir, "branding"); brandingDir.mkdirs(); for (String incBran : includes) { File source = new File(nbmBuildDirFile, incBran); File dest = new File(brandingDir, source.getName()); brandRefs.append(" <jar href=\'branding/").append(dest.getName()).append("\'/>\n"); } final ExecutorService executorService = Executors.newFixedThreadPool(signingThreads); final List<Exception> threadException = new ArrayList<Exception>(); for (final String toSign : includes) { executorService.execute(new Runnable() { @Override public void run() { try { File toSignFile = new File(nbmBuildDirFile, toSign); SignJar signTask = (SignJar) antProject.createTask("signjar"); if (keystoretype != null) { signTask.setStoretype(keystoretype); } signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); 
signTask.setAlias(keystorealias); signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setUnsignFirst(signingRemoveExistingSignatures); signTask.setJarsConfigs(signJarJarsConfigs); signTask.setJar(toSignFile); signTask.setDestDir(brandingDir); signTask.setBasedir(nbmBuildDirFile); signTask.setDestFlatten(true); signTask.setPack200(pack200); signTask.setPack200Effort(pack200Effort); signTask.execute(); } catch (Exception e) { threadException.add(e); } } }); } executorService.shutdown(); executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); if (!threadException.isEmpty()) { throw threadException.get(0); } } File modulesJnlp = new File(webstartBuildDir.getAbsolutePath() + File.separator + "modules.jnlp"); props.setProperty("jnlp.branding.jars", brandRefs.toString()); props.setProperty("jnlp.resources", extSnippet); filterCopy(null, /* filename is historical */"branding.jnlp", modulesJnlp, props); if (verifyJnlp) { getLog().info("Verifying generated webstartable content."); VerifyJNLP verifyTask = (VerifyJNLP) antProject.createTask("verifyjnlp"); FileSet verify = new FileSet(); verify.setFile(masterJnlp); verifyTask.addConfiguredFileset(verify); verifyTask.execute(); } // create zip archive if (destinationFile.exists()) { destinationFile.delete(); } ZipArchiver archiver = new ZipArchiver(); if (codebase != null) { getLog().warn("Defining <codebase>/${nbm.webstart.codebase} is generally unnecessary"); archiver.addDirectory(webstartBuildDir); } else { archiver.addDirectory(webstartBuildDir, null, new String[] { "**/*.jnlp" }); for (final File jnlp : webstartBuildDir.listFiles()) { if (!jnlp.getName().endsWith(".jnlp")) { continue; } archiver.addResource(new PlexusIoResource() { public @Override InputStream getContents() throws IOException { return new ByteArrayInputStream(FileUtils.fileRead(jnlp, "UTF-8") .replace(localCodebase, "$$codebase").getBytes("UTF-8")); } public @Override long getLastModified() { return jnlp.lastModified(); } public @Override boolean isExisting() { return true; } public @Override long getSize() { return UNKNOWN_RESOURCE_SIZE; } public @Override URL getURL() throws IOException { return null; } public @Override String getName() { return jnlp.getAbsolutePath(); } public @Override boolean isFile() { return true; } public @Override boolean isDirectory() { return false; } }, jnlp.getName(), archiver.getDefaultFileMode()); } } File jdkhome = new File(System.getProperty("java.home")); File servlet = new File(jdkhome, "sample/jnlp/servlet/jnlp-servlet.jar"); if (!servlet.exists()) { servlet = new File(jdkhome.getParentFile(), "sample/jnlp/servlet/jnlp-servlet.jar"); if (!servlet.exists()) { servlet = File.createTempFile("nbm_", "jnlp-servlet.jar"); FileUtils.copyURLToFile( Thread.currentThread().getContextClassLoader().getResource("jnlp-servlet.jar"), servlet); } } if (servlet.exists()) { File servletDir = new File(webstartBuildDir, "WEB-INF/lib"); servletDir.mkdirs(); signTask = (SignJar) antProject.createTask("signjar"); signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); signTask.setAlias(keystorealias); signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setJar(servlet); signTask.setSignedjar(new File(servletDir, "jnlp-servlet.jar")); 
signTask.execute(); //archiver.addFile( servlet, "WEB-INF/lib/jnlp-servlet.jar" ); archiver.addResource(new PlexusIoResource() { public @Override InputStream getContents() throws IOException { return new ByteArrayInputStream(("" + "<web-app>\n" + " <servlet>\n" + " <servlet-name>JnlpDownloadServlet</servlet-name>\n" + " <servlet-class>jnlp.sample.servlet.JnlpDownloadServlet</servlet-class>\n" + " </servlet>\n" + " <servlet-mapping>\n" + " <servlet-name>JnlpDownloadServlet</servlet-name>\n" + " <url-pattern>*.jnlp</url-pattern>\n" + " </servlet-mapping>\n" + " <servlet-mapping>\n" + " <servlet-name>JnlpDownloadServlet</servlet-name>\n" + " <url-pattern>*.jar</url-pattern>\n" + " </servlet-mapping>\n" + " <mime-mapping>\n" + " <extension>jnlp</extension>\n" + " <mime-type>application/x-java-jnlp-file</mime-type>\n" + " </mime-mapping>\n" + "</web-app>\n").getBytes()); } public @Override long getLastModified() { return UNKNOWN_MODIFICATION_DATE; } public @Override boolean isExisting() { return true; } public @Override long getSize() { return UNKNOWN_RESOURCE_SIZE; } public @Override URL getURL() throws IOException { return null; } public @Override String getName() { return "web.xml"; } public @Override boolean isFile() { return true; } public @Override boolean isDirectory() { return false; } }, "WEB-INF/web.xml", archiver.getDefaultFileMode()); } archiver.setDestFile(destinationFile); archiver.createArchive(); if (signWar) { signTask = (SignJar) antProject.createTask("signjar"); signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); signTask.setAlias(keystorealias); signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setJar(destinationFile); signTask.execute(); } // attach standalone so that it gets installed/deployed projectHelper.attachArtifact(project, "war", webstartClassifier, destinationFile); } catch (Exception ex) { throw new MojoExecutionException("Error creating webstartable binary.", ex); } }
From source file:org.netbeans.nbbuild.MakeJnlp2.java
private void generateFiles() throws IOException, BuildException { final Set<String> declaredLocales = new HashSet<String>(); final boolean useAllLocales; if ("*".equals(includelocales)) { useAllLocales = true;/* w w w.j a v a 2 s . c o m*/ } else if ("".equals(includelocales)) { useAllLocales = false; } else { useAllLocales = false; StringTokenizer tokenizer = new StringTokenizer(includelocales, ","); while (tokenizer.hasMoreElements()) { declaredLocales.add(tokenizer.nextToken()); } } final Set<String> indirectFilePaths = new HashSet<String>(); for (FileSet fs : new FileSet[] { indirectJars, indirectFiles }) { if (fs != null) { DirectoryScanner scan = fs.getDirectoryScanner(getProject()); for (String f : scan.getIncludedFiles()) { indirectFilePaths.add(f.replace(File.pathSeparatorChar, '/')); } } } final ExecutorService executorService = Executors.newFixedThreadPool(nbThreads); final List<BuildException> exceptions = new ArrayList<BuildException>(); for (final Iterator fileIt = files.iterator(); fileIt.hasNext();) { if (!exceptions.isEmpty()) { break; } final FileResource fr = (FileResource) fileIt.next(); final File jar = fr.getFile(); if (!jar.canRead()) { throw new BuildException("Cannot read file: " + jar); } // if (optimize && checkDuplicate(jar).isPresent()) { continue; } // executorService.execute(new Runnable() { @Override public void run() { JarFile theJar = null; try { theJar = new JarFile(jar); String codenamebase = JarWithModuleAttributes .extractCodeName(theJar.getManifest().getMainAttributes()); if (codenamebase == null) { throw new BuildException("Not a NetBeans Module: " + jar); } { int slash = codenamebase.indexOf('/'); if (slash >= 0) { codenamebase = codenamebase.substring(0, slash); } } String dashcnb = codenamebase.replace('.', '-'); String title; String oneline; String shrt; String osDep = null; { String bundle = theJar.getManifest().getMainAttributes() .getValue("OpenIDE-Module-Localizing-Bundle"); Properties prop = new Properties(); if (bundle != null) { ZipEntry en = theJar.getEntry(bundle); if (en == null) { throw new BuildException("Cannot find entry: " + bundle + " in file: " + jar); } InputStream is = theJar.getInputStream(en); prop.load(is); is.close(); } title = prop.getProperty("OpenIDE-Module-Name", codenamebase); oneline = prop.getProperty("OpenIDE-Module-Short-Description", title); shrt = prop.getProperty("OpenIDE-Module-Long-Description", oneline); } { String osMan = theJar.getManifest().getMainAttributes() .getValue("OpenIDE-Module-Requires"); if (osMan != null) { if (osMan.indexOf("org.openide.modules.os.MacOSX") >= 0) { // NOI18N osDep = "Mac OS X"; // NOI18N } else if (osMan.indexOf("org.openide.modules.os.Linux") >= 0) { // NOI18N osDep = "Linux"; // NOI18N } else if (osMan.indexOf("org.openide.modules.os.Solaris") >= 0) { // NOI18N osDep = "Solaris"; // NOI18N } else if (osMan.indexOf("org.openide.modules.os.Windows") >= 0) { // NOI18N osDep = "Windows"; // NOI18N } } } Map<String, List<File>> localizedFiles = verifyExtensions(jar, theJar.getManifest(), dashcnb, codenamebase, verify, indirectFilePaths); executedLocales = localizedFiles.keySet(); new File(targetFile, dashcnb).mkdir(); File signed = new File(new File(targetFile, dashcnb), jar.getName()); // +p final JarConfigResolved jarConfig = signOrCopy(jar, signed); File jnlp = new File(targetFile, dashcnb + ".jnlp"); StringWriter writeJNLP = new StringWriter(); writeJNLP.write("<?xml version='1.0' encoding='UTF-8'?>\n"); writeJNLP.write( "<!DOCTYPE jnlp PUBLIC \"-//Sun Microsystems, Inc//DTD 
JNLP Descriptor 6.0//EN\" \"http://java.sun.com/dtd/JNLP-6.0.dtd\">\n"); writeJNLP.write("<jnlp spec='1.0+' codebase='" + codebase + "'>\n"); writeJNLP.write(" <information>\n"); writeJNLP.write(" <title>" + XMLUtil.toElementContent(title) + "</title>\n"); writeJNLP.write(" <vendor>NetBeans</vendor>\n"); writeJNLP.write(" <description kind='one-line'>" + XMLUtil.toElementContent(oneline) + "</description>\n"); writeJNLP.write(" <description kind='short'>" + XMLUtil.toElementContent(shrt) + "</description>\n"); writeJNLP.write(" </information>\n"); String realPermissions = permissions; if ((jarConfig != null) && (jarConfig.getExtraManifestAttributes() != null)) { String jarPermissions = jarConfig.getExtraManifestAttributes().getValue("Permissions"); if (jarPermissions != null) { if ("all-permissions".equals(jarPermissions)) { realPermissions = "<security><all-permissions/></security>\n"; } else { realPermissions = ""; } } } writeJNLP.write(realPermissions); if (osDep == null) { writeJNLP.write(" <resources>\n"); } else { writeJNLP.write(" <resources os='" + osDep + "'>\n"); } writeJNLP.write("<property name=\"jnlp.packEnabled\" value=\"" + String.valueOf(pack200) + "\"/>\n"); writeJNLP.write(constructJarHref(jar, dashcnb)); processExtensions(jar, theJar.getManifest(), writeJNLP, dashcnb, codebase, realPermissions); processIndirectJars(writeJNLP, dashcnb); processIndirectFiles(writeJNLP, dashcnb); writeJNLP.write(" </resources>\n"); if (useAllLocales || !declaredLocales.isEmpty()) { // write down locales for (Map.Entry<String, List<File>> e : localizedFiles.entrySet()) { final String locale = e.getKey(); if (!declaredLocales.isEmpty() && !declaredLocales.contains(locale)) { continue; } final List<File> allFiles = e.getValue(); writeJNLP.write(" <resources locale='" + locale + "'>\n"); for (File n : allFiles) { log("generating locale " + locale + " for " + n, Project.MSG_VERBOSE); String name = n.getName(); String clusterRootPrefix = jar.getParent() + File.separatorChar; String absname = n.getAbsolutePath(); if (absname.startsWith(clusterRootPrefix)) { name = absname.substring(clusterRootPrefix.length()) .replace(File.separatorChar, '-'); } File t = new File(new File(targetFile, dashcnb), name); signOrCopy(n, t); writeJNLP.write(constructJarHref(n, dashcnb, name)); } writeJNLP.write(" </resources>\n"); } } writeJNLP.write(" <component-desc/>\n"); writeJNLP.write("</jnlp>\n"); writeJNLP.close(); // +p Files.write(writeJNLP.toString(), jnlp, Charset.forName("UTF-8")); } catch (Exception e) { exceptions.add(new BuildException(e)); } finally { if (theJar != null) { try { theJar.close(); } catch (IOException e) { } } } } }); } executorService.shutdown(); try { executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); } catch (Exception e) { throw new BuildException(e); } if (!exceptions.isEmpty()) { throw exceptions.get(0); } }
From source file:com.jayway.maven.plugins.android.AbstractAndroidMojo.java
/**
 * Performs the callback action on the devices determined by
 * {@link #shouldDoWithThisDevice(com.android.ddmlib.IDevice)}
 *
 * @param deviceCallback the action to perform on each device
 * @throws org.apache.maven.plugin.MojoExecutionException in case there is a problem
 * @throws org.apache.maven.plugin.MojoFailureException in case there is a problem
 */
protected void doWithDevices(final DeviceCallback deviceCallback)
        throws MojoExecutionException, MojoFailureException {
    final AndroidDebugBridge androidDebugBridge = initAndroidDebugBridge();

    if (!androidDebugBridge.isConnected()) {
        throw new MojoExecutionException("Android Debug Bridge is not connected.");
    }

    waitForInitialDeviceList(androidDebugBridge);
    List<IDevice> devices = Arrays.asList(androidDebugBridge.getDevices());
    int numberOfDevices = devices.size();
    getLog().debug("Found " + numberOfDevices + " devices connected with the Android Debug Bridge");
    if (devices.size() == 0) {
        throw new MojoExecutionException("No online devices attached.");
    }

    int threadCount = getDeviceThreads();
    if (getDeviceThreads() == 0) {
        getLog().info("android.devicesThreads parameter not set, using a thread for each attached device");
        threadCount = numberOfDevices;
    } else {
        getLog().info("android.devicesThreads parameter set to " + getDeviceThreads());
    }

    boolean shouldRunOnAllDevices = getDevices().size() == 0;
    if (shouldRunOnAllDevices) {
        getLog().info("android.devices parameter not set, using all attached devices");
    } else {
        getLog().info("android.devices parameter set to " + getDevices().toString());
    }

    ArrayList<DoThread> doThreads = new ArrayList<DoThread>();
    ExecutorService executor = Executors.newFixedThreadPool(threadCount);
    for (final IDevice idevice : devices) {
        if (shouldRunOnAllDevices) {
            String deviceType = idevice.isEmulator() ? "Emulator " : "Device ";
            getLog().info(deviceType + DeviceHelper.getDescriptiveName(idevice) + " found.");
        }
        if (shouldRunOnAllDevices || shouldDoWithThisDevice(idevice)) {
            DoThread deviceDoThread = new DoThread() {
                public void runDo() throws MojoFailureException, MojoExecutionException {
                    deviceCallback.doWithDevice(idevice);
                }
            };
            doThreads.add(deviceDoThread);
            executor.execute(deviceDoThread);
        }
    }
    executor.shutdown();
    while (!executor.isTerminated()) {
        // waiting for threads to finish
    }
    throwAnyDoThreadErrors(doThreads);

    if (!shouldRunOnAllDevices && doThreads.isEmpty()) {
        throw new MojoExecutionException("No device found for android.device=" + getDevices().toString());
    }
}