List of usage examples for java.util.concurrent Executors newSingleThreadExecutor
public static ExecutorService newSingleThreadExecutor(ThreadFactory threadFactory)
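Before the project examples, a minimal, self-contained sketch of this overload: the custom ThreadFactory gives the single worker thread a recognizable name (useful in thread dumps), and the executor is shut down once its work is submitted. The class, thread, and message names here are illustrative, not taken from any of the sources below.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

public class SingleThreadExecutorDemo {
    public static void main(String[] args) throws Exception {
        // Name the single worker thread so it is easy to spot in thread dumps.
        ThreadFactory factory = new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                return new Thread(r, "demo-single-worker");
            }
        };
        ExecutorService executor = Executors.newSingleThreadExecutor(factory);
        try {
            // Tasks submitted here run sequentially, all on the one named thread.
            executor.submit(() -> System.out.println(Thread.currentThread().getName() + " running"));
        } finally {
            executor.shutdown();
        }
    }
}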
From source file:de.fu_berlin.inf.dpp.net.internal.StreamServiceManager.java
protected void startThreads() {
    sender = new PacketSender();
    Utils.runSafeAsync("StreamServiceManagers-senderThread", log, sender);
    receiver = new PacketReceiver();
    Utils.runSafeAsync("StreamServiceManagers-receiverThread", log, receiver);
    stopSessionExecutor = Executors.newScheduledThreadPool(5, new NamedThreadFactory("StreamSessionStopper-"));
    sessionDispatcher = Executors.newSingleThreadExecutor(new NamedThreadFactory("StreamSessionDispatcher-"));
    negotiatesToUser = Executors.newSingleThreadExecutor(new NamedThreadFactory("StreamSessionNegotiationUser-"));
    negotiations = Executors.newFixedThreadPool(5, new NamedThreadFactory("StreamSessionNegotiation-"));
}
From source file:gov.va.isaac.mojos.profileSync.ProfilesMojoBase.java
protected String getPassword() throws MojoExecutionException {
    if (password == null) {
        password = System.getProperty(PROFILE_SYNC_PASSWORD_PROPERTY);

        // still blank, try the passed in param
        if (StringUtils.isBlank(password)) {
            password = profileSyncPassword;
        }

        // still no password, prompt if allowed
        if (StringUtils.isBlank(password) && !Boolean.getBoolean(PROFILE_SYNC_NO_PROMPTS)) {
            Callable<Void> callable = new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    try {
                        if (!disableHintGiven) {
                            System.out.println("To disable remote sync during build, add '-D"
                                    + PROFILE_SYNC_DISABLE + "=true' to your maven command");
                            disableHintGiven = true;
                        }
                        System.out.println("Enter the " + config_.getChangeSetUrlType().name()
                                + " password for the Profiles/Changset remote store: ("
                                + config_.getChangeSetUrl() + "):");

                        // Use console if available, for password masking
                        Console console = System.console();
                        if (console != null) {
                            password = new String(console.readPassword());
                        } else {
                            BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
                            password = br.readLine();
                        }
                    } catch (IOException e) {
                        throw new MojoExecutionException("Error reading password from console");
                    }
                    return null;
                }
            };
            try {
                Executors.newSingleThreadExecutor(new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        Thread t = new Thread(r, "User Password Prompt Thread");
                        t.setDaemon(true);
                        return t;
                    }
                }).submit(callable).get(2, TimeUnit.MINUTES);
            } catch (TimeoutException | InterruptedException e) {
                throw new MojoExecutionException("Password not provided within timeout");
            } catch (ExecutionException ee) {
                throw (ee.getCause() instanceof MojoExecutionException
                        ? (MojoExecutionException) ee.getCause()
                        : new MojoExecutionException("Unexpected", ee.getCause()));
            }
        }
    }
    return password;
}
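The example above bounds a blocking console read with a two-minute timeout: the read runs on a single daemon thread, and the caller waits on the Future. Marking the thread as a daemon matters, because if the timeout fires and the read is abandoned, the stuck thread cannot keep the JVM alive. A minimal sketch of that pattern, with hypothetical class and thread names:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

class TimedPromptSketch {
    // Hypothetical helper distilled from the example above: read a line from
    // stdin, but give up after the given timeout.
    static String readLineWithTimeout(long timeout, TimeUnit unit) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor(r -> {
            Thread t = new Thread(r, "prompt-thread");
            t.setDaemon(true); // a daemon thread cannot keep the JVM alive if nobody answers
            return t;
        });
        try {
            Future<String> answer = executor.submit(
                    () -> new BufferedReader(new InputStreamReader(System.in)).readLine());
            return answer.get(timeout, unit); // throws TimeoutException if no input arrives in time
        } finally {
            executor.shutdownNow();
        }
    }
}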
From source file:com.dtolabs.rundeck.core.execution.impl.jsch.JschNodeExecutor.java
public NodeExecutorResult executeCommand(final ExecutionContext context, final String[] command,
        final INodeEntry node) {
    if (null == node.getHostname() || null == node.extractHostname()) {
        return NodeExecutorResultImpl.createFailure(StepFailureReason.ConfigurationFailure,
                "Hostname must be set to connect to remote node '" + node.getNodename() + "'", node);
    }
    final ExecutionListener listener = context.getExecutionListener();
    final Project project = new Project();
    AntSupport.addAntBuildListener(listener, project);
    boolean success = false;
    final ExtSSHExec sshexec;
    //perform jsch ssh command
    final NodeSSHConnectionInfo nodeAuthentication = new NodeSSHConnectionInfo(node, framework, context);
    final int timeout = nodeAuthentication.getSSHTimeout();
    try {
        sshexec = SSHTaskBuilder.build(node, command, project, context.getDataContext(),
                nodeAuthentication, context.getLoglevel(), listener);
    } catch (SSHTaskBuilder.BuilderException e) {
        return NodeExecutorResultImpl.createFailure(StepFailureReason.ConfigurationFailure,
                e.getMessage(), node);
    }

    //Sudo support
    final ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(null, r, "SudoResponder " + node.getNodename() + ": "
                    + System.currentTimeMillis());
        }
    });
    final Future<ResponderTask.ResponderResult> responderFuture;
    final SudoResponder sudoResponder = SudoResponder.create(node, framework, context);
    Runnable responderCleanup = null;
    if (sudoResponder.isSudoEnabled() && sudoResponder.matchesCommandPattern(command[0])) {
        final DisconnectResultHandler resultHandler = new DisconnectResultHandler();

        //configure two piped i/o stream pairs, to connect to the input/output of the SSH connection
        final PipedInputStream responderInput = new PipedInputStream();
        final PipedOutputStream responderOutput = new PipedOutputStream();
        final PipedInputStream jschInput = new PipedInputStream();
        //lead pipe allows connected inputstream to close and not hang the writer to this stream
        final PipedOutputStream jschOutput = new LeadPipeOutputStream();
        try {
            responderInput.connect(jschOutput);
            jschInput.connect(responderOutput);
        } catch (IOException e) {
            return NodeExecutorResultImpl.createFailure(StepFailureReason.IOFailure, e.getMessage(), node);
        }

        //first sudo prompt responder
        ResponderTask responder = new ResponderTask(sudoResponder, responderInput, responderOutput,
                resultHandler);

        /**
         * Callable will be executed by the ExecutorService
         */
        final Callable<ResponderTask.ResponderResult> responderResultCallable;

        //if 2nd responder
        final SudoResponder sudoResponder2 = SudoResponder.create(node, framework, context,
                SUDO2_OPT_PREFIX, DEFAULT_SUDO2_PASSWORD_OPTION, DEFAULT_SUDO2_COMMAND_PATTERN);
        if (sudoResponder2.isSudoEnabled()
                && sudoResponder2.matchesCommandPattern(CLIUtils.generateArgline(null, command, false))) {
            logger.debug("Enable second sudo responder");
            sudoResponder2.setDescription("Second " + SudoResponder.DEFAULT_DESCRIPTION);
            sudoResponder.setDescription("First " + SudoResponder.DEFAULT_DESCRIPTION);
            //sequence of the first then the second sudo responder
            responderResultCallable = responder.createSequence(sudoResponder2);
        } else {
            responderResultCallable = responder;
        }

        //set up SSH execution
        sshexec.setAllocatePty(true);
        sshexec.setInputStream(jschInput);
        sshexec.setSecondaryStream(jschOutput);
        sshexec.setDisconnectHolder(resultHandler);

        responderFuture = executor.submit(responderResultCallable);
        //close streams after responder is finished
        responderCleanup = new Runnable() {
            public void run() {
                logger.debug("SudoResponder shutting down...");
                try {
                    responderInput.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                try {
                    responderOutput.flush();
                    responderOutput.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                //executor pool shutdown
                executor.shutdownNow();
            }
        };
        executor.submit(responderCleanup);
    } else {
        responderFuture = null;
    }
    if (null != context.getExecutionListener()) {
        context.getExecutionListener().log(3, "Starting SSH Connection: "
                + nodeAuthentication.getUsername() + "@" + node.getHostname()
                + " (" + node.getNodename() + ")");
    }
    String errormsg = null;
    FailureReason failureReason = null;
    try {
        sshexec.execute();
        success = true;
    } catch (BuildException e) {
        final ExtractFailure extractJschFailure = extractFailure(e, node, timeout, framework);
        errormsg = extractJschFailure.getErrormsg();
        failureReason = extractJschFailure.getReason();
        context.getExecutionListener().log(0, errormsg);
    }
    if (null != responderCleanup) {
        responderCleanup.run();
    }
    shutdownAndAwaitTermination(executor);
    if (null != responderFuture) {
        try {
            logger.debug("Waiting 5 seconds for responder future result");
            final ResponderTask.ResponderResult result = responderFuture.get(5, TimeUnit.SECONDS);
            logger.debug("Responder result: " + result);
            if (!result.isSuccess() && !result.isInterrupted()) {
                context.getExecutionListener().log(0,
                        result.getResponder().toString() + " failed: " + result.getFailureReason());
            }
        } catch (InterruptedException e) {
            //ignore
        } catch (java.util.concurrent.ExecutionException e) {
            e.printStackTrace();
        } catch (TimeoutException e) {
            //ignore
        }
    }
    final int resultCode = sshexec.getExitStatus();
    if (success) {
        return NodeExecutorResultImpl.createSuccess(node);
    } else {
        return NodeExecutorResultImpl.createFailure(failureReason, errormsg, node, resultCode);
    }
}
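The example above relies on a shutdownAndAwaitTermination(executor) helper whose body is not shown. A common way to write such a helper is the two-phase shutdown pattern from the ExecutorService javadoc; the sketch below follows that pattern (the class name and 30-second timeouts are assumptions, and Rundeck's actual helper may differ):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

class ExecutorShutdownUtil {
    // Two-phase shutdown, per the java.util.concurrent.ExecutorService javadoc.
    static void shutdownAndAwaitTermination(ExecutorService pool) {
        pool.shutdown(); // disable submission of new tasks
        try {
            // wait a while for existing tasks to terminate
            if (!pool.awaitTermination(30, TimeUnit.SECONDS)) {
                pool.shutdownNow(); // cancel currently executing tasks
                // wait a while for tasks to respond to being cancelled
                if (!pool.awaitTermination(30, TimeUnit.SECONDS)) {
                    System.err.println("Pool did not terminate");
                }
            }
        } catch (InterruptedException ie) {
            // (re-)cancel if the current thread was also interrupted
            pool.shutdownNow();
            Thread.currentThread().interrupt(); // preserve interrupt status
        }
    }
}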
From source file:fr.efl.chaine.xslt.GauloisPipe.java
/**
 * Execute the specified pipe on the specified input files, using the
 * specified number of threads.
 *
 * @param pipe the pipe to execute
 * @param inputs the specified input files
 * @param nbThreads the specified number of threads
 * @param listener the listener to start, if not null
 * @return <tt>false</tt> if an error occurs while processing.
 */
private boolean executesPipeOnMultiThread(final Pipe pipe, List<ParametrableFile> inputs, int nbThreads,
        Listener listener) {
    ExecutorService service = (nbThreads == 1)
            ? Executors.newSingleThreadExecutor(getThreadFactory())
            : Executors.newFixedThreadPool(nbThreads, getThreadFactory());

    // a try to solve multi-thread compiling problem...
    // that's a pretty dirty hack, but just a try, to test...
    if (xslCache.isEmpty() && !inputs.isEmpty()) {
        // in the opposite case, there is only a listener, and probably the first
        // file will be processed alone...
        try {
            XsltTransformer transformer = buildTransformer(pipe, inputs.get(0).getFile(),
                    inputs.get(0).getFile().toURI().toURL().toExternalForm(),
                    ParametersMerger.merge(inputs.get(0).getParameters(), config.getParams()),
                    messageListener, null);
        } catch (IOException | InvalidSyntaxException | URISyntaxException | SaxonApiException ex) {
            String msg = "while pre-compiling for a multi-thread use...";
            LOGGER.error(msg);
            errors.add(new GauloisRunException(msg, ex));
        }
    }
    for (ParametrableFile pf : inputs) {
        final ParametrableFile fpf = pf;
        Runnable r = new Runnable() {
            @Override
            public void run() {
                try {
                    execute(pipe, fpf, messageListener);
                } catch (SaxonApiException | IOException | InvalidSyntaxException | URISyntaxException ex) {
                    String msg = "[" + instanceName + "] while processing " + fpf.getFile().getName();
                    LOGGER.error(msg, ex);
                    errors.add(new GauloisRunException(msg, fpf.getFile()));
                }
            }
        };
        service.execute(r);
    }
    if (listener == null) {
        // nothing more will be added
        service.shutdown();
        try {
            service.awaitTermination(5, TimeUnit.HOURS);
            return true;
        } catch (InterruptedException ex) {
            LOGGER.error("[" + instanceName + "] multi-thread processing interrupted, 5 hour limit exceed.");
            return false;
        }
    } else {
        ExecutionContext context = new ExecutionContext(this, pipe, messageListener, service);
        final HttpListener httpListener = new HttpListener(listener.getPort(), listener.getStopKeyword(),
                context);
        Runnable runner = new Runnable() {
            @Override
            public void run() {
                httpListener.run();
            }
        };
        new Thread(runner).start();
        return true;
    }
}
From source file:com.all.dht.database.StorageDaemon.java
public void start(MojitoDHT dht) {
    this.dht = dht;
    this.storableExecutor = Executors.newFixedThreadPool(dhtSettings.getMaxConcurrentStores(),
            new IncrementalNamedThreadFactory("DhtStorablePool"));
    this.storableQueue = new LinkedBlockingQueue<AllStorable>();
    this.failedStores = new HashMap<KUID, AtomicInteger>();
    this.process = Executors.newSingleThreadExecutor(new IncrementalNamedThreadFactory("StorageDaemonThread"));
    this.process.execute(this);
}
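Note the last line: the daemon submits itself to its single-thread executor, so the executor's one worker thread becomes the daemon's long-running loop (presumably draining storableQueue). A minimal sketch of that pattern, with hypothetical names:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;

public class QueueDaemonSketch implements Runnable {
    private final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
    private final ExecutorService worker =
            Executors.newSingleThreadExecutor(r -> new Thread(r, "queue-daemon"));

    void start() {
        worker.execute(this); // the executor's single thread becomes the daemon loop
    }

    @Override
    public void run() {
        try {
            while (!Thread.currentThread().isInterrupted()) {
                String item = queue.take(); // blocks until work arrives
                System.out.println("processing " + item);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // shutdownNow() interrupts take() and ends the loop
        }
    }

    void stop() {
        worker.shutdownNow();
    }
}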
From source file:com.streamsets.pipeline.stage.origin.jdbc.cdc.oracle.OracleCDCSource.java
@Override
public List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();
    errorRecordHandler = new DefaultErrorRecordHandler(getContext());
    useLocalBuffering = !getContext().isPreview() && configBean.bufferLocally;

    if (!hikariConfigBean.driverClassName.isEmpty()) {
        try {
            Class.forName(hikariConfigBean.driverClassName);
        } catch (ClassNotFoundException e) {
            LOG.error("Hikari Driver class not found.", e);
            issues.add(getContext().createConfigIssue(Groups.LEGACY.name(), DRIVER_CLASSNAME,
                    JdbcErrors.JDBC_28, e.toString()));
        }
    }
    issues = hikariConfigBean.validateConfigs(getContext(), issues);
    if (connection == null) { // For tests, we set a mock connection
        try {
            dataSource = jdbcUtil.createDataSourceForRead(hikariConfigBean);
            connection = dataSource.getConnection();
            connection.setAutoCommit(false);
        } catch (StageException | SQLException e) {
            LOG.error("Error while connecting to DB", e);
            issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00,
                    e.toString()));
            return issues;
        }
    }
    recordQueue = new LinkedBlockingQueue<>(2 * configBean.baseConfigBean.maxBatchSize);
    String container = configBean.pdb;
    List<SchemaAndTable> schemasAndTables;

    try {
        initializeStatements();
        alterSession();
    } catch (SQLException ex) {
        LOG.error("Error while creating statement", ex);
        issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00,
                hikariConfigBean.getConnectionString()));
    }
    zoneId = ZoneId.of(configBean.dbTimeZone);
    dateTimeColumnHandler = new DateTimeColumnHandler(zoneId);
    String commitScnField;
    BigDecimal scn = null;
    try {
        scn = getEndingSCN();
        switch (configBean.startValue) {
        case SCN:
            if (new BigDecimal(configBean.startSCN).compareTo(scn) > 0) {
                issues.add(getContext().createConfigIssue(CDC.name(), "oracleCDCConfigBean.startSCN",
                        JDBC_47, scn.toPlainString()));
            }
            break;
        case LATEST:
            // If LATEST is used, use now() as the startDate and proceed as if a startDate was specified
            configBean.startDate = nowAtDBTz().format(dateTimeColumnHandler.dateFormatter);
            // fall-through
        case DATE:
            try {
                LocalDateTime startDate = dateTimeColumnHandler.getDate(configBean.startDate);
                if (startDate.isAfter(nowAtDBTz())) {
                    issues.add(getContext().createConfigIssue(CDC.name(),
                            "oracleCDCConfigBean.startDate", JDBC_48));
                }
            } catch (DateTimeParseException ex) {
                LOG.error("Invalid date", ex);
                issues.add(getContext().createConfigIssue(CDC.name(), "oracleCDCConfigBean.startDate",
                        JDBC_49));
            }
            break;
        default:
            throw new IllegalStateException("Unknown start value!");
        }
    } catch (SQLException ex) {
        LOG.error("Error while getting SCN", ex);
        issues.add(getContext().createConfigIssue(CREDENTIALS.name(), USERNAME, JDBC_42));
    }

    try (Statement reusedStatement = connection.createStatement()) {
        int majorVersion = getDBVersion(issues);
        // If version is 12+, then the check for table presence must be done in an alternate container!
        if (majorVersion == -1) {
            return issues;
        }
        if (majorVersion >= 12) {
            if (!StringUtils.isEmpty(container)) {
                String switchToPdb = "ALTER SESSION SET CONTAINER = " + configBean.pdb;
                try {
                    reusedStatement.execute(switchToPdb);
                } catch (SQLException ex) {
                    LOG.error("Error while switching to container: " + container, ex);
                    issues.add(getContext().createConfigIssue(Groups.CREDENTIALS.name(), USERNAME,
                            JDBC_40, container));
                    return issues;
                }
                containerized = true;
            }
        }
        schemasAndTables = new ArrayList<>();
        for (SchemaTableConfigBean tables : configBean.baseConfigBean.schemaTableConfigs) {
            tables.schema = configBean.baseConfigBean.caseSensitive ? tables.schema
                    : tables.schema.toUpperCase();
            tables.table = configBean.baseConfigBean.caseSensitive ? tables.table
                    : tables.table.toUpperCase();
            if (tables.excludePattern != null) {
                tables.excludePattern = configBean.baseConfigBean.caseSensitive ? tables.excludePattern
                        : tables.excludePattern.toUpperCase();
            }
            Pattern p = StringUtils.isEmpty(tables.excludePattern) ? null
                    : Pattern.compile(tables.excludePattern);
            try (ResultSet rs = jdbcUtil.getTableAndViewMetadata(connection, tables.schema, tables.table)) {
                while (rs.next()) {
                    String schemaName = rs.getString(TABLE_METADATA_TABLE_SCHEMA_CONSTANT);
                    String tableName = rs.getString(TABLE_METADATA_TABLE_NAME_CONSTANT);
                    if (p == null || !p.matcher(tableName).matches()) {
                        schemaName = schemaName.trim();
                        tableName = tableName.trim();
                        schemasAndTables.add(new SchemaAndTable(schemaName, tableName));
                    }
                }
            }
        }
        validateTablePresence(reusedStatement, schemasAndTables, issues);
        if (!issues.isEmpty()) {
            return issues;
        }
        for (SchemaAndTable schemaAndTable : schemasAndTables) {
            try {
                tableSchemas.put(schemaAndTable, getTableSchema(schemaAndTable));
                if (scn != null) {
                    tableSchemaLastUpdate.put(schemaAndTable, scn);
                }
            } catch (SQLException ex) {
                LOG.error("Error while switching to container: " + container, ex);
                issues.add(getContext().createConfigIssue(Groups.CREDENTIALS.name(), USERNAME, JDBC_50));
            }
        }
        container = CDB_ROOT;
        if (majorVersion >= 12) {
            try {
                switchContainer.execute();
                LOG.info("Switched to CDB$ROOT to start LogMiner.");
            } catch (SQLException ex) {
                // Fatal only if we switched to a PDB earlier
                if (containerized) {
                    LOG.error("Error while switching to container: " + container, ex);
                    issues.add(getContext().createConfigIssue(Groups.CREDENTIALS.name(), USERNAME,
                            JDBC_40, container));
                    return issues;
                }
                // Log it anyway
                LOG.info("Switching containers failed, ignoring since there was no PDB switch", ex);
            }
        }
        commitScnField = majorVersion >= 11 ? "COMMIT_SCN" : "CSCN";
    } catch (SQLException ex) {
        LOG.error("Error while creating statement", ex);
        issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00,
                hikariConfigBean.getConnectionString()));
        return issues;
    }

    final String ddlTracking = shouldTrackDDL ? " + DBMS_LOGMNR.DDL_DICT_TRACKING" : "";
    final String readCommitted = useLocalBuffering ? "" : "+ DBMS_LOGMNR.COMMITTED_DATA_ONLY";
    this.logMinerProcedure = "BEGIN"
            + " DBMS_LOGMNR.START_LOGMNR("
            + " {},"
            + " {},"
            + " OPTIONS => DBMS_LOGMNR." + configBean.dictionary.name()
            + " + DBMS_LOGMNR.CONTINUOUS_MINE"
            + readCommitted
            + " + DBMS_LOGMNR.NO_SQL_DELIMITER"
            + ddlTracking
            + ");"
            + " END;";

    final String base = "SELECT SCN, USERNAME, OPERATION_CODE, TIMESTAMP, SQL_REDO, TABLE_NAME, "
            + commitScnField
            + ", SEQUENCE#, CSF, XIDUSN, XIDSLT, XIDSQN, RS_ID, SSN, SEG_OWNER, ROLLBACK, ROW_ID "
            + " FROM V$LOGMNR_CONTENTS"
            + " WHERE ";

    final String tableCondition = getListOfSchemasAndTables(schemasAndTables);

    final String commitRollbackCondition = Utils.format("OPERATION_CODE = {} OR OPERATION_CODE = {}",
            COMMIT_CODE, ROLLBACK_CODE);

    final String operationsCondition = "OPERATION_CODE IN (" + getSupportedOperations() + ")";

    final String restartNonBufferCondition = Utils.format("((" + commitScnField + " = ? AND SEQUENCE# > ?) OR "
            + commitScnField + " > ?)" + (shouldTrackDDL ? " OR (OPERATION_CODE = {} AND SCN > ?)" : ""),
            DDL_CODE);

    if (useLocalBuffering) {
        selectString = String.format("%s ((%s AND (%s)) OR (%s))", base, tableCondition,
                operationsCondition, commitRollbackCondition);
    } else {
        selectString = base + " (" + tableCondition + " AND (" + operationsCondition + "))"
                + "AND (" + restartNonBufferCondition + ")";
    }

    try {
        initializeLogMnrStatements();
    } catch (SQLException ex) {
        LOG.error("Error while creating statement", ex);
        issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00,
                hikariConfigBean.getConnectionString()));
    }

    if (configBean.dictionary == DictionaryValues.DICT_FROM_REDO_LOGS) {
        try {
            startLogMnrForRedoDict();
        } catch (Exception ex) {
            LOG.warn("Error while attempting to start LogMiner to load dictionary", ex);
            issues.add(getContext().createConfigIssue(Groups.CDC.name(), "oracleCDCConfigBean.dictionary",
                    JDBC_44, ex));
        }
    }

    if (useLocalBuffering && configBean.bufferLocation == BufferingValues.ON_DISK) {
        File tmpDir = new File(System.getProperty("java.io.tmpdir"));
        String relativePath = getContext().getSdcId() + "/" + getContext().getPipelineId() + "/"
                + getContext().getStageInfo().getInstanceName();
        this.txnBufferLocation = new File(tmpDir, relativePath);
        try {
            if (txnBufferLocation.exists()) {
                FileUtils.deleteDirectory(txnBufferLocation);
                LOG.info("Deleted " + txnBufferLocation.toString());
            }
            Files.createDirectories(txnBufferLocation.toPath());
            LOG.info("Created " + txnBufferLocation.toString());
        } catch (IOException ex) {
            Throwables.propagate(ex);
        }
    }

    if (configBean.bufferLocally) {
        if (configBean.parseQuery) {
            parsingExecutor = Executors.newFixedThreadPool(configBean.parseThreadPoolSize,
                    new ThreadFactoryBuilder().setNameFormat("Oracle CDC Origin Parse Thread - %d").build());
        } else {
            parsingExecutor = Executors.newSingleThreadExecutor(
                    new ThreadFactoryBuilder().setNameFormat("Oracle CDC Origin Parse Thread - %d").build());
        }
    }

    if (configBean.txnWindow >= configBean.logminerWindow) {
        issues.add(getContext().createConfigIssue(Groups.CDC.name(), "oracleCDCConfigBean.logminerWindow",
                JDBC_81));
    }
    version = useLocalBuffering ? VERSION_UNCOMMITTED : VERSION_STR;
    delay = getContext().createGauge("Read Lag (seconds)");
    return issues;
}
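One detail in this last example: the code chooses between newFixedThreadPool(parseThreadPoolSize, ...) and newSingleThreadExecutor(...) depending on configuration. Per the Executors javadoc, newSingleThreadExecutor is otherwise equivalent to newFixedThreadPool(1), except that the returned executor is wrapped so it cannot be reconfigured (for example by casting to ThreadPoolExecutor) to use additional threads. A sketch of that conditional construction, using Guava's ThreadFactoryBuilder as the example does (class, method, and thread names are hypothetical):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

class ParsingExecutorFactory {
    // Hypothetical standalone version of the pool-sizing choice in the example above.
    static ExecutorService buildParsingExecutor(boolean parallelParse, int poolSize) {
        ThreadFactory factory = new ThreadFactoryBuilder().setNameFormat("parse-thread-%d").build();
        return parallelParse
                ? Executors.newFixedThreadPool(poolSize, factory) // size chosen by configuration
                : Executors.newSingleThreadExecutor(factory);     // exactly one, non-reconfigurable thread
    }
}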