Usage examples for java.lang.InterruptedException.toString()
public String toString()
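Before the project examples below, here is a minimal, self-contained sketch of the pattern they all share: catch InterruptedException, log the text returned by toString(), and restore the thread's interrupt status. The class name and logger setup are hypothetical, for illustration only.

import java.util.logging.Logger;

public class InterruptedExceptionToStringDemo {
    private static final Logger LOG = Logger.getLogger("InterruptedExceptionToStringDemo");

    public static void main(String[] args) {
        Thread worker = new Thread(new Runnable() {
            public void run() {
                try {
                    Thread.sleep(10_000); // simulate long-running work
                } catch (InterruptedException e) {
                    // toString() yields e.g. "java.lang.InterruptedException: sleep interrupted"
                    LOG.warning("Worker interrupted: " + e.toString());
                    Thread.currentThread().interrupt(); // restore interrupt status
                }
            }
        });
        worker.start();
        worker.interrupt(); // trigger the InterruptedException in the worker
        try {
            worker.join();
        } catch (InterruptedException e) {
            LOG.warning("Join interrupted: " + e.toString());
        }
    }
}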
From source file: net.kidlogger.kidlogger.KLService.java

public void runShutdownEvent() {
    saveToPref("powerOff", true);
    try {
        Thread off = new Thread(new Runnable() {
            public void run() {
                sync.writeLog(".htm", Templates.getPowerLog(true));
            }
        });
        off.start();
        off.join();
    } catch (InterruptedException e) {
        app.logError(CN + "runShutdownEvent", e.toString());
    }
    // Stop service itself
    this.stopSelf();
}
From source file: net.kidlogger.kidlogger.KLService.java

private void logServiceState(final boolean start) {
    SharedPreferences pref = this.getSharedPreferences(PREF_NAME, 0);
    final boolean user = pref.getBoolean("user_control", false);
    //WriteThread wtls;
    if (user) {
        //wtls = new WriteThread(sync, ".htm",
        //        Templates.getServiceState(start, true));
        SharedPreferences.Editor svpr = this.getSharedPreferences(PREF_NAME, 0).edit();
        svpr.putBoolean("user_control", false);
        svpr.commit();
    }
    //else{
    //    wtls = new WriteThread(sync, ".htm",
    //            Templates.getServiceState(start, false));
    //}
    Thread ls = new Thread(new Runnable() {
        public void run() {
            sync.writeLog(".htm", Templates.getServiceState(start, user));
        }
    });
    if (start) {
        ls.start();
    } else {
        try {
            ls.start();
            ls.join();
        } catch (InterruptedException e) {
            app.logError(CN + "logServiceState", e.toString());
        }
    }
}
From source file: org.apache.ranger.audit.TestAuditQueue.java

@Test
public void testFileDestination() {
    logger.debug("testFileDestination()...");

    int messageToSend = 10;
    int batchSize = messageToSend / 3;
    int queueSize = messageToSend * 2;
    int intervalMS = 500; // Should be less than the final sleep time

    String logFolderName = "target/testFileDestination";
    File logFolder = new File(logFolderName);
    String logFileName = "test_ranger_audit.log";
    File logFile = new File(logFolder, logFileName);

    Properties props = new Properties();
    // Destination
    String filePropPrefix = AuditProviderFactory.AUDIT_DEST_BASE + ".file";
    props.put(filePropPrefix, "enable");
    props.put(filePropPrefix + "." + AuditQueue.PROP_NAME, "file");
    props.put(filePropPrefix + "." + FileAuditDestination.PROP_FILE_LOCAL_DIR, logFolderName);
    props.put(filePropPrefix + "." + FileAuditDestination.PROP_FILE_LOCAL_FILE_NAME_FORMAT,
            "%app-type%_ranger_audit.log");
    props.put(filePropPrefix + "." + FileAuditDestination.PROP_FILE_FILE_ROLLOVER, "" + 10);
    props.put(filePropPrefix + "." + AuditQueue.PROP_QUEUE, "batch");

    String batchPropPrefix = filePropPrefix + "." + "batch";
    props.put(batchPropPrefix + "." + AuditQueue.PROP_BATCH_SIZE, "" + batchSize);
    props.put(batchPropPrefix + "." + AuditQueue.PROP_QUEUE_SIZE, "" + queueSize);
    props.put(batchPropPrefix + "." + AuditQueue.PROP_BATCH_INTERVAL, "" + intervalMS);

    // Enable file spooling
    int destRetryMS = 10;
    props.put(batchPropPrefix + "." + AuditQueue.PROP_FILE_SPOOL_ENABLE, "" + true);
    props.put(batchPropPrefix + "." + AuditFileSpool.PROP_FILE_SPOOL_LOCAL_DIR, "target");
    props.put(batchPropPrefix + "." + AuditFileSpool.PROP_FILE_SPOOL_DEST_RETRY_MS, "" + destRetryMS);

    AuditProviderFactory factory = AuditProviderFactory.getInstance();
    factory.init(props, "test");

    // FileAuditDestination fileDest = new FileAuditDestination();
    // fileDest.init(props, filePropPrefix);
    // AuditBatchQueue queue = new AuditBatchQueue(fileDest);
    // queue.init(props, batchPropPrefix);
    // queue.start();

    AuditHandler queue = factory.getProvider();

    for (int i = 0; i < messageToSend; i++) {
        queue.log(createEvent());
    }
    // Let's wait for a second
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        // ignore
    }

    queue.waitToComplete();
    queue.stop();
    queue.waitToComplete();

    assertTrue("File created", logFile.exists());
    try {
        List<AuthzAuditEvent> eventList = new ArrayList<AuthzAuditEvent>();
        int totalSum = 0;
        BufferedReader br = new BufferedReader(new FileReader(logFile));
        String line;
        int lastSeq = -1;
        boolean outOfSeq = false;

        while ((line = br.readLine()) != null) {
            AuthzAuditEvent event = MiscUtil.fromJson(line, AuthzAuditEvent.class);
            eventList.add(event);
            totalSum += event.getEventCount();
            if (event.getSeqNum() <= lastSeq) {
                outOfSeq = true;
            }
        }
        br.close();
        assertEquals("Total count", messageToSend, eventList.size());
        assertEquals("Total sum", messageToSend, totalSum);
        assertFalse("Event not in sequence", outOfSeq);
    } catch (Throwable e) {
        logger.error("Error opening file for reading.", e);
        assertTrue("Error reading file. fileName=" + logFile + ", error=" + e.toString(), true);
    }
}
From source file: com.cloudera.sqoop.mapreduce.MySQLDumpMapper.java

/**
 * Import the table into HDFS by using mysqldump to pull out the data from
 * the database and upload the files directly to HDFS.
 */
public void map(String splitConditions, NullWritable val, Context context)
        throws IOException, InterruptedException {
    LOG.info("Beginning mysqldump fast path import");

    ArrayList<String> args = new ArrayList<String>();
    String tableName = conf.get(MySQLUtils.TABLE_NAME_KEY);

    // We need to parse the connect string URI to determine the database name.
    // Using java.net.URL directly on the connect string will fail because
    // Java doesn't respect arbitrary JDBC-based schemes. So we chop off the
    // scheme (everything before '://') and replace it with 'http', which we
    // know will work.
    String connectString = conf.get(MySQLUtils.CONNECT_STRING_KEY);
    String databaseName = JdbcUrl.getDatabaseName(connectString);
    String hostname = JdbcUrl.getHostName(connectString);
    int port = JdbcUrl.getPort(connectString);

    if (null == databaseName) {
        throw new IOException("Could not determine database name");
    }

    LOG.info("Performing import of table " + tableName + " from database " + databaseName);

    args.add(MySQLUtils.MYSQL_DUMP_CMD); // requires that this is on the path.

    String password = conf.get(MySQLUtils.PASSWORD_KEY);
    String passwordFile = null;

    Process p = null;
    AsyncSink sink = null;
    AsyncSink errSink = null;
    PerfCounters counters = new PerfCounters();
    try {
        // --defaults-file must be the first argument.
        if (null != password && password.length() > 0) {
            passwordFile = MySQLUtils.writePasswordFile(conf);
            args.add("--defaults-file=" + passwordFile);
        }

        // Don't use the --where="<whereClause>" version because spaces in it can
        // confuse Java, and adding in surrounding quotes confuses Java as well.
        String whereClause = conf.get(MySQLUtils.WHERE_CLAUSE_KEY, "(1=1)")
                + " AND (" + splitConditions + ")";
        args.add("-w");
        args.add(whereClause);

        args.add("--host=" + hostname);
        if (-1 != port) {
            args.add("--port=" + Integer.toString(port));
        }
        args.add("--skip-opt");
        args.add("--compact");
        args.add("--no-create-db");
        args.add("--no-create-info");
        args.add("--quick"); // no buffering
        args.add("--single-transaction");

        String username = conf.get(MySQLUtils.USERNAME_KEY);
        if (null != username) {
            args.add("--user=" + username);
        }

        // If the user supplied extra args, add them here.
        String[] extra = conf.getStrings(MySQLUtils.EXTRA_ARGS_KEY);
        if (null != extra) {
            for (String arg : extra) {
                args.add(arg);
            }
        }

        args.add(databaseName);
        args.add(tableName);

        // Begin the import in an external process.
        LOG.debug("Starting mysqldump with arguments:");
        for (String arg : args) {
            LOG.debug(" " + arg);
        }

        // Actually start the mysqldump.
        p = Runtime.getRuntime().exec(args.toArray(new String[0]));

        // Read from the stdout pipe into the HDFS writer.
        InputStream is = p.getInputStream();

        if (MySQLUtils.outputDelimsAreMySQL(conf)) {
            LOG.debug("Output delimiters conform to mysqldump; using straight copy");
            sink = new CopyingAsyncSink(context, counters);
        } else {
            LOG.debug("User-specified delimiters; using reparsing import");
            LOG.info("Converting data to use specified delimiters.");
            LOG.info("(For the fastest possible import, use");
            LOG.info("--mysql-delimiters to specify the same field");
            LOG.info("delimiters as are used by mysqldump.)");
            sink = new ReparsingAsyncSink(context, conf, counters);
        }

        // Start an async thread to read and upload the whole stream.
        counters.startClock();
        sink.processStream(is);

        // Start an async thread to send stderr to log4j.
        errSink = new LoggingAsyncSink(LOG);
        errSink.processStream(p.getErrorStream());
    } finally {
        // Block until the process is done.
        int result = 0;
        if (null != p) {
            while (true) {
                try {
                    result = p.waitFor();
                } catch (InterruptedException ie) {
                    // interrupted; loop around.
                    continue;
                }
                break;
            }
        }

        // Remove the password file.
        if (null != passwordFile) {
            if (!new File(passwordFile).delete()) {
                LOG.error("Could not remove mysql password file " + passwordFile);
                LOG.error("You should remove this file to protect your credentials.");
            }
        }

        // Block until the stream sink is done too.
        int streamResult = 0;
        if (null != sink) {
            while (true) {
                try {
                    streamResult = sink.join();
                } catch (InterruptedException ie) {
                    // interrupted; loop around.
                    continue;
                }
                break;
            }
        }

        // Try to wait for stderr to finish, but regard any errors as advisory.
        if (null != errSink) {
            try {
                if (0 != errSink.join()) {
                    LOG.info("Encountered exception reading stderr stream");
                }
            } catch (InterruptedException ie) {
                LOG.info("Thread interrupted waiting for stderr to complete: " + ie.toString());
            }
        }

        LOG.info("Transfer loop complete.");

        if (0 != result) {
            throw new IOException("mysqldump terminated with status " + Integer.toString(result));
        }

        if (0 != streamResult) {
            throw new IOException("Encountered exception in stream sink");
        }

        counters.stopClock();
        LOG.info("Transferred " + counters.toString());
    }
}
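Both Sqoop examples in this list wrap Process.waitFor() in a retry loop inside the finally block so that an interrupt can never skip cleanup. Here is a stripped-down sketch of just that idiom, isolated from the import logic; the "sleep 1" command is an arbitrary stand-in and assumes a Unix-like system.

import java.io.IOException;

public class UninterruptibleWaitFor {
    public static void main(String[] args) throws IOException {
        // Hypothetical child process; any short-lived command works here.
        Process p = Runtime.getRuntime().exec(new String[] { "sleep", "1" });
        int result = 0;
        while (true) {
            try {
                result = p.waitFor(); // block until the child exits
            } catch (InterruptedException ie) {
                // interrupted; loop around and keep waiting, so cleanup
                // always completes. Note this deliberately swallows the interrupt.
                continue;
            }
            break;
        }
        System.out.println("Process exited with status " + result);
    }
}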
From source file: com.t2.dataouthandler.DataOutHandler.java

/**
 * Synchronous version of sendPacketToRemoteDb.
 * Doesn't return until the HTTP transaction is either complete or has timed out.
 * @param dataOutPacket
 * @param queuedAction
 * @param drupalNodeId
 */
void sendPacketToRemoteDbSync(final DataOutPacket dataOutPacket, final String queuedAction,
        final String drupalNodeId) {
    if (VERBOSE_LOGGING) {
        Log.e(TAG, "Waiting for sendPacketToRemoteDbToken");
    }
    synchronized (sendPacketToRemoteDbToken) {
        sendPacketToRemoteDb(dataOutPacket, queuedAction, drupalNodeId);
        try {
            sendPacketToRemoteDbToken.wait(SYNC_TIMEOUT);
        } catch (InterruptedException e) {
            Log.e(TAG, e.toString());
            e.printStackTrace();
        }
    }
    if (VERBOSE_LOGGING) {
        Log.e(TAG, "Done Waiting for sendPacketToRemoteDbToken");
    }
}
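The example above makes an asynchronous call look synchronous: hold a monitor, fire the async call, then wait with a timeout until the completion callback notifies. A minimal sketch of that pattern follows; all names (lock, TIMEOUT_MS, onTransactionComplete) are hypothetical stand-ins, not the library's API.

public class SyncOverAsyncSketch {
    private static final long TIMEOUT_MS = 5000; // hypothetical timeout
    private final Object lock = new Object();

    // Called by the async layer when the transaction completes.
    void onTransactionComplete() {
        synchronized (lock) {
            lock.notifyAll();
        }
    }

    void sendSync(Runnable asyncSend) {
        synchronized (lock) {
            asyncSend.run(); // kick off the asynchronous work
            try {
                lock.wait(TIMEOUT_MS); // block until notified or timed out
            } catch (InterruptedException e) {
                System.err.println(e.toString());
                Thread.currentThread().interrupt();
            }
        }
    }
}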
From source file: com.iiordanov.bVNC.RemoteCanvas.java

/**
 * Create a view showing a remote desktop connection
 * @param context Containing context (activity)
 * @param bean Connection settings
 * @param setModes Callback to run on UI thread after connection is set up
 */
void initializeCanvas(ConnectionBean bean, Database db, final Runnable setModes) {
    this.setModes = setModes;
    connection = bean;
    database = db;
    decoder.setColorModel(COLORMODEL.valueOf(bean.getColorModel()));

    // Start up the connection thread with a progress dialog
    pd = ProgressDialog.show(getContext(), getContext().getString(R.string.info_progress_dialog_connecting),
            getContext().getString(R.string.info_progress_dialog_establishing), true, true,
            new DialogInterface.OnCancelListener() {
                @Override
                public void onCancel(DialogInterface dialog) {
                    closeConnection();
                    handler.post(new Runnable() {
                        public void run() {
                            Utils.showFatalErrorMessage(getContext(),
                                    getContext().getString(R.string.info_progress_dialog_aborted));
                        }
                    });
                }
            });

    // Make this dialog cancellable only upon hitting the Back button and not touching outside.
    pd.setCanceledOnTouchOutside(false);

    Thread t = new Thread() {
        public void run() {
            try {
                // Initialize SSH key if necessary
                if (connection.getConnectionType() == Constants.CONN_TYPE_SSH
                        && connection.getSshHostKey().equals("")
                        && Utils.isNullOrEmptry(connection.getIdHash())) {
                    handler.sendEmptyMessage(Constants.DIALOG_SSH_CERT);

                    // Block while user decides whether to accept certificate or not.
                    // The activity ends if the user taps "No", so we block indefinitely here.
                    synchronized (RemoteCanvas.this) {
                        while (connection.getSshHostKey().equals("")) {
                            try {
                                RemoteCanvas.this.wait();
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                }

                if (isSpice) {
                    startSpiceConnection();
                } else if (isRdp) {
                    startRdpConnection();
                } else {
                    startVncConnection();
                }
            } catch (Throwable e) {
                if (maintainConnection) {
                    Log.e(TAG, e.toString());
                    e.printStackTrace();
                    // Ensure we dismiss the progress dialog before we finish
                    if (pd.isShowing())
                        pd.dismiss();

                    if (e instanceof OutOfMemoryError) {
                        disposeDrawable();
                        showFatalMessageAndQuit(getContext().getString(R.string.error_out_of_memory));
                    } else {
                        String error = getContext().getString(R.string.error_connection_failed);
                        if (e.getMessage() != null) {
                            if (e.getMessage().indexOf("SSH") < 0
                                    && (e.getMessage().indexOf("authentication") > -1
                                            || e.getMessage().indexOf("Unknown security result") > -1
                                            || e.getMessage().indexOf("password check failed") > -1)) {
                                error = getContext().getString(R.string.error_vnc_authentication);
                            }
                            error = error + "<br>" + e.getLocalizedMessage();
                        }
                        showFatalMessageAndQuit(error);
                    }
                }
            }
        }
    };
    t.start();

    clipboardMonitor = new ClipboardMonitor(getContext(), this);
    if (clipboardMonitor != null) {
        clipboardMonitorTimer = new Timer();
        if (clipboardMonitorTimer != null) {
            try {
                clipboardMonitorTimer.schedule(clipboardMonitor, 0, 500);
            } catch (NullPointerException e) {
            }
        }
    }
}
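The SSH host-key wait in the example above is the classic guarded-wait idiom: call wait() inside a while loop that re-checks the condition, which tolerates both spurious wakeups and interrupts. A stripped-down sketch, with hypothetical names (hostKey, setHostKey, awaitHostKey):

public class GuardedWaitSketch {
    private String hostKey = "";

    // Called from another thread once the key is known.
    synchronized void setHostKey(String key) {
        hostKey = key;
        notifyAll();
    }

    synchronized void awaitHostKey() {
        while (hostKey.equals("")) {
            try {
                wait(); // releases the monitor until notified
            } catch (InterruptedException e) {
                e.printStackTrace(); // matches the example's handling; loop re-checks
            }
        }
    }
}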
From source file: com.cloudera.sqoop.manager.DirectPostgresqlManager.java

/**
 * Import the table into HDFS by using psql to pull the data out of the db
 * via COPY FILE TO STDOUT.
 */
@Override
public void importTable(ImportJobContext context) throws IOException, ImportException {
    String tableName = context.getTableName();
    SqoopOptions options = context.getOptions();
    LOG.info("Beginning psql fast path import");

    if (options.getFileLayout() != SqoopOptions.FileLayout.TextFile) {
        // TODO(aaron): Support SequenceFile-based load-in
        LOG.warn("File import layout " + options.getFileLayout() + " is not supported by");
        LOG.warn("Postgresql direct import; import will proceed as text files.");
    }

    String commandFilename = null;
    String passwordFilename = null;
    Process p = null;
    AsyncSink sink = null;
    AsyncSink errSink = null;
    PerfCounters counters = new PerfCounters();

    try {
        // Get the COPY TABLE command to issue, write this to a file, and pass
        // it in to psql with -f filename. Then make sure we delete this file
        // in our finally block.
        String copyCmd = getCopyCommand(tableName);
        commandFilename = writeCopyCommand(copyCmd);

        // Arguments to pass to psql on the command line.
        ArrayList<String> args = new ArrayList<String>();

        // Environment to pass to psql.
        List<String> envp = Executor.getCurEnvpStrings();

        // We need to parse the connect string URI to determine the database
        // name and the host and port. If the host is localhost and the port is
        // not specified, we don't want to pass this to psql, because we want to
        // force the use of a UNIX domain socket, not a TCP/IP socket.
        String connectString = options.getConnectString();
        String databaseName = JdbcUrl.getDatabaseName(connectString);
        String hostname = JdbcUrl.getHostName(connectString);
        int port = JdbcUrl.getPort(connectString);

        if (null == databaseName) {
            throw new ImportException("Could not determine database name");
        }

        LOG.info("Performing import of table " + tableName + " from database " + databaseName);

        args.add(PSQL_CMD); // requires that this is on the path.
        args.add("--tuples-only");
        args.add("--quiet");

        String username = options.getUsername();
        if (username != null) {
            args.add("--username");
            args.add(username);
            String password = options.getPassword();
            if (null != password) {
                passwordFilename = writePasswordFile(password);
                // Need to send PGPASSFILE environment variable specifying
                // location of our postgres file.
                envp.add("PGPASSFILE=" + passwordFilename);
            }
        }

        args.add("--host");
        args.add(hostname);

        if (port != -1) {
            args.add("--port");
            args.add(Integer.toString(port));
        }

        if (null != databaseName && databaseName.length() > 0) {
            args.add(databaseName);
        }

        // The COPY command is in a script file.
        args.add("-f");
        args.add(commandFilename);

        // Begin the import in an external process.
        LOG.debug("Starting psql with arguments:");
        for (String arg : args) {
            LOG.debug(" " + arg);
        }

        // This writer will be closed by AsyncSink.
        SplittableBufferedWriter w = DirectImportUtils.createHdfsSink(options.getConf(), options, context);

        // Actually start the psql dump.
        p = Runtime.getRuntime().exec(args.toArray(new String[0]), envp.toArray(new String[0]));

        // Read from the stdout pipe into the HDFS writer.
        InputStream is = p.getInputStream();
        sink = new PostgresqlAsyncSink(w, options, counters);

        LOG.debug("Starting stream sink");
        counters.startClock();
        sink.processStream(is);
        errSink = new LoggingAsyncSink(LOG);
        errSink.processStream(p.getErrorStream());
    } finally {
        // Block until the process is done.
        LOG.debug("Waiting for process completion");
        int result = 0;
        if (null != p) {
            while (true) {
                try {
                    result = p.waitFor();
                } catch (InterruptedException ie) {
                    // interrupted; loop around.
                    continue;
                }
                break;
            }
        }

        // Remove any password file we wrote
        if (null != passwordFilename) {
            if (!new File(passwordFilename).delete()) {
                LOG.error("Could not remove postgresql password file " + passwordFilename);
                LOG.error("You should remove this file to protect your credentials.");
            }
        }

        if (null != commandFilename) {
            // We wrote the COPY command to a tmpfile. Remove it.
            if (!new File(commandFilename).delete()) {
                LOG.info("Could not remove temp file: " + commandFilename);
            }
        }

        // Block until the stream sink is done too.
        int streamResult = 0;
        if (null != sink) {
            while (true) {
                try {
                    streamResult = sink.join();
                } catch (InterruptedException ie) {
                    // interrupted; loop around.
                    continue;
                }
                break;
            }
        }

        // Attempt to block for the stderr stream sink; errors are advisory.
        if (null != errSink) {
            try {
                if (0 != errSink.join()) {
                    LOG.info("Encountered exception reading stderr stream");
                }
            } catch (InterruptedException ie) {
                LOG.info("Thread interrupted waiting for stderr to complete: " + ie.toString());
            }
        }

        LOG.info("Transfer loop complete.");

        if (0 != result) {
            throw new IOException("psql terminated with status " + Integer.toString(result));
        }

        if (0 != streamResult) {
            throw new IOException("Encountered exception in stream sink");
        }

        counters.stopClock();
        LOG.info("Transferred " + counters.toString());
    }
}
From source file: com.github.maven_nar.cpptasks.CCTask.java

/**
 * Executes the task. Compiles the given files.
 *
 * @throws BuildException
 *           if something goes wrong with the build
 */
@Override
public void execute() throws BuildException {
    //
    // if link type allowed objdir to be defaulted,
    // provide it from outfile
    if (this._objDir == null) {
        if (this._outfile != null) {
            this._objDir = new File(this._outfile.getParent());
        } else {
            this._objDir = new File(".");
        }
    }

    //
    // if the object directory does not exist
    //
    if (!this._objDir.exists()) {
        throw new BuildException("Object directory does not exist");
    }
    final TargetHistoryTable objHistory = new TargetHistoryTable(this, this._objDir);

    //
    // get the first active version info
    //
    VersionInfo versionInfo = null;
    final Enumeration<VersionInfo> versionEnum = this.versionInfos.elements();
    while (versionEnum.hasMoreElements()) {
        versionInfo = versionEnum.nextElement();
        versionInfo = versionInfo.merge();
        if (versionInfo.isActive()) {
            break;
        } else {
            versionInfo = null;
        }
    }

    //
    // determine the eventual linker configuration
    // (may be null) and collect any explicit
    // object files or libraries
    final Vector<File> objectFiles = new Vector<>();
    final Vector<File> sysObjectFiles = new Vector<>();
    final LinkerConfiguration linkerConfig = collectExplicitObjectFiles(objectFiles, sysObjectFiles,
            versionInfo);

    //
    // Assemble hashtable of all files
    // that we know how to compile (keyed by output file name)
    //
    final Map<String, TargetInfo> targets = getTargets(linkerConfig, objectFiles, versionInfo, this._outfile);
    TargetInfo linkTarget = null;
    //
    // if output file is not specified,
    // then skip the link step
    //
    if (this._outfile != null) {
        linkTarget = getLinkTarget(linkerConfig, objectFiles, sysObjectFiles, targets, versionInfo);
    }

    if (this.projects.size() > 0) {
        final List<File> files = new ArrayList<>();
        final ProjectFileCollector matcher = new ProjectFileCollector(files);
        for (int i = 0; i < this._compilers.size(); i++) {
            final CompilerDef currentCompilerDef = this._compilers.elementAt(i);
            if (currentCompilerDef.isActive() && currentCompilerDef.hasFileSets()) {
                currentCompilerDef.visitFiles(matcher);
            }
        }
        this.compilerDef.visitFiles(matcher);

        final Enumeration<ProjectDef> iter = this.projects.elements();
        while (iter.hasMoreElements()) {
            final ProjectDef projectDef = iter.nextElement();
            if (projectDef.isActive()) {
                projectDef.execute(this, files, targets, linkTarget);
            }
        }
    }
    if (this.projectsOnly) {
        return;
    }

    //
    // mark targets that don't have a history record or
    // whose source last modification time is not
    // the same as the history to be rebuilt
    //
    objHistory.markForRebuild(targets);
    final CCTaskProgressMonitor monitor = new CCTaskProgressMonitor(objHistory, versionInfo);

    //
    // check for changed include files
    //
    final int rebuildCount = checkForChangedIncludeFiles(targets);
    if (rebuildCount > 0) {
        BuildException compileException = null;
        //
        // compile all targets with getRebuild() == true
        //
        final Map<CompilerConfiguration, Vector<TargetInfo>> targetsByConfig =
                getTargetsToBuildByConfiguration(targets);
        //
        // build array containing Vectors with precompiled generation
        // steps going first
        //
        final ArrayList<Vector<TargetInfo>> targetVectorsPreComp = new ArrayList<>();
        final ArrayList<Vector<TargetInfo>> targetVectors = new ArrayList<>();

        for (final Map.Entry<CompilerConfiguration, Vector<TargetInfo>> targetsForConfig : targetsByConfig
                .entrySet()) {
            //
            // get the configuration from the first entry
            //
            final CompilerConfiguration config = targetsForConfig.getKey();
            if (config.isPrecompileGeneration()) {
                targetVectorsPreComp.add(targetsForConfig.getValue());
            } else {
                targetVectors.add(targetsForConfig.getValue());
            }
        }

        // BEGINFREEHEP
        final Progress progress = new Progress(getObjdir(), rebuildCount);
        progress.start();
        // ENDFREEHEP

        compileException = runTargetPool(monitor, compileException, targetVectorsPreComp);
        if (compileException == null || this.relentless)
            compileException = runTargetPool(monitor, compileException, targetVectors);

        // BEGINFREEHEP
        progress.exit();
        try {
            progress.join();
        } catch (final InterruptedException ex) {
            // ignore; the progress thread is exiting anyway
        }
        // ENDFREEHEP

        //
        // save the details of the object file compilation
        // settings to disk for dependency analysis
        //
        try {
            objHistory.commit();
        } catch (final IOException ex) {
            this.log("Error writing history.xml: " + ex.toString());
        }

        //
        // if we threw a compile exception and
        // didn't throw it at the time because
        // we were relentless, then
        // save the history and
        // throw the exception
        //
        if (compileException != null) {
            if (this.failOnError) {
                throw compileException;
            } else {
                log(compileException.getMessage(), Project.MSG_ERR);
                return;
            }
        }
    }

    //
    // if the dependency tree was not fully
    // evaluated, then throw an exception
    // since we really didn't do what we
    // should have done
    //
    if (this.dependencyDepth >= 0) {
        throw new BuildException("All files at depth " + Integer.toString(this.dependencyDepth)
                + " from changes successfully compiled.\n"
                + "Remove or change dependencyDepth to -1 to perform full compilation.");
    }

    //
    // if no link target then
    // commit the history for the object files
    // and leave the task
    if (linkTarget != null) {
        //
        // get the history for the link target (may be the same
        // as the object history)
        final TargetHistoryTable linkHistory = getLinkHistory(objHistory);
        //
        // see if it needs to be rebuilt
        //
        linkHistory.markForRebuild(linkTarget);
        //
        // if it needs to be rebuilt, rebuild it
        //
        final File output = linkTarget.getOutput();
        if (linkTarget.getRebuild()) {
            final LinkerConfiguration linkConfig = (LinkerConfiguration) linkTarget.getConfiguration();
            // BEGINFREEHEP
            log("Linking...");
            log("Starting link {" + linkConfig.getIdentifier() + "}");
            // ENDFREEHEP
            if (this.failOnError) {
                linkConfig.link(this, linkTarget);
            } else {
                try {
                    linkConfig.link(this, linkTarget);
                } catch (final BuildException ex) {
                    log(ex.getMessage(), Project.MSG_ERR);
                    return;
                }
            }
            if (this.outputFileProperty != null) {
                getProject().setProperty(this.outputFileProperty, output.getAbsolutePath());
            }
            linkHistory.update(linkTarget);
            try {
                linkHistory.commit();
            } catch (final IOException ex) {
                log("Error writing link history.xml: " + ex.toString());
            }
        } else {
            if (this.outputFileProperty != null) {
                getProject().setProperty(this.outputFileProperty, output.getAbsolutePath());
            }
        }
    }
}