List of usage examples for java.io IOException getStackTrace
public StackTraceElement[] getStackTrace()
From source file:com.cablelabs.sim.PCSim2.java
/** * This method determines if the provisioning file used to conduct a test needs to * be generated from one of the templates prior to starting the test. * @return - the MAC address used for the name of the file *//*from ww w. j a v a 2 s . c o m*/ private String autoGenerate(ProvisioningData pd) { Properties platform = SystemSettings.getSettings(SettingConstants.PLATFORM); Properties dut = SystemSettings.getSettings(SettingConstants.DUT); if (pd != null && platform != null && dut != null) { String pcscfLabel = dut.getProperty(SettingConstants.PCSCF); String macAddr = dut.getProperty(SettingConstants.MAC_ADDRESS); String tftpIP = platform.getProperty(SettingConstants.TFTP_SERVER_IP); String tftpPort = platform.getProperty(SettingConstants.TFTP_SERVER_PORT); String phone1 = dut.getProperty(SettingConstants.PHONE_NUMBER_1); String phone2 = dut.getProperty(SettingConstants.PHONE_NUMBER_2); String cw = platform.getProperty(SettingConstants.CW_NUMBER); if (macAddr != null && pcscfLabel != null && tftpIP != null && tftpPort != null && cw != null) { // First see if we have already issued a generated file. if (!provDB.issued(macAddr, pd)) { // Next verify the port is not set to zero try { int port = Integer.parseInt(tftpPort); if (port > 0 && port <= 65535) { // Next make sure the TFTP Server IP is not set to 0.0.0.0 if (tftpIP.equals("0.0.0.0")) { logger.warn(PC2LogCategory.PCSim2, subCat, "The TFTP Server IP setting in the platform file is not valid. 
Ending auto generate operation."); return null; } File input = new File(SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator + SettingConstants.CW + cw + File.separator + pd.getProvFileName()); if (input != null) { ProvGen pg = new ProvGen(input); if (phone1 != null) pg.changePhoneNum(SettingConstants.AUTO_GENERATE_PHONE_NUMBER_1, phone1); if (phone2 != null) pg.changePhoneNum(SettingConstants.AUTO_GENERATE_PHONE_NUMBER_2, phone2); Properties pcscf = SystemSettings.getSettings(pcscfLabel); if (pcscf != null) { String pcscfIP = pcscf.getProperty(SettingConstants.IP); if (pcscfIP != null) pg.changePCSCF(pcscfIP); } String newFileName = macAddr + ".bin"; if (pg.output(SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator + SettingConstants.CW + cw + File.separator + newFileName)) { // Test system //File output = new File(SettingConstants.AUTO_PROV_FILE_DIRECTORY + newFileName); //File pact = new File(SettingConstants.AUTO_PROV_FILE_DIRECTORY + "chinmaya_base_ph1_pcscf.bin"); //pg.compare(pact, output); // Create a data entry of the issued event //ProvisioningData issuePD = new ProvisioningData(macAddr, pd.getPolicyFileName(), newFileName); logger.info(PC2LogCategory.PCSim2, subCat, "Beginning to TFTP the new provisioning file."); provDB.setIssuedData(macAddr, pd); // Next we need to TFTP the file to the server TFTPClient client = new TFTPClient(); File binFile = new File( SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator + SettingConstants.CW + cw + File.separator + newFileName); if (binFile.isFile() && binFile.canRead()) { FileInputStream istrm = new FileInputStream(binFile); //InetAddress ia = InetAddress.getByName("10.4.1.37"); client.open(); // client.open(20003, ia); client.sendFile(newFileName, TFTP.BINARY_MODE, istrm, tftpIP, port); client.close(); logger.info(PC2LogCategory.PCSim2, subCat, "TFTP of the new provisioning file is complete."); return macAddr; } else { logger.warn(PC2LogCategory.PCSim2, subCat, "The " + macAddr + ".bin doesn't 
appear in the " + SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator + SettingConstants.CW + cw + " Ending auto generate operation."); } } else { logger.error(PC2LogCategory.PCSim2, subCat, "PCSim2 could not locate provisioning template file[" + input.getAbsolutePath() + "]."); } } // else { // logger.info(PC2LogCategory.PCSim2, subCat, "Auto provisioning is terminating because the input directory is null."); // } } else { logger.info(PC2LogCategory.PCSim2, subCat, "Auto provisioning is terminating because the port(" + port + ") is less than 0 or greater than 65535."); } } catch (NumberFormatException nfe) { logger.warn(PC2LogCategory.PCSim2, subCat, "PCSim2 is not auto generating a provisioning file because the " + "TFTP Server Port setting doesn't appear to be a number."); } catch (UnknownHostException uhe) { logger.warn(PC2LogCategory.PCSim2, subCat, "PCSim2 is not auto generating a provisioning file because the " + "system encountered an error when attempting to send the file to the TFTP Server.\n" + uhe.getMessage() + "\n" + uhe.getStackTrace()); } catch (IOException ioe) { logger.warn(PC2LogCategory.PCSim2, subCat, "PCSim2 is not auto generating a provisioning file because the " + "system encountered an error when attempting to send the file to the TFTP Server.\n" + ioe.getMessage() + "\n" + ioe.getStackTrace()); } } else { logger.info(PC2LogCategory.PCSim2, subCat, "Auto provisioning detected the same same provisioning template is already in use, skipping operation."); } } else { logger.info(PC2LogCategory.PCSim2, subCat, "Auto provisioning is stopping because one of the values is null.\n" + "macAddr=" + macAddr + " pcscfLabel=" + pcscfLabel + " tftpIP=" + tftpIP + " tftpPort=" + tftpPort); } } else { if (pd != null) logger.info(PC2LogCategory.PCSim2, subCat, "The provisioning data is null, terminating processing."); if (platform != null) logger.info(PC2LogCategory.PCSim2, subCat, "The Platform settings is null, terminating processing."); if (dut != 
null) logger.info(PC2LogCategory.PCSim2, subCat, "The DUT settings is null, terminating processing."); } return null; }
From source file:org.loon.framework.android.game.LGameAndroid2DActivity.java
/** * ??Android/*from w w w. j av a 2 s . c om*/ * * @param exception */ private void androidException(Exception exception) { final AlertDialog.Builder builder = new AlertDialog.Builder(this); try { throw exception; } catch (IOException e) { if (e.getMessage().startsWith("Network unreachable")) { builder.setTitle("No network"); builder.setMessage( "LGame-Android Remote needs local network access. Please make sure that your wireless network is activated. You can click on the Settings button below to directly access your network settings."); builder.setNeutralButton("Settings", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { startActivity(new Intent(android.provider.Settings.ACTION_WIRELESS_SETTINGS)); } }); } else { builder.setTitle("Unknown I/O Exception"); builder.setMessage(e.getMessage().toString()); } } catch (HttpException e) { if (e.getMessage().startsWith("401")) { builder.setTitle("HTTP 401: Unauthorized"); builder.setMessage( "The supplied username and/or password is incorrect. Please check your settings."); builder.setNeutralButton("Settings", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { startActivity(new Intent()); } }); } } catch (RuntimeException e) { builder.setTitle("RuntimeException"); builder.setMessage(e.getStackTrace().toString()); } catch (Exception e) { builder.setTitle("Exception"); builder.setMessage(e.getMessage()); } finally { exception.printStackTrace(); builder.setCancelable(true); builder.setNegativeButton("Close", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); final AlertDialog alert = builder.create(); try { alert.show(); } catch (Throwable e) { } finally { LSystem.destroy(); } } }
From source file:de.decoit.visa.rdf.RDFManager.java
/** * Read a SPARQL query from a file into a String and create a Query object * from that. If a resource was specified all occurrences of $URI$ * placeholder in the read query will be replaced with the URI of the * resource. If a model URI is specified, GRAPH lines will be added to the * query using the placeholders $S_MOD$ and $E_MOD$. * * @param pFileName File name of the SPARQL file. The file must exist and be * located in 'res/sparql'//from ww w . j av a 2 s .c om * @param pRes Optional resource object, will be used to replace the $URI$ * placeholder. Can be set to null if not required. * @param pMod Optional model URI, will be used to add GRAPH lines to the * query. If set to null the query will be executed on the * default model of the dataset. * @return A Query object containing the read SPARQL query, null if the * input file cannot be read */ private Query readSPARQL(String pFileName, Resource pRes, String pMod) { try { // Open the SPARQL file for reading Path inFile = Paths.get("res/sparql", pFileName); BufferedReader br = Files.newBufferedReader(inFile, StandardCharsets.UTF_8); // Read all lines and concatenate them using a StringBuilder StringBuilder rv = new StringBuilder(); String line = br.readLine(); while (line != null) { rv.append(line); rv.append(System.lineSeparator()); line = br.readLine(); } br.close(); // Get the String from the StringBuilder and, if required, replace // the $URI$ placeholder String rvStr = rv.toString(); if (pRes != null) { rvStr = rvStr.replaceAll("\\$URI\\$", pRes.getURI()); } if (pMod != null && !pMod.isEmpty()) { StringBuilder graphLine = new StringBuilder("GRAPH <"); graphLine.append(pMod); graphLine.append("> {"); rvStr = rvStr.replaceAll("\\$S_MOD\\$", graphLine.toString()).replaceAll("\\$E_MOD\\$", "}"); } else { rvStr = rvStr.replaceAll("\\$S_MOD\\$", "").replaceAll("\\$E_MOD\\$", ""); } // Build a Query object and return it return QueryFactory.create(rvStr); } catch (IOException ex) { StringBuilder sb = new 
StringBuilder("Caught: ["); sb.append(ex.getClass().getSimpleName()); sb.append("] "); sb.append(ex.getMessage()); log.error(sb.toString()); if (log.isDebugEnabled()) { for (StackTraceElement ste : ex.getStackTrace()) { log.debug(ste.toString()); } } return null; } }
From source file:de.juwimm.cms.util.Communication.java
/** * Used in case of overwrite picture. Refreshed the cache for that image * @param pictureId/*from w w w . j a v a 2 s .c o m*/ */ public void updateCachedImage(Integer pictureId) { String thumbPath = "ejbimage?typ=t&id=" + pictureId; File svgFile = new File(Constants.SVG_CACHE + this.encodeSvgImageName(thumbPath) + ".png"); byte[] svgByte = null; try { File cacheDir = new File(Constants.SVG_CACHE); cacheDir.mkdirs(); svgByte = getClientService().getPicture(pictureId).getThumbnail(); OutputStream out = new BufferedOutputStream(new FileOutputStream(svgFile)); out.write(svgByte, 0, svgByte.length); out.flush(); out.close(); } catch (IOException ioe) { log.warn("Image with Id " + pictureId + " has not been found - " + ioe.getStackTrace()); } }
From source file:org.kuali.ole.module.purap.service.impl.ElectronicInvoiceHelperServiceImpl.java
/**
 * Loads all pending electronic invoice XML files from the base directory.
 * Each non-empty *.xml file without a matching ".processed" marker is
 * namespace-normalized and handed to processElectronicInvoice(); files are
 * then moved to the accept or reject directory (when file moving is enabled)
 * or marked with a ".processed" file (when it is not). A summary plus a list
 * of failed files is emailed at the end.
 *
 * @return the accumulated {@link ElectronicInvoiceLoad} describing this run
 */
@Override
public ElectronicInvoiceLoad loadElectronicInvoices() {
    //add a step to check for directory paths
    prepareDirectories(getRequiredDirectoryNames());
    String baseDirName = getBaseDirName();
    String rejectDirName = getRejectDirName();
    String acceptDirName = getAcceptDirName();
    emailTextErrorList = new StringBuffer();
    // Parameter controls whether processed files are physically moved or just marked.
    boolean moveFiles = SpringContext.getBean(ParameterService.class).getParameterValueAsBoolean(
            ElectronicInvoiceStep.class,
            PurapParameterConstants.ElectronicInvoiceParameters.FILE_MOVE_AFTER_LOAD_IND);
    int failedCnt = 0;
    if (LOG.isInfoEnabled()) {
        LOG.info("Invoice Base Directory - " + electronicInvoiceInputFileType.getDirectoryPath());
        LOG.info("Invoice Accept Directory - " + acceptDirName);
        LOG.info("Invoice Reject Directory - " + rejectDirName);
        LOG.info("Is moving files allowed - " + moveFiles);
    }
    if (StringUtils.isBlank(rejectDirName)) {
        throw new RuntimeException("Reject directory name should not be empty");
    }
    if (StringUtils.isBlank(acceptDirName)) {
        throw new RuntimeException("Accept directory name should not be empty");
    }
    File baseDir = new File(baseDirName);
    if (!baseDir.exists()) {
        throw new RuntimeException("Base dir [" + baseDirName + "] doesn't exists in the system");
    }
    // Pick up only XML files that have no ".processed" marker from a previous run.
    File[] filesToBeProcessed = baseDir.listFiles(new FileFilter() {
        @Override
        public boolean accept(File file) {
            String fullPath = FilenameUtils.getFullPath(file.getAbsolutePath());
            String fileName = FilenameUtils.getBaseName(file.getAbsolutePath());
            File processedFile = new File(fullPath + File.separator + fileName + ".processed");
            return (!file.isDirectory() && file.getName().endsWith(".xml") && !processedFile.exists());
        }
    });
    ElectronicInvoiceLoad eInvoiceLoad = new ElectronicInvoiceLoad();
    // Nothing to do: mail the "no files processed" notice and return an empty load.
    if (filesToBeProcessed == null || filesToBeProcessed.length == 0) {
        StringBuffer mailText = new StringBuffer();
        mailText.append("\n\n");
        mailText.append(PurapConstants.ElectronicInvoice.NO_FILES_PROCESSED_EMAIL_MESSAGE);
        mailText.append("\n\n");
        sendSummary(mailText);
        return eInvoiceLoad;
    }
    try {
        // Create the accept/reject directories if not there.
        FileUtils.forceMkdir(new File(acceptDirName));
        FileUtils.forceMkdir(new File(rejectDirName));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    if (LOG.isInfoEnabled()) {
        LOG.info(filesToBeProcessed.length + " file(s) available for processing");
    }
    StringBuilder emailMsg = new StringBuilder();
    for (File element2 : filesToBeProcessed) {
        // MSU Contribution DTT-3014 OLEMI-8483 OLECNTRB-974
        File xmlFile = element2;
        LOG.info("Processing " + xmlFile.getName() + "....");
        byte[] modifiedXML = null;
        // process only if file exists and not empty
        if (xmlFile.length() != 0L) {
            modifiedXML = addNamespaceDefinition(eInvoiceLoad, xmlFile);
        }
        boolean isRejected = false;
        if (modifiedXML == null) { //Not able to parse the xml
            isRejected = true;
        } else {
            try {
                isRejected = processElectronicInvoice(eInvoiceLoad, xmlFile, modifiedXML);
            } catch (Exception e) {
                String msg = xmlFile.getName() + "\n";
                LOG.error(msg);
                //since getMessage() is empty we'll compose the stack trace and nicely format it.
                StackTraceElement[] elements = e.getStackTrace();
                StringBuffer trace = new StringBuffer();
                trace.append(e.getClass().getName());
                if (e.getMessage() != null) {
                    trace.append(": ");
                    trace.append(e.getMessage());
                }
                trace.append("\n");
                for (StackTraceElement element : elements) {
                    trace.append(" at ");
                    trace.append(describeStackTraceElement(element));
                    trace.append("\n");
                }
                LOG.error(trace);
                emailMsg.append(msg);
                msg += "\n--------------------------------------------------------------------------------------\n"
                        + trace;
                logProcessElectronicInvoiceError(msg);
                failedCnt++;
                /**
                 * Clear the error map, so that subsequent EIRT routing isn't prevented since rice is throwing a
                 * ValidationException if the error map is not empty before routing the doc.
                 */
                GlobalVariables.getMessageMap().clearErrorMessages();
                //Do not execute rest of code below
                continue;
            }
        }
        /**
         * If there is a single order has rejects and the remainings are accepted in a invoice file,
         * then the entire file has been moved to the reject dir.
         */
        if (isRejected) {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been rejected");
            }
            if (moveFiles) {
                if (LOG.isInfoEnabled()) {
                    LOG.info(xmlFile.getName() + " has been marked to move to " + rejectDirName);
                }
                eInvoiceLoad.addRejectFileToMove(xmlFile, rejectDirName);
            }
        } else {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been accepted");
            }
            if (moveFiles) {
                if (!moveFile(xmlFile, acceptDirName)) {
                    String msg = xmlFile.getName() + " unable to move";
                    LOG.error(msg);
                    throw new PurError(msg);
                }
            }
        }
        // When not moving, drop a ".processed" marker so the file is skipped next run.
        if (!moveFiles) {
            String fullPath = FilenameUtils.getFullPath(xmlFile.getAbsolutePath());
            String fileName = FilenameUtils.getBaseName(xmlFile.getAbsolutePath());
            File processedFile = new File(fullPath + File.separator + fileName + ".processed");
            try {
                FileUtils.touch(processedFile);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        // delete the .done file
        deleteDoneFile(xmlFile);
    }
    // Assemble and send the summary email (overall summary + failed-file details).
    emailTextErrorList.append("\nFAILED FILES\n");
    emailTextErrorList.append("-----------------------------------------------------------\n\n");
    emailTextErrorList.append(emailMsg);
    emailTextErrorList.append("\nTOTAL COUNT\n");
    emailTextErrorList.append("===========================\n");
    emailTextErrorList.append(" " + failedCnt + " FAILED\n");
    emailTextErrorList.append("===========================\n");
    StringBuffer summaryText = saveLoadSummary(eInvoiceLoad);
    StringBuffer finalText = new StringBuffer();
    finalText.append(summaryText);
    finalText.append("\n");
    finalText.append(emailTextErrorList);
    sendSummary(finalText);
    LOG.info("Processing completed");
    return eInvoiceLoad;
}
From source file:org.kuali.kfs.module.purap.service.impl.ElectronicInvoiceHelperServiceImpl.java
/**
 * Loads all pending electronic invoice XML files (as returned by
 * getFilesToBeProcessed()). Each non-empty file is namespace-normalized and
 * handed to processElectronicInvoice(); files are then moved to the accept
 * or reject directory (when file moving is enabled) or marked with a
 * ".processed" file (when it is not). A summary plus a list of failed files
 * is emailed at the end.
 *
 * @return the accumulated {@link ElectronicInvoiceLoad} describing this run
 */
@Override
@NonTransactional
public ElectronicInvoiceLoad loadElectronicInvoices() {
    //add a step to check for directory paths
    prepareDirectories(getRequiredDirectoryNames());
    String rejectDirName = getRejectDirName();
    String acceptDirName = getAcceptDirName();
    emailTextErrorList = new StringBuffer();
    // Parameter controls whether processed files are physically moved or just marked.
    boolean moveFiles = SpringContext.getBean(ParameterService.class).getParameterValueAsBoolean(
            ElectronicInvoiceStep.class,
            PurapParameterConstants.ElectronicInvoiceParameters.FILE_MOVE_AFTER_LOAD_IND);
    int failedCnt = 0;
    if (LOG.isInfoEnabled()) {
        LOG.info("Invoice Base Directory - " + electronicInvoiceInputFileType.getDirectoryPath());
        LOG.info("Invoice Accept Directory - " + acceptDirName);
        LOG.info("Invoice Reject Directory - " + rejectDirName);
        LOG.info("Is moving files allowed - " + moveFiles);
    }
    if (StringUtils.isBlank(rejectDirName)) {
        throw new RuntimeException("Reject directory name should not be empty");
    }
    if (StringUtils.isBlank(acceptDirName)) {
        throw new RuntimeException("Accept directory name should not be empty");
    }
    File[] filesToBeProcessed = getFilesToBeProcessed();
    ElectronicInvoiceLoad eInvoiceLoad = new ElectronicInvoiceLoad();
    // Nothing to do: mail the "no files processed" notice and return an empty load.
    if (filesToBeProcessed == null || filesToBeProcessed.length == 0) {
        StringBuffer mailText = new StringBuffer();
        mailText.append("\n\n");
        mailText.append(PurapConstants.ElectronicInvoice.NO_FILES_PROCESSED_EMAIL_MESSAGE);
        mailText.append("\n\n");
        sendSummary(mailText);
        return eInvoiceLoad;
    }
    try {
        // Create the accept/reject directories if not there.
        FileUtils.forceMkdir(new File(acceptDirName));
        FileUtils.forceMkdir(new File(rejectDirName));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    if (LOG.isInfoEnabled()) {
        LOG.info(filesToBeProcessed.length + " file(s) available for processing");
    }
    StringBuilder emailMsg = new StringBuilder();
    for (int i = 0; i < filesToBeProcessed.length; i++) {
        File xmlFile = filesToBeProcessed[i];
        LOG.info("Processing " + xmlFile.getName() + "....");
        byte[] modifiedXML = null;
        //process only if file exists and not empty
        if (xmlFile.length() != 0L) {
            modifiedXML = addNamespaceDefinition(eInvoiceLoad, xmlFile);
        }
        boolean isRejected = false;
        if (modifiedXML == null) { //Not able to parse the xml
            isRejected = true;
        } else {
            try {
                isRejected = processElectronicInvoice(eInvoiceLoad, xmlFile, modifiedXML);
            } catch (Exception e) {
                String msg = xmlFile.getName() + "\n";
                LOG.error(msg);
                //since getMessage() is empty we'll compose the stack trace and nicely format it.
                StackTraceElement[] elements = e.getStackTrace();
                StringBuffer trace = new StringBuffer();
                trace.append(e.getClass().getName());
                if (e.getMessage() != null) {
                    trace.append(": ");
                    trace.append(e.getMessage());
                }
                trace.append("\n");
                for (int j = 0; j < elements.length; ++j) {
                    StackTraceElement element = elements[j];
                    trace.append(" at ");
                    trace.append(describeStackTraceElement(element));
                    trace.append("\n");
                }
                LOG.error(trace);
                emailMsg.append(msg);
                msg += "\n--------------------------------------------------------------------------------------\n"
                        + trace;
                logProcessElectronicInvoiceError(msg);
                failedCnt++;
                /**
                 * Clear the error map, so that subsequent EIRT routing isn't prevented since rice
                 * is throwing a ValidationException if the error map is not empty before routing the doc.
                 */
                GlobalVariables.getMessageMap().clearErrorMessages();
                //Do not execute rest of code below
                continue;
            }
        }
        /**
         * If there is a single order has rejects and the remainings are accepted in a invoice file,
         * then the entire file has been moved to the reject dir.
         */
        if (isRejected) {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been rejected");
            }
            if (moveFiles) {
                if (LOG.isInfoEnabled()) {
                    LOG.info(xmlFile.getName() + " has been marked to move to " + rejectDirName);
                }
                eInvoiceLoad.addRejectFileToMove(xmlFile, rejectDirName);
            }
        } else {
            if (LOG.isInfoEnabled()) {
                LOG.info(xmlFile.getName() + " has been accepted");
            }
            if (moveFiles) {
                if (!moveFile(xmlFile, acceptDirName)) {
                    String msg = xmlFile.getName() + " unable to move";
                    LOG.error(msg);
                    throw new PurError(msg);
                }
            }
        }
        // When not moving, drop a ".processed" marker so the file is skipped next run.
        if (!moveFiles) {
            String fullPath = FilenameUtils.getFullPath(xmlFile.getAbsolutePath());
            String fileName = FilenameUtils.getBaseName(xmlFile.getAbsolutePath());
            File processedFile = new File(fullPath + File.separator + fileName + ".processed");
            try {
                FileUtils.touch(processedFile);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        // delete the .done file
        deleteDoneFile(xmlFile);
    }
    // Assemble and send the summary email (overall summary + failed-file details).
    emailTextErrorList.append("\nFAILED FILES\n");
    emailTextErrorList.append("-----------------------------------------------------------\n\n");
    emailTextErrorList.append(emailMsg);
    emailTextErrorList.append("\nTOTAL COUNT\n");
    emailTextErrorList.append("===========================\n");
    emailTextErrorList.append(" " + failedCnt + " FAILED\n");
    emailTextErrorList.append("===========================\n");
    StringBuffer summaryText = saveLoadSummary(eInvoiceLoad);
    StringBuffer finalText = new StringBuffer();
    finalText.append(summaryText);
    finalText.append("\n");
    finalText.append(emailTextErrorList);
    sendSummary(finalText);
    LOG.info("Processing completed");
    return eInvoiceLoad;
}
From source file:org.apache.storm.daemon.worker.Worker.java
public void start() throws Exception { LOG.info("Launching worker for {} on {}:{} with id {} and conf {}", topologyId, assignmentId, port, workerId, conf);//from w w w . j ava2s . c om // because in local mode, its not a separate // process. supervisor will register it in this case // if ConfigUtils.isLocalMode(conf) returns false then it is in distributed mode. if (!ConfigUtils.isLocalMode(conf)) { // Distributed mode SysOutOverSLF4J.sendSystemOutAndErrToSLF4J(); String pid = Utils.processPid(); FileUtils.touch(new File(ConfigUtils.workerPidPath(conf, workerId, pid))); FileUtils.writeStringToFile(new File(ConfigUtils.workerArtifactsPidPath(conf, topologyId, port)), pid, Charset.forName("UTF-8")); } final Map<String, Object> topologyConf = ConfigUtils .overrideLoginConfigWithSystemProperty(ConfigUtils.readSupervisorStormConf(conf, topologyId)); List<ACL> acls = Utils.getWorkerACL(topologyConf); IStateStorage stateStorage = ClusterUtils.mkStateStorage(conf, topologyConf, acls, new ClusterStateContext(DaemonType.WORKER)); IStormClusterState stormClusterState = ClusterUtils.mkStormClusterState(stateStorage, acls, new ClusterStateContext()); Credentials initialCredentials = stormClusterState.credentials(topologyId, null); Map<String, String> initCreds = new HashMap<>(); if (initialCredentials != null) { initCreds.putAll(initialCredentials.get_creds()); } autoCreds = AuthUtils.GetAutoCredentials(topologyConf); subject = AuthUtils.populateSubject(null, autoCreds, initCreds); backpressureZnodeTimeoutMs = ObjectReader.getInt(topologyConf.get(Config.BACKPRESSURE_ZNODE_TIMEOUT_SECS)) * 1000; Subject.doAs(subject, new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { workerState = new WorkerState(conf, context, topologyId, assignmentId, port, workerId, topologyConf, stateStorage, stormClusterState); // Heartbeat here so that worker process dies if this fails // it's important that worker heartbeat to supervisor ASAP so that supervisor 
knows // that worker is running and moves on doHeartBeat(); executorsAtom = new AtomicReference<>(null); // launch heartbeat threads immediately so that slow-loading tasks don't cause the worker to timeout // to the supervisor workerState.heartbeatTimer.scheduleRecurring(0, (Integer) conf.get(Config.WORKER_HEARTBEAT_FREQUENCY_SECS), () -> { try { doHeartBeat(); } catch (IOException e) { throw new RuntimeException(e); } }); workerState.executorHeartbeatTimer.scheduleRecurring(0, (Integer) conf.get(Config.WORKER_HEARTBEAT_FREQUENCY_SECS), Worker.this::doExecutorHeartbeats); workerState.registerCallbacks(); workerState.refreshConnections(null); workerState.activateWorkerWhenAllConnectionsReady(); workerState.refreshStormActive(null); workerState.runWorkerStartHooks(); List<IRunningExecutor> newExecutors = new ArrayList<IRunningExecutor>(); for (List<Long> e : workerState.getExecutors()) { if (ConfigUtils.isLocalMode(topologyConf)) { newExecutors.add(LocalExecutor.mkExecutor(workerState, e, initCreds).execute()); } else { newExecutors.add(Executor.mkExecutor(workerState, e, initCreds).execute()); } } executorsAtom.set(newExecutors); EventHandler<Object> tupleHandler = (packets, seqId, batchEnd) -> workerState .sendTuplesToRemoteWorker((HashMap<Integer, ArrayList<TaskMessage>>) packets, seqId, batchEnd); // This thread will publish the messages destined for remote tasks to remote connections transferThread = Utils.asyncLoop(() -> { workerState.transferQueue.consumeBatchWhenAvailable(tupleHandler); return 0L; }); DisruptorBackpressureCallback disruptorBackpressureHandler = mkDisruptorBackpressureHandler( workerState); workerState.transferQueue.registerBackpressureCallback(disruptorBackpressureHandler); workerState.transferQueue .setEnableBackpressure((Boolean) topologyConf.get(Config.TOPOLOGY_BACKPRESSURE_ENABLE)); workerState.transferQueue.setHighWaterMark( ObjectReader.getDouble(topologyConf.get(Config.BACKPRESSURE_DISRUPTOR_HIGH_WATERMARK))); 
workerState.transferQueue.setLowWaterMark( ObjectReader.getDouble(topologyConf.get(Config.BACKPRESSURE_DISRUPTOR_LOW_WATERMARK))); WorkerBackpressureCallback backpressureCallback = mkBackpressureHandler(topologyConf); backpressureThread = new WorkerBackpressureThread(workerState.backpressureTrigger, workerState, backpressureCallback); if ((Boolean) topologyConf.get(Config.TOPOLOGY_BACKPRESSURE_ENABLE)) { backpressureThread.start(); stormClusterState.topologyBackpressure(topologyId, backpressureZnodeTimeoutMs, workerState::refreshThrottle); int pollingSecs = ObjectReader.getInt(topologyConf.get(Config.TASK_BACKPRESSURE_POLL_SECS)); workerState.refreshBackpressureTimer.scheduleRecurring(0, pollingSecs, workerState::refreshThrottle); } credentialsAtom = new AtomicReference<Credentials>(initialCredentials); establishLogSettingCallback(); workerState.stormClusterState.credentials(topologyId, Worker.this::checkCredentialsChanged); workerState.refreshCredentialsTimer.scheduleRecurring(0, (Integer) conf.get(Config.TASK_CREDENTIALS_POLL_SECS), new Runnable() { @Override public void run() { checkCredentialsChanged(); if ((Boolean) topologyConf.get(Config.TOPOLOGY_BACKPRESSURE_ENABLE)) { checkThrottleChanged(); } } }); workerState.checkForUpdatedBlobsTimer.scheduleRecurring(0, (Integer) conf.getOrDefault(Config.WORKER_BLOB_UPDATE_POLL_INTERVAL_SECS, 10), new Runnable() { @Override public void run() { try { LOG.debug("Checking if blobs have updated"); updateBlobUpdates(); } catch (IOException e) { // IOException from reading the version files to be ignored LOG.error(e.getStackTrace().toString()); } } }); // The jitter allows the clients to get the data at different times, and avoids thundering herd if (!(Boolean) topologyConf.get(Config.TOPOLOGY_DISABLE_LOADAWARE_MESSAGING)) { workerState.refreshLoadTimer.scheduleRecurringWithJitter(0, 1, 500, Worker.this::doRefreshLoad); } workerState.refreshConnectionsTimer.scheduleRecurring(0, (Integer) 
conf.get(Config.TASK_REFRESH_POLL_SECS), workerState::refreshConnections); workerState.resetLogLevelsTimer.scheduleRecurring(0, (Integer) conf.get(Config.WORKER_LOG_LEVEL_RESET_POLL_SECS), logConfigManager::resetLogLevels); workerState.refreshActiveTimer.scheduleRecurring(0, (Integer) conf.get(Config.TASK_REFRESH_POLL_SECS), workerState::refreshStormActive); LOG.info("Worker has topology config {}", Utils.redactValue(topologyConf, Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD)); LOG.info("Worker {} for storm {} on {}:{} has finished loading", workerId, topologyId, assignmentId, port); return this; }; }); }
From source file:org.apache.geode.internal.cache.GemFireCacheImpl.java
/** * Initializes the contents of this <code>Cache</code> according to the declarative caching XML * file specified by the given <code>DistributedSystem</code>. Note that this operation cannot be * performed in the constructor because creating regions in the cache, etc. uses the cache itself * (which isn't initialized until the constructor returns). * * @throws CacheXmlException If something goes wrong while parsing the declarative caching XML * file.//from w ww. jav a 2s.c o m * @throws TimeoutException If a {@link org.apache.geode.cache.Region#put(Object, Object)}times * out while initializing the cache. * @throws CacheWriterException If a <code>CacheWriterException</code> is thrown while * initializing the cache. * @throws RegionExistsException If the declarative caching XML file desribes a region that * already exists (including the root region). * @throws GatewayException If a <code>GatewayException</code> is thrown while initializing the * cache. * * @see #loadCacheXml */ private void initializeDeclarativeCache() throws TimeoutException, CacheWriterException, GatewayException, RegionExistsException { URL url = getCacheXmlURL(); String cacheXmlDescription = this.cacheConfig.getCacheXMLDescription(); if (url == null && cacheXmlDescription == null) { if (isClient()) { determineDefaultPool(); initializeClientRegionShortcuts(this); } else { initializeRegionShortcuts(this); } initializePdxRegistry(); readyDynamicRegionFactory(); return; // nothing needs to be done } try { logCacheXML(url, cacheXmlDescription); InputStream stream = null; if (cacheXmlDescription != null) { if (logger.isTraceEnabled()) { logger.trace("initializing cache with generated XML: {}", cacheXmlDescription); } stream = new StringBufferInputStream(cacheXmlDescription); } else { stream = url.openStream(); } loadCacheXml(stream); try { stream.close(); } catch (IOException ignore) { } } catch (IOException ex) { throw new CacheXmlException( 
LocalizedStrings.GemFireCache_WHILE_OPENING_CACHE_XML_0_THE_FOLLOWING_ERROR_OCCURRED_1 .toLocalizedString(new Object[] { url.toString(), ex })); } catch (CacheXmlException ex) { CacheXmlException newEx = new CacheXmlException( LocalizedStrings.GemFireCache_WHILE_READING_CACHE_XML_0_1 .toLocalizedString(new Object[] { url, ex.getMessage() })); newEx.setStackTrace(ex.getStackTrace()); newEx.initCause(ex.getCause()); throw newEx; } }
From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java
public void exportStudyFilesToLegacySystem(String lastUpdateTime, String authority) { // Get list of studies that have been updated yesterday, // and export them to legacy VDC system Logger logger = null;//from w w w .ja v a2 s .c om String exportLogDirStr = System.getProperty("vdc.export.log.dir"); if (exportLogDirStr == null) { System.out.println("Missing system property: vdc.export.log.dir. Please add to JVM options"); return; } File exportLogDir = new File(exportLogDirStr); if (!exportLogDir.exists()) { exportLogDir.mkdir(); } logger = Logger.getLogger("edu.harvard.iq.dvn.core.web.servlet.VDCExportServlet"); // Everytime export runs, we want to write to a separate log file (handler). // So if export has run previously, remove the previous handler if (logger.getHandlers() != null && logger.getHandlers().length > 0) { int numHandlers = logger.getHandlers().length; for (int i = 0; i < numHandlers; i++) { logger.removeHandler(logger.getHandlers()[i]); } } SimpleDateFormat formatter = new SimpleDateFormat("yyyy_MM_dd"); FileHandler handler = null; try { handler = new FileHandler( exportLogDirStr + File.separator + "export_" + formatter.format(new Date()) + ".log"); } catch (IOException e) { throw new EJBException(e); } // Add handler to the desired logger logger.addHandler(handler); logger.info("Begin Exporting Studies"); int studyCount = 0; int deletedStudyCount = 0; try { /* THIS IS LEGACY CODE AND SHOULD BE DELETED // For all studies that have been deleted in the dataverse since last export, remove study directory in VDC String query = "SELECT s from DeletedStudy s where s.authority = '" + authority + "' "; List deletedStudies = em.createQuery(query).getResultList(); for (Iterator it = deletedStudies.iterator(); it.hasNext();) { DeletedStudy deletedStudy = (DeletedStudy) it.next(); logger.info("Deleting study " + deletedStudy.getGlobalId()); Study study = em.find(Study.class, deletedStudy.getId()); File legacyStudyDir = new File(FileUtil.getLegacyFileDir() + 
File.separatorChar + study.getAuthority() + File.separatorChar + study.getStudyId()); // Remove files in the directory, then delete the directory. File[] studyFiles = legacyStudyDir.listFiles(); if (studyFiles != null) { for (int i = 0; i < studyFiles.length; i++) { studyFiles[i].delete(); } } legacyStudyDir.delete(); deletedStudyCount++; em.remove(deletedStudy); } */ // Do export of all studies updated at "lastUpdateTime"" if (authority == null) { authority = vdcNetworkService.find().getAuthority(); } String beginTime = null; String endTime = null; if (lastUpdateTime == null) { Calendar cal = Calendar.getInstance(); cal.add(Calendar.DAY_OF_YEAR, -1); beginTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); // Use yesterday as default value cal.add(Calendar.DAY_OF_YEAR, 1); endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); } else { beginTime = lastUpdateTime; Date date = new SimpleDateFormat("yyyy-MM-dd").parse(lastUpdateTime); Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.add(Calendar.DAY_OF_YEAR, 1); endTime = new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()); } String query = "SELECT s from Study s where s.authority = '" + authority + "' "; query += " and s.lastUpdateTime >'" + beginTime + "'"; // query+=" and s.lastUpdateTime <'" +endTime+"'"; query += " order by s.studyId"; List updatedStudies = em.createQuery(query).getResultList(); for (Iterator it = updatedStudies.iterator(); it.hasNext();) { Study study = (Study) it.next(); logger.info("Exporting study " + study.getStudyId()); exportStudyToLegacySystem(study, authority); studyCount++; } } catch (Exception e) { logger.severe(e.getMessage()); String stackTrace = "StackTrace: \n"; logger.severe("Exception caused by: " + e + "\n"); StackTraceElement[] ste = e.getStackTrace(); for (int m = 0; m < ste.length; m++) { stackTrace += ste[m].toString() + "\n"; } logger.severe(stackTrace); } logger.info("End export, " + studyCount + " studies successfully exported, 
" + deletedStudyCount + " studies deleted."); }
From source file: com.cablelabs.sim.PCSim2.java
/** * This method creates the T.I.M results file for the a specific * test case.//from w ww .ja v a2s . com */ private void generateResults(TSDocument doc) { String fileName = doc.getLogFileName(); int index = fileName.lastIndexOf("_ss.log"); String timFileName = fileName.substring(0, index) + "_ss.res"; File tim = new File(timFileName); if (!tim.exists()) { Properties platform = SystemSettings.getSettings(SettingConstants.PLATFORM); Properties dut = SystemSettings.getSettings("DUT"); try { FileOutputStream output = null; if (tim.createNewFile()) { output = new FileOutputStream(tim); } else { output = new FileOutputStream((timFileName + "_" + System.currentTimeMillis())); } String testerName = platform.getProperty(SettingConstants.TESTER_NAME); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); Date stop = new Date(); String cw = platform.getProperty(SettingConstants.CW_NUMBER); if (!cw.startsWith("CW")) cw = "CW" + cw; String result = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" + "<res-document xmlns=\"http://cablelabs.com/TEPResultDocument\" version=\"1.0\">\n\t" + "<execution method=\"automatic\" start=\"" + sdf.format(doc.getStart()) + "\" stop=\"" + sdf.format(stop) + "\" />\n\t" + "<tester username=\"" + testerName + "\" />\n\t" + "<certwave name=\"" + cw + "\" />\n\t" + "<test-result type=\"" + platform.getProperty(SettingConstants.DUT_SUBGROUP) + "\" name=\"" + doc.getNumber() + "\"\n\tproduct=\"" + dut.getProperty(SettingConstants.DUT_VENDOR) + "\" result=\"" + ((testPassed == null || testPassed) ? "PASS" : "FAIL") + "\"\n\tunit=\"" + dut.getProperty(SettingConstants.PRODUCT_UNIT) + "\"/>\n" + "</res-document>"; if (output != null) { output.write(result.getBytes()); output.close(); } // else // logger.fatal(PC2LogCategory.Parser, subCat, // "Couldn't write TIM file! 
Writing to log file for preservation!\n" + result); } catch (IOException ioe) { logger.error(PC2LogCategory.Parser, subCat, "Could not create new TIM file[" + timFileName + "]."); } String tftpIP = platform.getProperty(SettingConstants.TFTP_SERVER_IP); String tftpPort = platform.getProperty(SettingConstants.TFTP_SERVER_PORT); if (tftpIP != null && tftpPort != null) { boolean recProv = SystemSettings .resolveBooleanSetting(platform.getProperty(SettingConstants.RECORD_PROVISIONING_FILE)); if (recProv && dut != null) try { RecordProvFileListener rpfl = new RecordProvFileListener(); boolean success = rpfl.run(); String provFile = rpfl.getValue(); if (success) { int port = Integer.parseInt(tftpPort); if (port > 0 && port <= 65535) { // Next make sure the TFTP Server IP is not set to 0.0.0.0 if (tftpIP.equals("0.0.0.0")) { logger.warn(PC2LogCategory.PCSim2, subCat, "The TFTP Server IP setting in the platform file is not valid. Ending auto generate operation."); } else { // Next we need to TFTP the file from the server TFTPClient client = new TFTPClient(); int dirIndex = fileName.lastIndexOf("/", index); File dir = new File(fileName.substring(0, dirIndex) + PROV_FILE_DIRECTORY); if (dir.exists() && !dir.isDirectory()) { logger.error(PC2LogCategory.PCSim2, subCat, "The path " + dir.getPath() + " is not a directory. 
Terminating the recording of the provisioning file."); } File binFile = new File(fileName.substring(0, dirIndex + 1) + PROV_FILE_DIRECTORY + File.separator + fileName.substring(dirIndex + 1, index) + "_prov.bin"); boolean exists = false; if (!binFile.exists()) exists = binFile.createNewFile(); if (exists && binFile.canWrite()) { FileOutputStream ostrm = new FileOutputStream(binFile); //InetAddress ia = InetAddress.getByName("10.4.1.37"); client.open(); // client.open(20003, ia); client.receiveFile(provFile, TFTP.BINARY_MODE, ostrm, tftpIP, port); client.close(); logger.info(PC2LogCategory.PCSim2, subCat, "TFTP of the record provisioning file is complete."); } else { logger.warn(PC2LogCategory.PCSim2, subCat, "The system could not TFTP the provisioning file because TFTP address is " + tftpIP + "."); } } } else { logger.warn(PC2LogCategory.PCSim2, subCat, "Recording of the provisioning file is terminating because the port(" + port + ") is less than 0 or greater than 65535."); } } else { logger.warn(PC2LogCategory.PCSim2, subCat, "Recording of the provisioning file is terminating because PACT returned an error string of \"" + provFile + "\"."); } } catch (NumberFormatException nfe) { logger.warn(PC2LogCategory.PCSim2, subCat, "PCSim2 is not auto generating a provisioning file because the " + "TFTP Server Port setting doesn't appear to be a number."); } catch (UnknownHostException uhe) { logger.warn(PC2LogCategory.PCSim2, subCat, "PCSim2 is not auto generating a provisioning file because the " + "system encountered an error when attempting to send the file to the TFTP Server.\n" + uhe.getMessage() + "\n" + uhe.getStackTrace()); } catch (IOException ioe) { logger.warn(PC2LogCategory.PCSim2, subCat, "PCSim2 is not auto generating a provisioning file because the " + "system encountered an error when attempting to send the file to the TFTP Server.\n" + ioe.getMessage() + "\n" + ioe.getStackTrace()); } } } }