List of usage examples for java.util.Scanner#hasNextLine()
public boolean hasNextLine()
From source file:org.mrgeo.hdfs.tile.SplitFile.java
/**
 * Reads split points (and, optionally, partition names) from the given stream
 * into the supplied output lists. Both lists are cleared before reading.
 *
 * Wire format: one Base64-encoded big-endian long per line. If the very first
 * decoded value equals HAS_PARTITION_NAMES, the file carries partition names
 * and each subsequent split line is followed by a Base64-encoded name line.
 *
 * @param stream     source of the encoded split data; closed on return
 * @param splits     output list of split values (cleared first)
 * @param partitions output list of partition names (cleared first; left empty
 *                   when the sentinel is absent)
 */
public void readSplits(final InputStream stream, final List<Long> splits, final List<String> partitions) {
    splits.clear();
    partitions.clear();
    final Scanner in = new Scanner(new BufferedReader(new InputStreamReader(stream)));
    try {
        boolean firstline = true;
        boolean hasPartitionNames = false;
        while (in.hasNextLine()) {
            // Each line decodes to a single big-endian long.
            final long split = ByteBuffer.wrap(Base64.decodeBase64(in.nextLine().getBytes())).getLong();
            if (firstline) {
                firstline = false;
                if (split == HAS_PARTITION_NAMES) {
                    // Sentinel value: the file interleaves partition names with splits.
                    hasPartitionNames = true;
                } else {
                    splits.add(split);
                }
            } else {
                if (split != HAS_PARTITION_NAMES) {
                    splits.add(split);
                }
                if (hasPartitionNames) {
                    // The partition name for this split is Base64-encoded on the next line.
                    final String partition = new String(Base64.decodeBase64(in.nextLine().getBytes()));
                    partitions.add(partition);
                }
            }
        }
    } finally {
        // Closing the Scanner closes the wrapped reader; the extra stream.close()
        // is a belt-and-braces close of the raw stream.
        in.close();
        try {
            stream.close();
        } catch (final IOException e) {
            log.error("Exception while closing stream in readSplits", e);
        }
    }
}
From source file:dotaSoundEditor.Controls.EditorPanel.java
/**
 * Reverts the currently selected sound entry in the tree back to the stock
 * sound stored inside the given VPK archive: deletes the custom sound file,
 * re-reads the internal script from the VPK, finds the matching original wave
 * path, and updates both the backing tree model and the visible UI node.
 *
 * NOTE(review): assumes a leaf row is selected; guarded only by the outer if.
 *
 * @param evt         the triggering button event (unused beyond dispatch)
 * @param vpkToRevert path of the VPK archive holding the original script
 */
protected void revertButtonActionPerformed(ActionEvent evt, Path vpkToRevert) {
    //TODO: See if we can abstract away some of this functionality
    if (currentTree.getSelectionRows().length != 0
            && ((TreeNode) currentTree.getSelectionPath().getLastPathComponent()).isLeaf()) {
        DefaultMutableTreeNode selectedNode = (DefaultMutableTreeNode) currentTree.getSelectionPath()
                .getLastPathComponent();
        String selectedWaveString = ((DefaultMutableTreeNode) selectedNode).getUserObject().toString();
        String selectedWaveParentString = ((DefaultMutableTreeNode) ((DefaultMutableTreeNode) selectedNode)
                .getParent()).getUserObject().toString();
        // Re-resolve the node from the wave path so we mutate the backing model node.
        selectedNode = (DefaultMutableTreeNode) this.getTreeNodeFromWavePath(selectedWaveString);
        //First go in and delete the sound in customSounds
        deleteSoundFileByWaveString(selectedWaveString);
        //Get the relevant wavestring from the internal scriptfile
        VPKArchive vpk = new VPKArchive();
        try {
            vpk.load(new File(vpkToRevert.toString()));
        } catch (IOException ex) {
            ex.printStackTrace();
        }
        // Convert the absolute script path into the VPK-internal relative form.
        String scriptDir = getCurrentScriptString();
        scriptDir = scriptDir.replace(Paths.get(installDir, "/dota/").toString(), "");
        scriptDir = scriptDir.replace("\\", "/"); //Match internal forward slashes
        scriptDir = scriptDir.substring(1); //Cut off leading slash
        byte[] bytes = null;
        VPKEntry entry = vpk.getEntry(scriptDir);
        try {
            ByteBuffer scriptBuffer = entry.getData();
            bytes = new byte[scriptBuffer.remaining()];
            scriptBuffer.get(bytes);
        } catch (IOException ex) {
            ex.printStackTrace();
        }
        String scriptFileString = new String(bytes, Charset.forName("UTF-8"));
        ArrayList<String> wavePathList = this.getWavePathsAsList(selectedNode.getParent());
        int waveStringIndex = wavePathList.indexOf(selectedWaveString);
        //Cut off every part of the scriptFileString before we get to the entry describing the relevant hero action, so we don't accidentally get the wrong wavepaths
        StringBuilder scriptFileStringShortened = new StringBuilder();
        Scanner scan = new Scanner(scriptFileString);
        boolean found = false;
        while (scan.hasNextLine()) {
            String curLine = scan.nextLine();
            if (curLine.equals(selectedWaveParentString)) {
                found = true;
            }
            if (found == true) {
                scriptFileStringShortened.append(curLine).append(System.lineSeparator());
            }
        }
        scriptFileString = scriptFileStringShortened.toString();
        // The replacement wave path sits at the same index as the selected one.
        ArrayList<String> internalWavePathsList = getWavePathListFromString(scriptFileString);
        String replacementString = internalWavePathsList.get(waveStringIndex);
        selectedNode.setUserObject(replacementString);
        ScriptParser parser = new ScriptParser(this.currentTreeModel);
        parser.writeModelToFile(getCurrentScriptString());
        //Modify the UI treeNode in addition to the backing TreeNode
        ((DefaultMutableTreeNode) currentTree.getLastSelectedPathComponent()).setUserObject(replacementString);
        ((DefaultTreeModel) currentTree.getModel())
                .nodeChanged((DefaultMutableTreeNode) currentTree.getLastSelectedPathComponent());
    }
}
From source file:org.ala.util.BieAccessLogReader.java
public final void processFile(String aFileName, String url) throws FileNotFoundException { Scanner scanner = new Scanner(new File(aFileName)); try {/*from www . ja v a2s. co m*/ if (uidInfosourceIDMap == null) { uidInfosourceIDMap = infoSourceDao.getInfosourceIdUidMap(); } if (restfulClient == null) { restfulClient = new RestfulClient(); } if (serMapper == null) { serMapper = new ObjectMapper(); serMapper.getSerializationConfig().setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL); } // first use a Scanner to get each line while (scanner.hasNextLine()) { try { processLine(scanner.nextLine(), url); } catch (Exception e) { //do nothing e.printStackTrace(); } } } finally { // ensure the underlying stream is always closed // this only has any effect if the item passed to the Scanner // constructor implements Closeable (which it does in this case). try { //check & clean recordCounts. processLine("", url); System.out.println("****** PROCESS END. line ctr :" + ctr); } catch (Exception e) { //do nothing } scanner.close(); restfulClient = null; } }
From source file:eu.prestoprime.plugin.rights.RightsTasks.java
/** * Execution: xsltproc --stringparam ppavro /tmp/ppavro.owl --stringparam * countrycodes /tmp/ebu_Iso3166CountryCodeCS.xml --stringparam * languagecodes /tmp/ebu_Iso639_1LanguageCodeCS.xml --stringparam querydoc * /tmp/query.owl /tmp/RightsCompareFromIndex.xsl /tmp/RightsIndex.xml * //w w w . j av a2s. co m * @throws DataException * @throws ToolException * @throws IOException * @throws JAXBException */ private List<String> queryRights(String owlQueryFilePath) throws DataException, ToolException, IOException, JAXBException { // List<RightsModel> resultRights = new ArrayList<RightsModel>(); List<String> resultRights = new ArrayList<String>(); XSLTProc xsltproc = new XSLTProc(); logger.debug("Initialized XSLTProc..."); // step 1: clean up OWL file String byPassXsl = xsltproc.addResourceFile("ByPassIntrsctns.xsl"); xsltproc.setXSLFile(byPassXsl); xsltproc.extract(owlQueryFilePath); String step1Result = xsltproc.getOutputFile(); // step2: extract RightsInstance // stylesheet String rightsxsl = xsltproc.addResourceFile("RightsCompareFromIndex.xsl"); xsltproc.setXSLFile(rightsxsl); // add stringparams String ppavro = xsltproc.addResourceFile("ppavro.owl"); xsltproc.addStringParam("ppavro", ppavro); String countrycodes = xsltproc.addResourceFile("ebu_Iso3166CountryCodeCS.xml"); xsltproc.addStringParam("countrycodes", countrycodes); String languagecodes = xsltproc.addResourceFile("ebu_Iso639_LanguageCodeCS.xml"); xsltproc.addStringParam("languagecodes", languagecodes); xsltproc.addStringParam("querydoc", step1Result); // index file RightsIndex rightsIndex = RightsUtils.getRightsIndex(); if (rightsIndex == null) return null; File indexTempFile = File.createTempFile("RightsIndex-", ".xml"); indexTempFile.deleteOnExit(); RightsUtils.getRightsContext().createMarshaller().marshal(rightsIndex, indexTempFile); String indexFile = indexTempFile.getAbsolutePath(); logger.debug("Looping on Rights Index..."); xsltproc.extract(indexFile); String resultFilePath = 
xsltproc.getOutputFile(); logger.debug("Scanning file: " + resultFilePath); FileInputStream resultInputStream = new FileInputStream(resultFilePath); Scanner scanner = new Scanner(resultInputStream); while (scanner.hasNextLine()) { String line = (String) scanner.nextLine(); if (line.startsWith("true")) { logger.debug("--------------------------------------------------------"); logger.debug("Found matching EE with identifier: " + line.split(",")[1]); String dcIdentifier = line.split(",")[1].trim(); String dcIdentifierNoFrags = dcIdentifier.split("#")[0]; // remove // media // fragments logger.debug("DC identifier: " + dcIdentifierNoFrags); Map<String, String> elements = new HashMap<String, String>(); elements.put("identifier", dcIdentifierNoFrags); String aipId = P4DataManager.getInstance().getAIPByDCID(dcIdentifierNoFrags); /* * logger.debug("Retrieving RightsModel for AIP "+aipId); * * List<RightsModel> rightsModelList = * manager.findRightsByAIPId(aipId); * * if(rightsModelList.size()>1){ * logger.error("RightsModel is not unique!!! Skipping..."); * continue; } * * resultRights.add(rightsModelList.get(0)); */ resultRights.add(aipId); logger.debug("Added AIP to RightsResult list: " + aipId); logger.debug("--------------------------------------------------------"); } } logger.debug("XSLTProc elapsed time: " + xsltproc.getExecTime()); return resultRights; }
From source file:edu.uci.ics.asterix.transaction.management.service.locking.LockManagerDeterministicUnitTest.java
public void readRequest() throws IOException, ACIDException { int i = 0;//w ww . j ava 2 s . c o m LockRequest lockRequest = null; TransactionContext txnContext = null; HashMap<Integer, TransactionContext> jobMap = new HashMap<Integer, TransactionContext>(); int threadId; String requestType; int jobId; int datasetId; int PKHashVal; int waitTime; ArrayList<Integer> list = null; String lockMode; Scanner scanner = new Scanner(new FileInputStream(requestFileName)); while (scanner.hasNextLine()) { try { threadId = Integer.parseInt(scanner.next().substring(1)); requestType = scanner.next(); if (requestType.equals("CSQ") || requestType.equals("CST") || requestType.equals("END")) { log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType); lockRequest = new LockRequest("Thread-" + threadId, getRequestType(requestType)); if (requestType.equals("CSQ") || requestType.equals("CST")) { list = new ArrayList<Integer>(); while (scanner.hasNextInt()) { threadId = scanner.nextInt(); if (threadId < 0) { break; } list.add(threadId); } expectedResultList.add(list); } } else if (requestType.equals("DW")) { defaultWaitTime = scanner.nextInt(); log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType + "," + defaultWaitTime); continue; } else if (requestType.equals("W")) { waitTime = scanner.nextInt(); log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType); lockRequest = new LockRequest("Thread-" + threadId, getRequestType(requestType), waitTime); } else { jobId = Integer.parseInt(scanner.next().substring(1)); datasetId = Integer.parseInt(scanner.next().substring(1)); PKHashVal = Integer.parseInt(scanner.next().substring(1)); lockMode = scanner.next(); txnContext = jobMap.get(jobId); if (txnContext == null) { txnContext = new TransactionContext(new JobId(jobId), txnProvider); jobMap.put(jobId, txnContext); } log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType + ",J" + jobId + ",D" + datasetId + ",E" + PKHashVal + "," + lockMode); lockRequest = new 
LockRequest("Thread-" + threadId, getRequestType(requestType), new DatasetId(datasetId), PKHashVal, getLockMode(lockMode), txnContext); } requestList.add(lockRequest); } catch (NoSuchElementException e) { scanner.close(); break; } } }
From source file:com.mapr.db.utils.ImportTPCHJSONFiles.java
public void readFileAndWriteToTable(String maprdbJsonTableName, String maprdbJsonTablePath, String jsonFilePath) {/*from ww w .j av a 2 s . c o m*/ System.out.println("Importing " + maprdbJsonTableName); try { Scanner scan = new Scanner(new FileReader(jsonFilePath)); StringBuilder jsonFileContents = new StringBuilder(); while (scan.hasNextLine()) { jsonFileContents.append(scan.nextLine()); } scan.close(); StringTokenizer st = new StringTokenizer(jsonFileContents.toString(), "%"); String record = ""; while (st.hasMoreTokens()) { record = st.nextToken(); //System.out.println(record); JSONParser parser = new JSONParser(); try { Object obj = parser.parse(record); JSONObject jsonObject = (JSONObject) obj; if (maprdbJsonTableName.equalsIgnoreCase("Customer")) { c = new Customer(); c = c.getDocument(jsonObject); //System.out.println(c.toString()); c_table = this.getTable(c_table, maprdbJsonTablePath); c.insertDocument(c_table); } else if (maprdbJsonTableName.equalsIgnoreCase("Lineitem")) { l = new Lineitem(); l = l.getDocument(jsonObject); //System.out.println(l.toString()); l_table = this.getTable(l_table, maprdbJsonTablePath); l.insertDocument(l_table); } else if (maprdbJsonTableName.equalsIgnoreCase("Orders")) { o = new Orders(); o = o.getDocument(jsonObject); //System.out.println(o.toString()); o_table = this.getTable(o_table, maprdbJsonTablePath); o.insertDocument(o_table); } else if (maprdbJsonTableName.equalsIgnoreCase("Part")) { p = new Part(); p = p.getDocument(jsonObject); //System.out.println(p.toString()); p_table = this.getTable(p_table, maprdbJsonTablePath); p.insertDocument(p_table); } else if (maprdbJsonTableName.equalsIgnoreCase("Partsupp")) { ps = new Partsupp(); ps = ps.getDocument(jsonObject); //System.out.println(ps.toString()); ps_table = this.getTable(ps_table, maprdbJsonTablePath); ps.insertDocument(ps_table); } else if (maprdbJsonTableName.equalsIgnoreCase("Nation")) { n = new Nation(); n = n.getDocument(jsonObject); 
//System.out.println(n.toString()); n_table = this.getTable(n_table, maprdbJsonTablePath); n.insertDocument(n_table); } else if (maprdbJsonTableName.equalsIgnoreCase("Supplier")) { s = new Supplier(); s = s.getDocument(jsonObject); //System.out.println(s.toString()); s_table = this.getTable(s_table, maprdbJsonTablePath); s.insertDocument(s_table); } else if (maprdbJsonTableName.equalsIgnoreCase("Region")) { r = new Region(); r = r.getDocument(jsonObject); //System.out.println(r.toString()); r_table = this.getTable(r_table, maprdbJsonTablePath); r.insertDocument(r_table); } } catch (Exception e) { e.printStackTrace(); } } } catch (Exception e) { e.printStackTrace(); } switch (maprdbJsonTableName) { case "CUSTOMER": System.out.println("Imported " + c_count + " Records\n"); break; case "LINEITEM": System.out.println("Imported " + l_count + " Records\n"); break; case "ORDERS": System.out.println("Imported " + o_count + " Records\n"); break; case "PART": System.out.println("Imported " + p_count + " Records\n"); break; case "PARTSUPP": System.out.println("Imported " + ps_count + " Records\n"); break; case "NATION": System.out.println("Imported " + n_count + " Records\n"); break; case "SUPPLIER": System.out.println("Imported " + s_count + " Records\n"); break; case "REGION": System.out.println("Imported " + r_count + " Records\n"); break; } }
From source file:edu.lternet.pasta.datapackagemanager.DataPackageArchive.java
/** * Generate an "archive" of the data package by parsing and retrieving * components of the data package resource map * //w ww .j a v a 2 s. c o m * @param scope * The scope value of the data package * @param identifier * The identifier value of the data package * @param revision * The revision value of the data package * @param map * The resource map of the data package * @param authToken * The authentication token of the user requesting the archive * @param transaction * The transaction id of the request * @return The file name of the data package archive * @throws Exception */ public String createDataPackageArchive(String scope, Integer identifier, Integer revision, String userId, AuthToken authToken, String transaction) throws Exception { String zipName = transaction + ".zip"; String zipPath = tmpDir + "/"; EmlPackageId emlPackageId = new EmlPackageId(scope, identifier, revision); StringBuffer manifest = new StringBuffer(); Date now = new Date(); manifest.append("Manifest file for " + zipName + " created on " + now.toString() + "\n"); DataPackageManager dpm = null; /* * It is necessary to create a temporary file while building the ZIP archive * to prevent the client from accessing an incomplete product. 
*/ String tmpName = DigestUtils.md5Hex(transaction); File zFile = new File(zipPath + tmpName); if (zFile.exists()) { String gripe = "The resource " + zipName + "already exists!"; throw new ResourceExistsException(gripe); } try { dpm = new DataPackageManager(); } catch (Exception e) { logger.error(e.getMessage()); e.printStackTrace(); throw e; } FileOutputStream fOut = null; try { fOut = new FileOutputStream(zFile); } catch (FileNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } if (dpm != null && fOut != null) { String map = null; try { map = dpm.readDataPackage(scope, identifier, revision.toString(), authToken, userId); } catch (Exception e) { logger.error(e.getMessage()); e.printStackTrace(); throw e; } Scanner mapScanner = new Scanner(map); ZipOutputStream zOut = new ZipOutputStream(fOut); while (mapScanner.hasNextLine()) { FileInputStream fIn = null; String objectName = null; File file = null; String line = mapScanner.nextLine(); if (line.contains(URI_MIDDLE_METADATA)) { try { file = dpm.getMetadataFile(scope, identifier, revision.toString(), userId, authToken); objectName = emlPackageId.toString() + ".xml"; } catch (ClassNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } catch (SQLException e) { logger.error(e.getMessage()); e.printStackTrace(); } catch (Exception e) { logger.error(e.getMessage()); e.printStackTrace(); } if (file != null) { try { fIn = new FileInputStream(file); Long size = FileUtils.sizeOf(file); manifest.append(objectName + " (" + size.toString() + " bytes)\n"); } catch (FileNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } } } else if (line.contains(URI_MIDDLE_REPORT)) { try { file = dpm.readDataPackageReport(scope, identifier, revision.toString(), emlPackageId, authToken, userId); objectName = emlPackageId.toString() + ".report.xml"; } catch (ClassNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } catch (SQLException e) { 
logger.error(e.getMessage()); e.printStackTrace(); } if (file != null) { try { fIn = new FileInputStream(file); Long size = FileUtils.sizeOf(file); manifest.append(objectName + " (" + size.toString() + " bytes)\n"); } catch (FileNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } } } else if (line.contains(URI_MIDDLE_DATA)) { String[] lineParts = line.split("/"); String entityId = lineParts[lineParts.length - 1]; String dataPackageResourceId = DataPackageManager.composeResourceId(ResourceType.dataPackage, scope, identifier, revision, null); String entityResourceId = DataPackageManager.composeResourceId(ResourceType.data, scope, identifier, revision, entityId); String entityName = null; String xml = null; try { entityName = dpm.readDataEntityName(dataPackageResourceId, entityResourceId, authToken); xml = dpm.readMetadata(scope, identifier, revision.toString(), userId, authToken); objectName = dpm.findObjectName(xml, entityName); file = dpm.getDataEntityFile(scope, identifier, revision.toString(), entityId, authToken, userId); } catch (UnauthorizedException e) { logger.error(e.getMessage()); e.printStackTrace(); manifest.append(objectName + " (access denied)\n"); } catch (ResourceNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } catch (ClassNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } catch (SQLException e) { logger.error(e.getMessage()); e.printStackTrace(); } catch (Exception e) { logger.error(e.getMessage()); e.printStackTrace(); } if (file != null) { try { fIn = new FileInputStream(file); Long size = FileUtils.sizeOf(file); manifest.append(objectName + " (" + size.toString() + " bytes)\n"); } catch (FileNotFoundException e) { logger.error(e.getMessage()); e.printStackTrace(); } } } if (objectName != null && fIn != null) { ZipEntry zipEntry = new ZipEntry(objectName); try { zOut.putNextEntry(zipEntry); int length; byte[] buffer = new byte[1024]; while ((length = fIn.read(buffer)) > 0) 
{ zOut.write(buffer, 0, length); } zOut.closeEntry(); fIn.close(); } catch (IOException e) { logger.error(e.getMessage()); e.printStackTrace(); } } } // Create ZIP archive manifest File mFile = new File(zipPath + transaction + ".txt"); FileUtils.writeStringToFile(mFile, manifest.toString()); ZipEntry zipEntry = new ZipEntry("manifest.txt"); try { FileInputStream fIn = new FileInputStream(mFile); zOut.putNextEntry(zipEntry); int length; byte[] buffer = new byte[1024]; while ((length = fIn.read(buffer)) > 0) { zOut.write(buffer, 0, length); } zOut.closeEntry(); fIn.close(); } catch (IOException e) { logger.error(e.getMessage()); e.printStackTrace(); } // Close ZIP archive zOut.close(); FileUtils.forceDelete(mFile); } File tmpFile = new File(zipPath + tmpName); File zipFile = new File(zipPath + zipName); // Copy hidden ZIP archive to visible ZIP archive, thus making available if (!tmpFile.renameTo(zipFile)) { String gripe = "Error renaming " + tmpName + " to " + zipName + "!"; throw new IOException(); } return zipName; }
From source file:org.lobid.lodmill.PipeLobidOrganisationEnrichment.java
/**
 * Loads the bundled GeoNames dump from the classpath and populates the
 * regional-id lookup with every ADM4 row that carries a numeric admin-4
 * code (column 13 -> GeoNames id from column 0).
 */
private void iniGeonamesDump() {
    final Scanner dumpScanner = new Scanner(
            Thread.currentThread().getContextClassLoader().getResourceAsStream(this.GEONAMES_DE_FILENAME));
    try {
        while (dumpScanner.hasNextLine()) {
            // Tab-separated GeoNames row; column 7 = feature code, 13 = admin-4.
            final String[] columns = dumpScanner.nextLine().split("\t");
            if (columns[13].matches("\\d+") && columns[7].equals("ADM4")) {
                GEONAMES_REGION_ID.put(columns[13], Integer.parseInt(columns[0]));
            }
        }
    } finally {
        dumpScanner.close();
    }
}
From source file:ml.shifu.shifu.core.processor.stats.MapReducerStatsWorker.java
/**
 * Merges the per-part updated-binning-info files into one local file, then
 * runs the "Stats Updating Binning" MapReduce job over the raw data set and
 * reports counter totals. The merged local file is distributed to the job via
 * {@code prepareJobConf} and deleted afterwards.
 *
 * @throws IOException            on HDFS/local file access failure
 * @throws InterruptedException   if the MR job is interrupted
 * @throws ClassNotFoundException if job classes cannot be resolved
 */
protected void updateBinningInfoWithMRJob() throws IOException, InterruptedException, ClassNotFoundException {
    RawSourceData.SourceType source = this.modelConfig.getDataSet().getSource();
    String filePath = Constants.BINNING_INFO_FILE_NAME;
    BufferedWriter writer = null;
    List<Scanner> scanners = null;
    try {
        // Concatenate every updated-binning-info part file into one local file.
        scanners = ShifuFileUtils.getDataScanners(pathFinder.getUpdatedBinningInfoPath(source), source);
        writer = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(new File(filePath)), Charset.forName("UTF-8")));
        for (Scanner scanner : scanners) {
            while (scanner.hasNextLine()) {
                String line = scanner.nextLine();
                writer.write(line + "\n");
            }
        }
    } finally {
        // release
        processor.closeScanners(scanners);
        IOUtils.closeQuietly(writer);
    }
    Configuration conf = new Configuration();
    prepareJobConf(source, conf, filePath);
    @SuppressWarnings("deprecation")
    Job job = new Job(conf, "Shifu: Stats Updating Binning Job : " + this.modelConfig.getModelSetName());
    job.setJarByClass(getClass());
    job.setMapperClass(UpdateBinningInfoMapper.class);
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(BinningInfoWritable.class);
    job.setInputFormatClass(CombineInputFormat.class);
    FileInputFormat.setInputPaths(job, ShifuFileUtils.getFileSystemBySourceType(source)
            .makeQualified(new Path(super.modelConfig.getDataSetRawPath())));
    job.setReducerClass(UpdateBinningInfoReducer.class);
    int mapperSize = new CombineInputFormat().getSplits(job).size();
    log.info("DEBUG: Test mapper size is {} ", mapperSize);
    // Reducer count: explicit config wins, otherwise ~1 reducer per 100 columns.
    Integer reducerSize = Environment.getInt(CommonConstants.SHIFU_UPDATEBINNING_REDUCER);
    if (reducerSize != null) {
        // NOTE(review): re-reads the same config key instead of reusing
        // reducerSize; the 20 default is unreachable here — confirm intent.
        job.setNumReduceTasks(Environment.getInt(CommonConstants.SHIFU_UPDATEBINNING_REDUCER, 20));
    } else {
        // By average, each reducer handle 100 variables
        int newReducerSize = (this.columnConfigList.size() / 100) + 1;
        log.info("Adjust updating binning info reducer size to {} ", newReducerSize);
        job.setNumReduceTasks(newReducerSize);
    }
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    String preTrainingInfo = this.pathFinder.getPreTrainingStatsPath(source);
    FileOutputFormat.setOutputPath(job, new Path(preTrainingInfo));
    // clean output firstly
    ShifuFileUtils.deleteFile(preTrainingInfo, source);
    // submit job
    if (!job.waitForCompletion(true)) {
        FileUtils.deleteQuietly(new File(filePath));
        throw new RuntimeException("MapReduce Job Updateing Binning Info failed.");
    } else {
        // Surface job counters so operators can judge data quality.
        long totalValidCount = job.getCounters().findCounter(Constants.SHIFU_GROUP_COUNTER, "TOTAL_VALID_COUNT")
                .getValue();
        long invalidTagCount = job.getCounters().findCounter(Constants.SHIFU_GROUP_COUNTER, "INVALID_TAG")
                .getValue();
        long filterOut = job.getCounters().findCounter(Constants.SHIFU_GROUP_COUNTER, "FILTER_OUT_COUNT")
                .getValue();
        long weightExceptions = job.getCounters().findCounter(Constants.SHIFU_GROUP_COUNTER, "WEIGHT_EXCEPTION")
                .getValue();
        log.info(
                "Total valid records {}, invalid tag records {}, filter out records {}, weight exception records {}",
                totalValidCount, invalidTagCount, filterOut, weightExceptions);
        if (totalValidCount > 0L && invalidTagCount * 1d / totalValidCount >= 0.8d) {
            log.warn(
                    "Too many invalid tags, please check you configuration on positive tags and negative tags.");
        }
    }
    FileUtils.deleteQuietly(new File(filePath));
}
From source file:com.blackducksoftware.tools.appuseradjuster.add.AddUser.java
/**
 * Reads the Application/User+role mapping file into a list of lines.
 *
 * FIX 1: when the file was missing, the original logged the error but then
 * passed the null stream to {@code new Scanner(fis)} and crashed with an
 * NPE; it now returns an empty list instead.
 * FIX 2: the Scanner is closed in a finally block so it cannot leak.
 *
 * @return the file's lines in order, or an empty list if the file is missing
 */
private List<String> getFileContents() {
    List<String> contents = new ArrayList<String>();
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(userAppRoleMappingFilePath);
    } catch (FileNotFoundException e) {
        logger.error("Unable to read in the Application/User mapping file: {}", userAppRoleMappingFilePath);
        e.printStackTrace();
        return contents; // nothing to read — avoid NPE on a null stream
    }
    Scanner scanner = new Scanner(fis);
    try {
        logger.info("Reading file for Application/User+role mapping ({})", userAppRoleMappingFilePath);
        while (scanner.hasNextLine()) {
            contents.add(scanner.nextLine());
        }
    } finally {
        // Closing the Scanner also closes the underlying FileInputStream.
        scanner.close();
    }
    return contents;
}