List of usage examples for java.util Scanner hasNext
public boolean hasNext()
From source file:ml.shifu.shifu.core.processor.ExportModelProcessor.java
/**
 * Reads the variable-selection (SE) MR job output into a map keyed by column index.
 *
 * Each data line is expected to be 5 tab-separated fields; lines with any other field
 * count are silently skipped. Fields 2-4 feed the ColumnStatistics constructor
 * (from the call shape they look like mean/rms-style stats — confirm against
 * ColumnStatistics before relying on the order).
 *
 * @param seOutputFiles glob pattern of the SE output file(s) on the given source
 * @param source        data source type (HDFS or local) used to resolve the file system
 * @return map of column index to its parsed ColumnStatistics
 * @throws IOException      on any read failure
 * @throws RuntimeException if the glob matches no files
 */
private Map<Integer, ColumnStatistics> readSEValuesToMap(String seOutputFiles, SourceType source)
        throws IOException {
    // here only works for 1 reducer: only globStatus[0] is read below
    FileStatus[] globStatus = ShifuFileUtils.getFileSystemBySourceType(source)
            .globStatus(new Path(seOutputFiles));
    if (globStatus == null || globStatus.length == 0) {
        throw new RuntimeException("Var select MSE stats output file not exist.");
    }
    Map<Integer, ColumnStatistics> map = new HashMap<Integer, ColumnStatistics>();
    List<Scanner> scanners = null;
    try {
        scanners = ShifuFileUtils.getDataScanners(globStatus[0].getPath().toString(), source);
        for (Scanner scanner : scanners) {
            String str = null;
            // hasNext() (token check) paired with nextLine(): trailing whitespace-only
            // lines are skipped, which is harmless for this tab-separated format
            while (scanner.hasNext()) {
                str = scanner.nextLine().trim();
                String[] splits = CommonUtils.split(str, "\t");
                if (splits.length == 5) {
                    map.put(Integer.parseInt(splits[0].trim()),
                            new ColumnStatistics(Double.parseDouble(splits[2]),
                                    Double.parseDouble(splits[3]), Double.parseDouble(splits[4])));
                }
            }
        }
    } finally {
        // scanners hold open file handles; always release them
        if (scanners != null) {
            for (Scanner scanner : scanners) {
                if (scanner != null) {
                    scanner.close();
                }
            }
        }
    }
    return map;
}
From source file:eu.cassandra.server.api.UploadFileService.java
@POST @Path("/upload") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response uploadFile(@FormDataParam("file") InputStream uploadedInputStream, @FormDataParam("file") FormDataContentDisposition fileDetail, @FormDataParam("prj_id") String prj_id) { String filename = fileDetail.getFileName(); String uploadedFileLocation = context.getRealPath("/resources") + "/" + filename; System.out.println(uploadedFileLocation); try {/*from ww w.j ava 2 s.co m*/ // Save it writeToFile(uploadedInputStream, uploadedFileLocation); // TODO: Create a Run and return the id in the response ObjectId objid = ObjectId.get(); DBObject run = new BasicDBObject(); String dbname = objid.toString(); Mongo m = new Mongo("localhost"); DB db = m.getDB(dbname); MongoResults mr = new MongoResults(dbname); mr.createIndexes(); Calendar calendar = Calendar.getInstance(); SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddhhmm"); String runName = "Run for " + filename + " on " + sdf.format(calendar.getTime()); run.put("_id", objid); run.put("name", runName); run.put("started", System.currentTimeMillis()); run.put("ended", System.currentTimeMillis()); run.put("type", "file"); run.put("prj_id", prj_id); run.put("percentage", 100); DBConn.getConn().getCollection(MongoRuns.COL_RUNS).insert(run); // Find the project DBObject query = new BasicDBObject(); query.put("_id", new ObjectId(prj_id)); DBObject project = DBConn.getConn().getCollection(MongoProjects.COL_PROJECTS).findOne(query); db.getCollection(MongoProjects.COL_PROJECTS).insert(project); // Make a scenario DBObject scenario = new BasicDBObject(); scenario.put("name", filename); scenario.put("project_id", prj_id); db.getCollection(MongoScenarios.COL_SCENARIOS).insert(scenario); // Get the scenario id query = new BasicDBObject(); query.put("project_id", prj_id); DBObject insertedScenario = db.getCollection(MongoScenarios.COL_SCENARIOS).findOne(query); ObjectId scnIdObj = (ObjectId) insertedScenario.get("_id"); String scnId = scnIdObj.toString(); // 
TODO: Parse and calculate KPIs File f = new File(uploadedFileLocation); Scanner sc = new Scanner(f); String header = sc.next(); String[] headerTokens = header.split(","); int numOfInstallations = headerTokens.length - 1; double maxPower = 0; double[] maxPowerInst = new double[numOfInstallations]; double avgPower = 0; double[] avgPowerInst = new double[numOfInstallations]; double energy = 0; double[] energyInst = new double[numOfInstallations]; int tick = 0; while (sc.hasNext()) { tick++; String line = sc.next(); String[] tokens = line.split(","); double powerSum = 0; for (int i = 1; i < tokens.length; i++) { double power = Double.parseDouble(tokens[i]); energyInst[i - 1] += (power / 1000.0) * Constants.MINUTE_HOUR_RATIO; avgPowerInst[i - 1] += power; if (maxPowerInst[i - 1] < power) { maxPowerInst[i - 1] = power; } powerSum += power; mr.addTickResultForInstallation(tick, headerTokens[i], power, 0, MongoResults.COL_INSTRESULTS); } mr.addAggregatedTickResult(tick, powerSum, 0, MongoResults.COL_AGGRRESULTS); energy += (powerSum / 1000.0) * Constants.MINUTE_HOUR_RATIO; avgPower += powerSum; if (maxPower < powerSum) { maxPower = powerSum; } } // TODO: Add ticks and KPIs in the db for (int i = 0; i < numOfInstallations; i++) { mr.addKPIs(headerTokens[i + 1], maxPowerInst[i], avgPowerInst[i] / tick, energyInst[i], 0, 0); } mr.addKPIs(MongoResults.AGGR, maxPower, avgPower / tick, energy, 0, 0); String output = "File uploaded to : " + uploadedFileLocation; // Make sim params DBObject sim_params = new BasicDBObject(); sim_params.put("name", filename); sim_params.put("scn_id", scnId); sim_params.put("numberOfDays", (tick / 1440)); sim_params.put("mcruns", 1); db.getCollection(MongoSimParam.COL_SIMPARAM).insert(sim_params); return Response.status(200).entity(output).build(); } catch (Exception exp) { JSONtoReturn jsonMsg = new JSONtoReturn(); String json = PrettyJSONPrinter.prettyPrint(jsonMsg.createJSONError("Error", exp)); Response r = 
Response.status(Response.Status.BAD_REQUEST).entity(json).build(); return r; } }
From source file:com.java2s.intents4.IntentsDemo4Activity.java
private IntentFilter createFilterFromEditTextFields() { IntentFilter filter = new IntentFilter(); if (filterActionsLayout != null) { int count = filterActionsLayout.getChildCount(); for (int i = 0; i < count; i++) { String action = ((EditText) ((ViewGroup) filterActionsLayout.getChildAt(i)).getChildAt(1)).getText() .toString().trim();/*ww w.jav a 2 s . co m*/ if (action.length() != 0) { filter.addAction(action); } } } if (filterSchemeLayout != null) { int count = filterSchemeLayout.getChildCount(); for (int i = 0; i < count; i++) { String scheme = ((EditText) ((ViewGroup) filterSchemeLayout.getChildAt(i)).getChildAt(1)).getText() .toString().trim(); if (scheme.length() != 0) { filter.addDataScheme(scheme); } } } if (filterAuthLayout != null) { int count = filterAuthLayout.getChildCount(); for (int i = 0; i < count; i++) { String auth = ((EditText) ((ViewGroup) filterAuthLayout.getChildAt(i)).getChildAt(1)).getText() .toString().trim(); if (auth.length() != 0) { Scanner scanner = new Scanner(auth); scanner.useDelimiter(":"); String host = null; String port = null; if (scanner.hasNext()) { host = scanner.next(); } if (scanner.hasNext()) { port = scanner.next(); } filter.addDataAuthority(host, port); } } } if (filterPathLayout != null) { int count = filterPathLayout.getChildCount(); for (int i = 0; i < count; i++) { ViewGroup group = (ViewGroup) filterPathLayout.getChildAt(i); String path = ((EditText) group.getChildAt(1)).getText().toString().trim(); String pattern = ((TextView) ((ViewGroup) group.getChildAt(2)).getChildAt(0)).getText().toString() .trim(); // ((TextView) int patternInt = 0; if (pattern.equals(pathPatterns[0])) { patternInt = PatternMatcher.PATTERN_LITERAL; } if (pattern.equals(pathPatterns[1])) { patternInt = PatternMatcher.PATTERN_PREFIX; } if (pattern.equals(pathPatterns[2])) { patternInt = PatternMatcher.PATTERN_SIMPLE_GLOB; } if (path.length() != 0) { filter.addDataPath(path, patternInt); } } } if (filterTypeLayout != null) { int count = 
filterTypeLayout.getChildCount(); for (int i = 0; i < count; i++) { String aType = ((EditText) ((ViewGroup) filterTypeLayout.getChildAt(i)).getChildAt(1)).getText() .toString().trim(); if (aType.length() != 0) { try { filter.addDataType(aType); } catch (MalformedMimeTypeException e) { e.printStackTrace(); } } } } if (filterCategoriesLayout != null) { int count = filterCategoriesLayout.getChildCount(); for (int i = 0; i < count; i++) { String cat = ((EditText) ((ViewGroup) filterCategoriesLayout.getChildAt(i)).getChildAt(1)).getText() .toString().trim(); if (cat.length() != 0) { filter.addCategory(cat); } } } return filter; }
From source file:gov.nih.nci.caarray.magetab.idf.IdfDocument.java
/**
 * Splits a semicolon-separated roles value and attaches each role, as a
 * ROLES ontology term, to the person at the given column index.
 *
 * Fixes over the original: the Scanner is now closed (it was never closed),
 * and the redundant intermediate roles list plus second iterator loop are
 * collapsed into a single pass — the iteration order and the calls made per
 * role are identical.
 *
 * @param value      semicolon-separated list of role names
 * @param valueIndex column index identifying which person the roles belong to
 */
private void handlePersonRole(String value, int valueIndex) {
    java.util.Scanner scanner = new java.util.Scanner(value).useDelimiter(";");
    try {
        while (scanner.hasNext()) {
            investigation.getOrCreatePerson(valueIndex).getRoles()
                    .add(addOntologyTerm(MageTabOntologyCategory.ROLES, scanner.next()));
        }
    } finally {
        scanner.close();
    }
}
From source file:com.axelor.controller.ConnectionToPrestashop.java
public boolean getConnection(String apiKey, String userName) { String message = ""; try {// w w w . j a va2 s .c o m URL url = new URL("http://localhost/client-lib/connection.php?Akey=" + apiKey); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.connect(); InputStream inputStream = connection.getInputStream(); Scanner scan = new Scanner(inputStream); while (scan.hasNext()) { message += scan.nextLine(); } scan.close(); System.out.println("MESSAGE :: " + message); } catch (Exception e) { e.printStackTrace(); } return message == "" ? true : false; }
From source file:ml.shifu.shifu.core.processor.StatsModelProcessor.java
/**
 * Dump {@link CorrelationWritable} from correlation MR job output file. This may need more memory if high column
 * number. Local memory should be set to 4G instead of 2G.
 *
 * Each data line is "columnIndex &lt;TAB&gt; base64-serialized CorrelationWritable"; lines
 * without a tab are skipped.
 *
 * @param source
 *            source type
 * @param outputFilePattern
 *            output file pattern like part-*
 * @return Sorted map including CorrelationWritable info
 * @throws IOException
 *             any IO exception in reading output file
 * @throws UnsupportedEncodingException
 *             encoding exception to de-serialize correlation info in output file
 */
private SortedMap<Integer, CorrelationWritable> dumpCorrInfo(SourceType source, String outputFilePattern)
        throws IOException, UnsupportedEncodingException {
    SortedMap<Integer, CorrelationWritable> corrMap = new TreeMap<Integer, CorrelationWritable>();
    FileStatus[] globStatus = ShifuFileUtils.getFileSystemBySourceType(source)
            .globStatus(new Path(outputFilePattern));
    if (globStatus == null || globStatus.length == 0) {
        throw new RuntimeException("Correlation computing output file not exist.");
    }
    // unlike the SE reader, every matched output part file is consumed here
    for (FileStatus fileStatus : globStatus) {
        List<Scanner> scanners = ShifuFileUtils.getDataScanners(fileStatus.getPath().toString(), source);
        for (Scanner scanner : scanners) {
            // hasNext()/nextLine() pairing: trailing whitespace-only lines are skipped,
            // which is harmless for this tab-separated format
            while (scanner.hasNext()) {
                String str = scanner.nextLine().trim();
                if (str.contains(Constants.TAB_STR)) {
                    String[] splits = str.split(Constants.TAB_STR);
                    String corrStr = splits[1];
                    int columnIndex = Integer.parseInt(splits[0].trim());
                    // payload is base64-encoded serialized CorrelationWritable
                    corrMap.put(columnIndex, bytesToObject(Base64.decodeBase64(corrStr.getBytes("utf-8"))));
                }
            }
        }
        closeScanners(scanners);
    }
    return corrMap;
}
From source file:org.apache.hadoop.hdfs.server.balancer.LatencyBalancer.java
/**
 * Loads node latencies from a whitespace-separated file of (node, latency)
 * pairs into the {@code latencies} map, then prints the loaded entries.
 *
 * Fixes over the original: the Scanner is now always closed (it previously
 * leaked), and a guard protects against a file with an odd number of tokens —
 * the original called {@code sc.next()} twice after checking {@code hasNext()}
 * only once, so a trailing node name without a value threw an uncaught
 * NoSuchElementException.
 *
 * @param path filesystem path of the latency file
 */
void getLatencies(String path) {
    Scanner sc = null;
    try {
        sc = new Scanner(new File(path));
        while (sc.hasNext()) {
            String node = sc.next();
            // fix: each node token must be followed by a latency token
            if (!sc.hasNext()) {
                System.err.println("Malformed latency file: missing latency value for " + node);
                break;
            }
            latencies.put(node, Double.parseDouble(sc.next()));
        }
        for (String lt : latencies.keySet()) {
            System.out.println(lt + " " + latencies.get(lt));
        }
    } catch (IOException e) {
        System.err.println(e);
    } finally {
        if (sc != null) {
            sc.close();
        }
    }
}
From source file:com.groupon.odo.proxylib.BackupService.java
/**
 * Restore configuration from backup data.
 *
 * The stream is read whole (Scanner with the "\\A" delimiter consumes the entire
 * input as one token), parsed as a JSON {@code Backup}, and then applied in four
 * independent phases — groups, profiles, scripts, and a proxy-plugin reload. Each
 * phase has its own try/catch, so a failure in one phase does not stop the others.
 *
 * @param streamData InputStream for configuration to restore
 * @return true if succeeded, false if operation failed (only JSON parse failure
 *         returns false; per-phase errors are printed and swallowed)
 */
public boolean restoreBackupData(InputStream streamData) {
    // convert stream to string ("\\A" delimiter = read the whole stream as one token)
    java.util.Scanner s = new java.util.Scanner(streamData).useDelimiter("\\A");
    String data = s.hasNext() ? s.next() : "";
    // parse JSON
    ObjectMapper mapper = new ObjectMapper();
    Backup backupData = null;
    try {
        backupData = mapper.readValue(data, Backup.class);
    } catch (Exception e) {
        logger.error("Could not parse input data: {}, {}", e.getClass(), e.getMessage());
        return false;
    }
    // TODO: validate json against a schema for safety
    // GROUPS
    try {
        logger.info("Number of groups: {}", backupData.getGroups().size());
        for (Group group : backupData.getGroups()) {
            // determine if group already exists.. if not then add it
            Integer groupId = PathOverrideService.getInstance().getGroupIdFromName(group.getName());
            if (groupId == null) {
                groupId = PathOverrideService.getInstance().addGroup(group.getName());
            }
            // get all methods from the group.. we are going to remove ones that
            // don't exist in the new configuration
            List<Method> originalMethods = EditService.getInstance().getMethodsFromGroupId(groupId, null);
            for (Method originalMethod : originalMethods) {
                Boolean matchInImportGroup = false;
                // importCount tracks the index of the matching import method so it
                // can be removed from the import list below
                int importCount = 0;
                for (Method importMethod : group.getMethods()) {
                    if (originalMethod.getClassName().equals(importMethod.getClassName())
                            && originalMethod.getMethodName().equals(importMethod.getMethodName())) {
                        matchInImportGroup = true;
                        break;
                    }
                    importCount++;
                }
                if (!matchInImportGroup) {
                    // remove it from current database since it is a delta to the current import
                    PathOverrideService.getInstance().removeOverride(originalMethod.getId());
                } else {
                    // remove from import list since it already exists
                    group.getMethods().remove(importCount);
                }
            }
            // add methods to groups
            for (Method method : group.getMethods()) {
                PathOverrideService.getInstance().createOverride(groupId, method.getMethodName(),
                        method.getClassName());
            }
        }
    } catch (Exception e) {
        // NOTE(review): phase errors are only printed; the restore continues
        e.printStackTrace();
    }
    // PROFILES
    try {
        logger.info("Number of profiles: {}", backupData.getProfiles().size());
        // remove all servers — don't care about deltas here.. we'll just recreate them all
        // removed default servers (belong to group id=0)
        ServerRedirectService.getInstance().deleteServerGroup(0);
        for (com.groupon.odo.proxylib.models.backup.Profile profile : backupData.getProfiles()) {
            // see if a profile with this name already exists
            Integer profileId = ProfileService.getInstance().getIdFromName(profile.getName());
            com.groupon.odo.proxylib.models.Profile newProfile;
            if (profileId == null) {
                // create new profile
                newProfile = ProfileService.getInstance().add(profile.getName());
            } else {
                // get the existing profile
                newProfile = ProfileService.getInstance().findProfile(profileId);
            }
            // add new servers
            if (profile.getServers() != null) {
                for (ServerRedirect server : profile.getServers()) {
                    ServerRedirectService.getInstance().addServerRedirect(server.getRegion(),
                            server.getSrcUrl(), server.getDestUrl(), server.getHostHeader(),
                            newProfile.getId(), 0);
                }
            }
            // remove all server groups
            for (ServerGroup group : ServerRedirectService.getInstance()
                    .tableServerGroups(newProfile.getId())) {
                ServerRedirectService.getInstance().deleteServerGroup(group.getId());
            }
            // add new server groups
            if (profile.getServerGroups() != null) {
                for (ServerGroup group : profile.getServerGroups()) {
                    int groupId = ServerRedirectService.getInstance().addServerGroup(group.getName(),
                            newProfile.getId());
                    for (ServerRedirect server : group.getServers()) {
                        ServerRedirectService.getInstance().addServerRedirect(server.getRegion(),
                                server.getSrcUrl(), server.getDestUrl(), server.getHostHeader(),
                                newProfile.getId(), groupId);
                    }
                }
            }
            // remove all paths — don't care about deltas here.. we'll just recreate them all
            for (EndpointOverride path : PathOverrideService.getInstance().getPaths(newProfile.getId(),
                    Constants.PROFILE_CLIENT_DEFAULT_ID, null)) {
                PathOverrideService.getInstance().removePath(path.getPathId());
            }
            // add new paths
            if (profile.getPaths() != null) {
                for (EndpointOverride path : profile.getPaths()) {
                    int pathId = PathOverrideService.getInstance().addPathnameToProfile(newProfile.getId(),
                            path.getPathName(), path.getPath());
                    PathOverrideService.getInstance().setContentType(pathId, path.getContentType());
                    PathOverrideService.getInstance().setRequestType(pathId, path.getRequestType());
                    PathOverrideService.getInstance().setGlobal(pathId, path.getGlobal());
                    // add groups to path
                    for (String groupName : path.getGroupNames()) {
                        int groupId = PathOverrideService.getInstance().getGroupIdFromName(groupName);
                        PathOverrideService.getInstance().AddGroupByNumber(newProfile.getId(), pathId,
                                groupId);
                    }
                }
            }
            // set active
            ClientService.getInstance().updateActive(newProfile.getId(),
                    Constants.PROFILE_CLIENT_DEFAULT_ID, profile.getActive());
        }
    } catch (Exception e) {
        // NOTE(review): phase errors are only printed; the restore continues
        e.printStackTrace();
    }
    // SCRIPTS
    try {
        // delete all scripts
        for (Script script : ScriptService.getInstance().getScripts()) {
            ScriptService.getInstance().removeScript(script.getId());
        }
        // add scripts
        for (Script script : backupData.getScripts()) {
            ScriptService.getInstance().addScript(script.getName(), script.getScript());
        }
    } catch (Exception e) {
        // NOTE(review): phase errors are only printed; the restore continues
        e.printStackTrace();
    }
    // tell http/https proxies to reload plugins
    try {
        org.apache.http.conn.ssl.SSLSocketFactory sslsf = new org.apache.http.conn.ssl.SSLSocketFactory(
                new TrustStrategy() {
                    @Override
                    public boolean isTrusted(final X509Certificate[] chain, String authType)
                            throws CertificateException {
                        // ignore SSL cert issues — the proxies are local, self-signed endpoints
                        return true;
                    }
                });
        HostnameVerifier hostnameVerifier = org.apache.http.conn.ssl.SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER;
        sslsf.setHostnameVerifier((X509HostnameVerifier) hostnameVerifier);
        for (String connectstring : getConnectorStrings("https_proxy")) {
            HttpGet request = new HttpGet(connectstring + "/proxy/reload");
            HttpClient httpClient = new org.apache.http.impl.client.DefaultHttpClient();
            // register the permissive SSL factory on the proxy's port
            // (port is the last ":"-separated component of the connect string)
            String[] parts = connectstring.split(":");
            httpClient.getConnectionManager().getSchemeRegistry()
                    .register(new org.apache.http.conn.scheme.Scheme("https",
                            Integer.parseInt(parts[parts.length - 1]), sslsf));
            HttpResponse response = httpClient.execute(request);
        }
    } catch (Exception e) {
        e.printStackTrace();
        logger.info("Exception caught during proxy reload. Things may be in an inconsistent state.");
    }
    // restart plugin service for this process
    PluginManager.destroy();
    return true;
}
From source file:com.zilotti.hostsjuggler.view.ActiveHostsFileWindow.java
/**
 * Loads a hosts file into the styled-text widget and applies syntax highlighting:
 * remark lines (starting with REM_LINE_CHAR) get remark formatting, and host lines
 * get their IP-address and host-name tokens formatted individually.
 *
 * Highlighting works on absolute character offsets into the widget text, so the
 * counters must track the file contents exactly, including the "\n" appended to
 * every line.
 *
 * NOTE(review): the per-line Scanner is never closed; harmless for a String-backed
 * scanner but worth tidying.
 *
 * @param hostsFile the hosts file to display and highlight
 * @throws IOException    on read failure
 * @throws ParseException if a non-blank, non-remark line is not "IP name [name...]"
 */
private void highlightActiveHostsFile(File hostsFile) throws IOException, ParseException {
    BufferedReader br = null;
    try {
        /* Converts the file to text */
        // StringReader fileReader = new StringReader(getLinuxHostsFile()); // For testing
        br = new BufferedReader(new FileReader(hostsFile));
        /* Character counter — absolute offset into the widget text */
        int charCounter = 0;
        /* Line counter — 1-based, used in ParseException positions */
        int lineCounter = 1;
        /* Line */
        String line = null;
        while ((line = br.readLine()) != null) {
            // re-append the newline readLine() stripped so offsets match the widget text
            line += "\n";
            activeHostsFileStyledText.append(line);
            /*
             * Remark line
             */
            if (line.startsWith(REM_LINE_CHAR)) {
                int prevCharCounter = charCounter;
                charCounter += line.length();
                formatRemark(prevCharCounter, charCounter);
                if (log.isTraceEnabled()) {
                    log.trace("line ='" + line + "'");
                    //log.trace("remark='"+ getWindowsHostsFile().substring(prevCharCounter, charCounter) +"' ("+ prevCharCounter +","+ charCounter +")");
                }
            } else if (StringUtils.isBlank(line)) // Empty line
            {
                charCounter += line.length();
            } else // Expects a host line
            {
                // localCharCounter walks token-by-token inside this line
                int localCharCounter = charCounter;
                charCounter += line.length();
                Scanner scanner = new Scanner(line);
                scanner.useDelimiter(Pattern.compile("(\\s)"));
                /* Output of the parsing code */
                String ipAddress = null;
                /* Verifies the number of tokens. At least two must exist (IP address and one name) */
                if (scanner.hasNext()) {
                    /* The first token must be an IP address */
                    {
                        ipAddress = scanner.next();
                        if (!NetworkUtils.isIpAddress(ipAddress))
                            throw new ParseException("IP address expected. Token found: " + ipAddress,
                                    lineCounter);
                        int prevCharCounter = localCharCounter;
                        localCharCounter += ipAddress.length() + 1; // Sums 1 because of the lost space
                        formatIpAddress(prevCharCounter, localCharCounter);
                    }
                    /* The remaining tokens are the host names associated to the IP address */
                    {
                        while (scanner.hasNext()) {
                            String hostName = scanner.next();
                            // consecutive whitespace delimiters yield empty tokens; each
                            // one still advances the offset by a single character
                            if (StringUtils.isWhitespace(hostName) || StringUtils.isBlank(hostName)) {
                                localCharCounter++;
                            } else if (NetworkUtils.isHostName(hostName)) {
                                int prevCharCounter = localCharCounter;
                                localCharCounter += hostName.length() + 1; // 1 to compensate the space lost
                                // if(log.isTraceEnabled())
                                //     log.trace("hostName='"+ getWindowsHostsFile().substring(prevCharCounter, localCharCounter) +"' ("+ prevCharCounter +","+ localCharCounter +")");
                                formatHostName(prevCharCounter, localCharCounter);
                            } else
                                throw new ParseException("Host name expected at token " + localCharCounter
                                        + ". Found: " + hostName, lineCounter);
                        }
                    }
                } else
                    throw new ParseException("At least 2 tokens are expected from a host line.", lineCounter);
            }
            lineCounter++;
        }
    } finally {
        if (br != null)
            br.close();
    }
}
From source file:com.axelor.controller.ConnectionToPrestashop.java
/**
 * Synchronizes Prestashop customer groups with the ERP database.
 *
 * Fetches all group ids from the Prestashop web service, inserts the ones the ERP
 * does not know yet, and soft-deletes (archives) ERP groups that no longer exist
 * in Prestashop.
 *
 * NOTE(review): the return sits in a finally block (hence the
 * {@code @SuppressWarnings("finally")}); any exception thrown inside try/catch is
 * discarded and the current {@code message} is returned instead. The HTTP
 * connection is also never disconnected, and the Scanner is only closed on the
 * happy path — worth tidying.
 *
 * @return "done" on success, an authentication-error message on any failure
 */
@SuppressWarnings("finally")
@Transactional
public String syncCustomerGroup() {
    String message = "";
    try {
        List<Integer> groupIdList = new ArrayList<Integer>();
        List<Integer> erpIdList = new ArrayList<Integer>();
        List<PrestashopCustomerGroup> erpList = PrestashopCustomerGroup.all().fetch();
        // collect the group ids currently known to the ERP
        for (PrestashopCustomerGroup prestahopCustomerGroup : erpList) {
            erpIdList.add(prestahopCustomerGroup.getId_group());
        }
        URL url = new URL(
                "http://localhost/client-lib/crud/action.php?resource=groups&action=getallid&Akey=" + apiKey);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");
        connection.connect();
        InputStream inputStream = connection.getInputStream();
        Scanner scan = new Scanner(inputStream);
        // the endpoint returns one group id per line
        while (scan.hasNext()) {
            String data = scan.nextLine();
            System.out.println(data);
            groupIdList.add(Integer.parseInt(data));
        }
        System.out.println("From Prestashop :: " + groupIdList.size());
        System.out.println("From ERP :: " + erpIdList.size());
        scan.close();
        // Check new entries in the prestahop
        Iterator<Integer> prestaListIterator = groupIdList.iterator();
        while (prestaListIterator.hasNext()) {
            Integer tempId = prestaListIterator.next();
            System.out.println("Current prestaid for operation ::" + tempId);
            // remove(Object) by Integer value, not by index
            if (erpIdList.contains(tempId)) {
                erpIdList.remove(tempId);
            } else {
                System.out.println("Current prestaid for insertion operation ::" + tempId);
                // insert new data in ERP Database
                insertGroup(tempId);
                erpIdList.remove(tempId);
            }
        }
        if (erpIdList.isEmpty()) {
            System.out.println("Synchronization is completed.");
            message = "done";
        } else {
            // whatever is left in erpIdList exists only in the ERP — archive it
            Iterator<Integer> erpListIterator = erpIdList.iterator();
            while (erpListIterator.hasNext()) {
                Integer tempId = erpListIterator.next();
                System.out.println("Currently in Erp ::" + tempId);
                if (tempId != 0) {
                    PrestashopCustomerGroup customerGroupDelete = PrestashopCustomerGroup.all()
                            .filter("id_group=?", tempId).fetchOne();
                    String groupName = customerGroupDelete.getName();
                    // soft delete: archive instead of removing the row
                    // customerGroupDelete.remove();
                    customerGroupDelete.setArchived(Boolean.TRUE);
                    System.out.println("customer deleted ::" + groupName);
                }
            }
            // NOTE(review): prestaListIterator is already exhausted above, so this
            // loop never executes; presumably debug leftovers
            while (prestaListIterator.hasNext()) {
                Integer tempId = prestaListIterator.next();
                System.out.println("Currently in prestashop ::" + tempId);
            }
            System.out.println("Synchronization is completed.");
            message = "done";
        }
    } catch (Exception e) {
        message = "Wrong Authentication Key or Key has been disabled.";
    } finally {
        return message;
    }
}