List of usage examples for java.util.Scanner#useDelimiter
public Scanner useDelimiter(String pattern)
From source file:org.openhab.binding.amazonechocontrol.internal.Connection.java
/**
 * Reads the full response body of the given connection into a String.
 * <p>
 * Honors a gzip {@code Content-Encoding} and the charset declared in the
 * {@code Content-Type} header (falling back to UTF-8 when absent).
 *
 * @param connection an already-connected HTTPS connection
 * @return the decoded response body, or "" if there is no input stream / no content
 * @throws IOException if reading or decompressing the stream fails
 */
public String convertStream(HttpsURLConnection connection) throws IOException {
    InputStream input = connection.getInputStream();
    if (input == null) {
        return "";
    }
    InputStream readerStream;
    if (StringUtils.equalsIgnoreCase(connection.getContentEncoding(), "gzip")) {
        // BUG FIX: wrap the stream already obtained above instead of calling
        // connection.getInputStream() a second time.
        readerStream = new GZIPInputStream(input);
    } else {
        readerStream = input;
    }
    // Extract the charset from the Content-Type header, if present
    String contentType = connection.getContentType();
    String charSet = null;
    if (contentType != null) {
        Matcher m = charsetPattern.matcher(contentType);
        if (m.find()) {
            charSet = m.group(1).trim().toUpperCase();
        }
    }
    String result = "";
    // try-with-resources guarantees the scanner (and the wrapped stream) is
    // closed even when reading throws; the original leaked on that path.
    try (Scanner inputScanner = StringUtils.isEmpty(charSet)
            ? new Scanner(readerStream, StandardCharsets.UTF_8.name())
            : new Scanner(readerStream, charSet)) {
        // "\\A" (start of input) makes next() return the entire stream as one token
        Scanner scannerWithoutDelimiter = inputScanner.useDelimiter("\\A");
        if (scannerWithoutDelimiter.hasNext()) {
            result = scannerWithoutDelimiter.next();
        }
    } finally {
        input.close();
    }
    return result;
}
From source file:gdsc.smlm.ij.plugins.DriftCalculator.java
/** * Read the drift file storing the T,X,Y into the class level calculatedTimepoints, lastdx and lastdy * arrays. Ignore any records where T is outside the limits. * /*from w w w.j a v a 2 s. co m*/ * @param limits * @return The number of records read */ private int readDriftFile(int[] limits) { int ok = 0; BufferedReader input = null; try { FileInputStream fis = new FileInputStream(driftFilename); input = new BufferedReader(new UnicodeReader(fis, null)); String line; Pattern pattern = Pattern.compile("[\t, ]+"); while ((line = input.readLine()) != null) { if (line.length() == 0) continue; if (Character.isDigit(line.charAt(0))) { try { Scanner scanner = new Scanner(line); scanner.useDelimiter(pattern); scanner.useLocale(Locale.US); final int t = scanner.nextInt(); if (t < limits[0] || t > limits[1]) continue; final double x = scanner.nextDouble(); final double y = scanner.nextDouble(); calculatedTimepoints[t] = ++ok; lastdx[t] = x; lastdy[t] = y; scanner.close(); } catch (InputMismatchException e) { } catch (NoSuchElementException e) { } } } } catch (IOException e) { // ignore } finally { try { if (input != null) input.close(); } catch (IOException e) { // Ignore } } return ok; }
From source file:org.apache.openaz.xacml.rest.XACMLPapServlet.java
/** * Requests from the Admin Console to create new items or update existing ones * * @param request/*from w ww.jav a2 s.c o m*/ * @param response * @param groupId * @throws ServletException * @throws java.io.IOException */ private void doACPut(HttpServletRequest request, HttpServletResponse response, String groupId) throws ServletException, IOException { try { // for PUT operations the group may or may not need to exist before the operation can be done PDPGroup group = papEngine.getGroup(groupId); // determine the operation needed based on the parameters in the request // for remaining operations the group must exist before the operation can be done if (group == null) { logger.error("Unknown groupId '" + groupId + "'"); response.sendError(HttpServletResponse.SC_NOT_FOUND, "Unknown groupId '" + groupId + "'"); return; } if (request.getParameter("policy") != null) { // group=<groupId> policy=<policyId> contents=policy file <= Create new policy file in group // dir, or replace it if it already exists (do not touch properties) // TODO - currently this is done by the AC, but it should be done here by getting the policy // file out of the contents and saving to disk logger.error("PARTIALLY IMPLEMENTED!!! ACTUAL CHANGES SHOULD BE MADE BY PAP SERVLET!!! "); response.setStatus(HttpServletResponse.SC_NO_CONTENT); return; } else if (request.getParameter("pdpId") != null) { // ARGS: group=<groupId> pdpId=<pdpId/URL> <= create a new PDP or Update an Existing one String pdpId = request.getParameter("pdpId"); // get the request content into a String String json = null; // read the inputStream into a buffer (trick found online scans entire input looking for // end-of-file) Scanner scanner = new Scanner(request.getInputStream()); scanner.useDelimiter("\\A"); json = scanner.hasNext() ? 
scanner.next() : ""; scanner.close(); logger.info("JSON request from AC: " + json); // convert Object sent as JSON into local object ObjectMapper mapper = new ObjectMapper(); Object objectFromJSON = mapper.readValue(json, StdPDP.class); if (pdpId == null || objectFromJSON == null || !(objectFromJSON instanceof StdPDP) || ((StdPDP) objectFromJSON).getId() == null || !((StdPDP) objectFromJSON).getId().equals(pdpId)) { logger.error( "PDP new/update had bad input. pdpId=" + pdpId + " objectFromJSON=" + objectFromJSON); response.sendError(500, "Bad input, pdpid=" + pdpId + " object=" + objectFromJSON); } StdPDP pdp = (StdPDP) objectFromJSON; if (papEngine.getPDP(pdpId) == null) { // this is a request to create a new PDP object papEngine.newPDP(pdp.getId(), group, pdp.getName(), pdp.getDescription()); } else { // this is a request to update the pdp papEngine.updatePDP(pdp); } response.setStatus(HttpServletResponse.SC_NO_CONTENT); if (logger.isDebugEnabled()) { logger.debug("PDP '" + pdpId + "' created/updated"); } // adjust the group's state including the new PDP ((StdPDPGroup) group).resetStatus(); // tell the Admin Consoles there is a change notifyAC(); // this might affect the PDP, so notify it of the change pdpChanged(pdp); return; } else if (request.getParameter("pipId") != null) { // group=<groupId> pipId=<pipEngineId> contents=pip properties <= add a PIP to pip config, or // replace it if it already exists (lenient operation) // TODO logger.error("UNIMPLEMENTED "); response.sendError(HttpServletResponse.SC_BAD_REQUEST, "UNIMPLEMENTED"); return; } else { // Assume that this is an update of an existing PDP Group // ARGS: group=<groupId> <= Update an Existing Group // get the request content into a String String json = null; // read the inputStream into a buffer (trick found online scans entire input looking for // end-of-file) Scanner scanner = new Scanner(request.getInputStream()); scanner.useDelimiter("\\A"); json = scanner.hasNext() ? 
scanner.next() : ""; scanner.close(); logger.info("JSON request from AC: " + json); // convert Object sent as JSON into local object ObjectMapper mapper = new ObjectMapper(); Object objectFromJSON = mapper.readValue(json, StdPDPGroup.class); if (objectFromJSON == null || !(objectFromJSON instanceof StdPDPGroup) || !((StdPDPGroup) objectFromJSON).getId().equals(group.getId())) { logger.error("Group update had bad input. id=" + group.getId() + " objectFromJSON=" + objectFromJSON); response.sendError(500, "Bad input, id=" + group.getId() + " object=" + objectFromJSON); } // The Path on the PAP side is not carried on the RESTful interface with the AC // (because it is local to the PAP) // so we need to fill that in before submitting the group for update ((StdPDPGroup) objectFromJSON).setDirectory(((StdPDPGroup) group).getDirectory()); papEngine.updateGroup((StdPDPGroup) objectFromJSON); response.setStatus(HttpServletResponse.SC_NO_CONTENT); if (logger.isDebugEnabled()) { logger.debug("Group '" + group.getId() + "' updated"); } // tell the Admin Consoles there is a change notifyAC(); // Group changed, which might include changing the policies groupChanged(group); return; } } catch (PAPException e) { logger.error("AC PUT exception: " + e, e); response.sendError(500, e.getMessage()); return; } }
From source file:com.ikanow.infinit.e.api.social.sharing.ShareHandler.java
/**
 * Resolves the document that a share references and returns it as a JSON string.
 * <p>
 * Two cases are handled:
 * <ul>
 * <li>No document id: the "collection" field is treated as a local file path and the
 * file's first line is returned (null on any error).</li>
 * <li>Otherwise: the referenced object is looked up in MongoDB, a community-overlap
 * permission check is applied, and the object's JSON is returned.</li>
 * </ul>
 *
 * @param share the share whose document location is being resolved; its modified
 *              timestamp is updated as a side effect when available
 * @return the reference content as a String, or null for an unreadable local file
 * @throws RuntimeException if the document is not found or no community overlaps
 */
private String getReferenceString(SharePojo share) {
    // FILE:
    if (null == share.getDocumentLocation().get_id()) { // local file based reference
        FileInputStream fin = null;
        Scanner s = null;
        try {
            File f = new File(share.getDocumentLocation().getCollection());
            fin = new FileInputStream(f);
            s = new Scanner(fin, "UTF-8");
            // NOTE(review): delimiter "\n" means only the FIRST LINE of the file is
            // returned, not the whole file ("\\A" would do that) — confirm intended.
            return (s.useDelimiter("\n").next());
        } catch (Exception e) {
            // Best-effort: any failure (missing file, empty file) yields null
            return null;
        } finally {
            try {
                if (null != fin)
                    fin.close();
                if (null != s)
                    s.close();
            } catch (Exception e) {
            } // (probably just never opened)
        }
    }
    // DB:
    // Carry on, this is a database object
    // Collect the share's community ids for the permission-overlap check below
    HashSet<String> shareIdStrs = new HashSet<String>();
    for (ShareCommunityPojo commIds : share.getCommunities()) {
        shareIdStrs.add(commIds.get_id().toString());
    }
    String retVal = null;
    BasicDBObject query = new BasicDBObject(DocumentPojo._id_, share.getDocumentLocation().get_id()); // (same for all artifacts)
    String dbName = share.getDocumentLocation().getDatabase();
    String collectionName = share.getDocumentLocation().getCollection();
    BasicDBObject returnVal = (BasicDBObject) MongoDbManager.getCollection(dbName, collectionName)
            .findOne(query);
    try {
        BasicDBList communities = null;
        boolean bCustomJob = dbName.equals("custommr"); // (a bit different)
        boolean bFoundOverlap = false;
        if (!bCustomJob) {
            // Single community id on the document; must be one of the share's communities
            ObjectId communityId = (ObjectId) returnVal.get(DocumentPojo.communityId_); // (same for other artifacts)
            bFoundOverlap = shareIdStrs.contains(communityId.toString());
        } else {
            // Custom MR jobs carry a list of community ids; any overlap grants access
            communities = (BasicDBList) returnVal.get("communityIds"); // (shared across multiple json types)
            for (Object commIdObj : communities) {
                ObjectId commId = (ObjectId) commIdObj;
                if (shareIdStrs.contains(commId.toString())) {
                    bFoundOverlap = true;
                    break;
                }
            }
        }
        if (!bFoundOverlap) {
            throw new RuntimeException(""); // (turned into the common message below)
        }
        if (!bCustomJob) { // everything but custom jobs
            Date modifiedTime = returnVal.getDate(DocumentPojo.modified_); // (same for other artifacts)
            if (null != modifiedTime) {
                share.setModified(modifiedTime);
            }
            retVal = returnVal.toString();
        } else { // custom jobs
            // Resolve the job's output database/collection and return its first record
            String database = returnVal.getString(CustomMapReduceJobPojo.outputDatabase_);
            if (null == database) {
                database = dbName;
            }
            Date modifiedTime = returnVal.getDate(CustomMapReduceJobPojo.lastCompletionTime_);
            if (null != modifiedTime) {
                share.setModified(modifiedTime);
            }
            String collection = returnVal.getString(CustomMapReduceJobPojo.outputCollection_);
            BasicDBObject returnVal2 = (BasicDBObject) MongoDbManager.getCollection(database, collection)
                    .findOne();
            retVal = returnVal2.toString();
        }
    } catch (Exception e) {
        // Any failure above (including a null returnVal / NPE) is reported uniformly
        throw new RuntimeException("Document not found or permission issue (no overlapping communities)");
    }
    return retVal;
}
From source file:com.flexive.core.storage.genericSQL.GenericTreeStorage.java
/** * {@inheritDoc}/*from w w w. j av a 2 s.c o m*/ */ @Override public long[] createNodes(Connection con, SequencerEngine seq, ContentEngine ce, FxTreeMode mode, long parentNodeId, String path, int position, boolean activateContent) throws FxApplicationException { if ("/".equals(path)) return new long[] { FxTreeNode.ROOT_NODE }; final List<Long> result = new ArrayList<Long>(); final Scanner scanner = new Scanner(path); long currentParent = parentNodeId; scanner.useDelimiter("/"); if (parentNodeId != -1) { acquireLocksForUpdate(con, getTreeNodeInfo(con, mode, parentNodeId), false); } while (scanner.hasNext()) { String name = scanner.next(); final FxString label = new FxString(true, name); name = FxFormatUtils.escapeTreePath(name); if (StringUtils.isEmpty(name)) continue; long nodeId = getIdByFQNPath(con, mode, currentParent, "/" + name); if (nodeId == -1) nodeId = createNode(con, seq, ce, mode, nodeId, currentParent, name, label, position, null, null, activateContent); result.add(nodeId); currentParent = nodeId; } return ArrayUtils.toPrimitive(result.toArray(new Long[result.size()])); }
From source file:gdsc.smlm.ij.plugins.pcpalm.PCPALMFitting.java
/** * Load a correlation curve from file. Will set the global gr, peakDensity and spatialDomain variables. If the data * fails to be loaded then the method will return false. * //ww w.j a v a2 s .c om * @return True if loaded */ private boolean loadCorrelationCurve() { inputFilename = Utils.getFilename("Input_Correlation_File", inputFilename); if (inputFilename == null) return false; // Set the analysis variables boolean spatialDomainSet = false; boolean peakDensitySet = false; BufferedReader input = null; try { FileInputStream fis = new FileInputStream(inputFilename); input = new BufferedReader(new UnicodeReader(fis, null)); String line; int count = 0; Pattern pattern = Pattern.compile("#([^=]+) = ([^ ]+)"); // Read the header while ((line = input.readLine()) != null) { count++; if (line.length() == 0) continue; if (line.charAt(0) != '#') { // This is the first record break; } // This is a header line. Extract the key-value pair Matcher match = pattern.matcher(line); if (match.find()) { if (match.group(1).equals(HEADER_SPATIAL_DOMAIN)) { // Do not use Boolean.parseBoolean because this will not fail if the field is // neither true/false - it only return true for a match to true spatialDomainSet = true; if (match.group(2).equalsIgnoreCase("true")) spatialDomain = true; else if (match.group(2).equalsIgnoreCase("false")) spatialDomain = false; else // We want to know if the field is not true/false spatialDomainSet = false; } else if (match.group(1).equals(HEADER_PEAK_DENSITY)) { try { peakDensity = Double.parseDouble(match.group(2)); peakDensitySet = true; } catch (NumberFormatException e) { // Ignore this. 
} } } } if (!peakDensitySet) { IJ.error(TITLE, "No valid " + HEADER_PEAK_DENSITY + " record in file " + inputFilename); return false; } if (!spatialDomainSet) { IJ.error(TITLE, "No valid " + HEADER_SPATIAL_DOMAIN + " record in file " + inputFilename); return false; } // Read the data: gr[0][i], gr[1][i], gr[2][i] ArrayList<double[]> data = new ArrayList<double[]>(); while (line != null) { if (line.length() == 0) continue; if (line.charAt(0) == '#') continue; // Extract the first 3 fields Scanner scanner = new Scanner(line); scanner.useDelimiter("[\t ,]+"); double r, g; try { r = scanner.nextDouble(); g = scanner.nextDouble(); } catch (InputMismatchException e) { IJ.error(TITLE, "Incorrect fields on line " + count); scanner.close(); return false; } catch (NoSuchElementException e) { IJ.error(TITLE, "Incorrect fields on line " + count); scanner.close(); return false; } // Allow the file to be missing the curve error. This is only used for plotting anyway. double error = 0; try { error = scanner.nextDouble(); } catch (InputMismatchException e) { } catch (NoSuchElementException e) { } scanner.close(); data.add(new double[] { r, g, error }); // Read the next line line = input.readLine(); count++; } if (data.isEmpty()) { IJ.error(TITLE, "No data in file " + inputFilename); return false; } gr = new double[3][data.size()]; for (int i = 0; i < data.size(); i++) { final double[] d = data.get(i); gr[0][i] = d[0]; gr[1][i] = d[1]; gr[2][i] = d[2]; } } catch (IOException e) { IJ.error(TITLE, "Unable to read from file " + inputFilename); return false; } finally { try { if (input != null) input.close(); } catch (IOException e) { // Ignore } } return true; }
From source file:ehospital.Principal.java
private void btn_cargar_ambuMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_cargar_ambuMouseClicked // TODO add your handling code here: Scanner sc = null; File archivo2;/*from ww w . j a v a 2 s. co m*/ try { archivo2 = new File("./Ambulancia.txt"); sc = new Scanner(archivo2); sc.useDelimiter(","); while (sc.hasNext()) { Ambulancias ambu = new Ambulancias(sc.next(), sc.nextInt(), sc.nextInt(), new Lugar(sc.next()), sc.nextBoolean()); lista_ambu.add(ambu); } JOptionPane.showMessageDialog(null, "Ambulancias Cargadas"); } catch (Exception e) { } finally { sc.close(); } System.out.println(lista_ambu.toString()); }
From source file:ehospital.Principal.java
private void btn_cargar_paramMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_cargar_paramMouseClicked // TODO add your handling code here: Scanner sc = null; File archivo2;// w w w . j av a2 s . c o m try { archivo2 = new File("./Paramedicos.txt"); sc = new Scanner(archivo2); sc.useDelimiter(","); while (sc.hasNext()) { Paramedicos param = new Paramedicos(sc.next(), sc.nextInt(), sc.nextInt(), sc.next(), new Lugar(sc.next()), sc.nextBoolean()); lista_param.add(param); } JOptionPane.showMessageDialog(null, "Paramedicos Cargadas"); } catch (Exception e) { } finally { sc.close(); } System.out.println(lista_param.toString()); }
From source file:com.novartis.opensource.yada.test.ServiceTest.java
/** * Loads the resource at {@code path} containing query parameter or json * strings/*from w w w. j av a 2s . com*/ * * @param path the path to the test script * @return an array of query or json strings * @throws URISyntaxException when a handle can't be attached to the test file * path * @throws IOException if the {@link InputStream} used for reading test files * can't be closed */ @SuppressWarnings("resource") public String[] loadResource(String path) throws URISyntaxException, IOException { Scanner scanner = null; String[] queries = null; java.net.URL resource = getClass().getResource(path); InputStream in = null; try { scanner = new Scanner(new File(resource.toURI()), UTF8); } catch (Exception e) { in = getClass().getResourceAsStream(path); if (in != null) { scanner = new Scanner(in, UTF8); } } if (scanner != null) { scanner.useDelimiter("\\Z"); queries = scanner.next().split("\\n"); scanner.close(); } if (in != null) in.close(); return queries; }
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.dta.DTA117FileReader.java
/**
 * Post-processes the intermediate tab-delimited file, replacing STRL ("v,o" pair)
 * placeholders with the actual long-string values read from the dta file's
 * &lt;strls&gt; section, and points ingesteddata at the rewritten final tab file.
 * If the file has no STRLs, the section is skipped as a primitive block.
 *
 * @param reader positioned at (or near) the strls section of the dta stream
 * @throws IOException on malformed v,o pairs, missing GSO values, or read failures
 */
private void readSTRLs(DataReader reader) throws IOException {
    logger.fine("STRLs section; at offset " + reader.getByteOffset() + "; dta map offset: "
            + dtaMap.getOffset_strls());
    // TODO:
    // check that we are at the right byte offset!
    //reader.readOpeningTag(TAG_STRLS);

    if (hasSTRLs) {
        reader.readOpeningTag(TAG_STRLS);

        File intermediateTabFile = ingesteddata.getTabDelimitedFile();
        FileInputStream fileInTab = new FileInputStream(intermediateTabFile);

        // NOTE(review): scanner/writer are only closed on the success path; an
        // IOException mid-loop leaks them — consider try-with-resources.
        Scanner scanner = new Scanner(fileInTab);
        // One token per row of the intermediate tab file
        scanner.useDelimiter("\\n");

        File finalTabFile = File.createTempFile("finalTabfile.", ".tab");
        FileOutputStream fileOutTab = new FileOutputStream(finalTabFile);
        PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);

        logger.fine("Setting the tab-delimited file to " + finalTabFile.getName());
        ingesteddata.setTabDelimitedFile(finalTabFile);

        int nvar = dataTable.getVarQuantity().intValue();
        int nobs = dataTable.getCaseQuantity().intValue();

        String[] line;

        // Walk every observation row, rewriting only the STRL-typed columns
        for (int obsindex = 0; obsindex < nobs; obsindex++) {
            if (scanner.hasNext()) {
                // -1 keeps trailing empty fields so column count stays = nvar
                line = (scanner.next()).split("\t", -1);

                for (int varindex = 0; varindex < nvar; varindex++) {
                    if ("STRL".equals(variableTypes[varindex])) {
                        // this is a STRL; needs to be re-processed:

                        String voPair = line[varindex];
                        long v;
                        long o;
                        if (voPair == null) {
                            throw new IOException("Failed to read an intermediate v,o Pair for variable "
                                    + varindex + ", observation " + obsindex);
                        }

                        if ("0,0".equals(voPair)) {
                            // This is a code for an empty string - "";
                            // doesn't need to be defined or looked up.

                            line[varindex] = "\"\"";
                        } else {
                            // Split into variable index (v) and observation index (o)
                            String[] voTokens = voPair.split(",", 2);

                            try {
                                v = new Long(voTokens[0]).longValue();
                                o = new Long(voTokens[1]).longValue();
                            } catch (NumberFormatException nfex) {
                                throw new IOException("Illegal v,o value: " + voPair + " for variable "
                                        + varindex + ", observation " + obsindex);
                            }

                            if (v == varindex + 1 && o == obsindex + 1) {
                                // This v,o must be defined in the STRLs section:
                                line[varindex] = readGSO(reader, v, o);
                                if (line[varindex] == null) {
                                    throw new IOException("Failed to read GSO value for " + voPair);
                                }
                            } else {
                                // This one must have been cached already:
                                if (cachedGSOs.get(voPair) != null && !cachedGSOs.get(voPair).equals("")) {
                                    line[varindex] = cachedGSOs.get(voPair);
                                } else {
                                    throw new IOException("GSO string unavailable for v,o value " + voPair);
                                }
                            }
                        }
                    }
                }
                // Dump the row of data to the tab-delimited file:
                pwout.println(StringUtils.join(line, "\t"));
            }
        }

        scanner.close();
        pwout.close();

        reader.readClosingTag(TAG_STRLS);
    } else {
        // If this data file doesn't use STRLs, we can just skip
        // this section, and assume that we are done with the
        // tabular data file.
        reader.readPrimitiveSection(TAG_STRLS);
    }
    //reader.readClosingTag(TAG_STRLS);
}