Example usage for java.util Scanner useDelimiter

List of usage examples for java.util Scanner useDelimiter

Introduction

This page lists example usages of java.util.Scanner.useDelimiter.

Prototype

public Scanner useDelimiter(String pattern) 

Document

Sets this scanner's delimiting pattern to a pattern constructed from the specified String.
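
The delimiter is compiled as a regular expression, and the call returns the Scanner itself, so it can be chained. A minimal, self-contained sketch (the input string and class name are illustrative):

import java.util.Scanner;

public class UseDelimiterDemo {
    public static void main(String[] args) {
        // "," is compiled as a regex pattern; tokens are the text between matches
        Scanner sc = new Scanner("alpha,beta,gamma").useDelimiter(",");
        while (sc.hasNext()) {
            System.out.println(sc.next()); // prints alpha, beta, gamma on separate lines
        }
        sc.close();
    }
}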

Usage

From source file:org.fao.geonet.kernel.csw.services.GetRecords.java

/**
 * If the request contains a Query element, it must have attribute typeNames.
 *
 * The OGC 07-045 spec is more restrictive than OGC 07-006.
 *
 * OGC 07-006 10.8.4.8:
 * The typeNames parameter is a list of one or more names of queryable entities in the catalogue's information model
 * that may be constrained in the predicate of the query. In the case of XML realization of the OGC core metadata
 * properties (Subclause 10.2.5), the element csw:Record is the only queryable entity. Other information models may
 * include more than one queryable component. For example, queryable components for the XML realization of the ebRIM
 * include rim:Service, rim:ExtrinsicObject and rim:Association. In such cases the application profile shall
 * describe how multiple typeNames values should be processed.
 * In addition, all or some of these queryable entity names may be specified in the query to define which
 * metadata record elements the query should present in the response to the GetRecords operation.
 *
 * OGC 07-045 8.2.2.1.1:
 * Mandatory: Must support *one* of csw:Record or gmd:MD_Metadata in a query. Default value is csw:Record.
 *
 * (note how OGC 07-045 mixes up a mandatory parameter that has a default value !!)
 *
 * We'll go for the default value option rather than the mandatory-ness. So: if typeNames is not present or empty,
 * "csw:Record" is used.
 *
 * If the request does not contain exactly one (or comma-separated, both) of the values specified in OGC 07-045,
 * an exception is thrown. If both are present "gmd:MD_Metadata" is preferred.
 *
 * @param query query element
 * @return typeName
 * @throws MissingParameterValueEx if typeNames is missing
 * @throws InvalidParameterValueEx if typeNames does not have one of the mandated values
 */
private String checkTypenames(Element query) throws MissingParameterValueEx, InvalidParameterValueEx {
    if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
        Log.debug(Geonet.CSW_SEARCH, "checking typenames in query:\n" + Xml.getString(query));
    }
    //
    // get the prefix used for CSW namespace used in this input document
    //
    String cswPrefix = getPrefixForNamespace(query, Csw.NAMESPACE_CSW);
    if (cswPrefix == null) {
        if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
            Log.debug(Geonet.CSW_SEARCH,
                    "checktypenames: csw prefix not found, using " + Csw.NAMESPACE_CSW.getPrefix());
        }
        cswPrefix = Csw.NAMESPACE_CSW.getPrefix();
    }
    //
    // get the prefix used for GMD namespace used in this input document
    //
    String gmdPrefix = getPrefixForNamespace(query, Csw.NAMESPACE_GMD);
    if (gmdPrefix == null) {
        if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
            Log.debug(Geonet.CSW_SEARCH,
                    "checktypenames: gmd prefix not found, using " + Csw.NAMESPACE_GMD.getPrefix());
        }
        gmdPrefix = Csw.NAMESPACE_GMD.getPrefix();
    }
    if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
        Log.debug(Geonet.CSW_SEARCH,
                "checktypenames: csw prefix set to " + cswPrefix + ", gmd prefix set to " + gmdPrefix);
    }

    Attribute typeNames = query.getAttribute("typeNames", query.getNamespace());
    typeNames = query.getAttribute("typeNames");
    if (typeNames != null) {
        String typeNamesValue = typeNames.getValue();
        // empty typenames element
        if (StringUtils.isEmpty(typeNamesValue)) {
            return cswPrefix + ":Record";
        }
        // not empty: scan comma-separated string
        Scanner commaSeparator = new Scanner(typeNamesValue);
        commaSeparator.useDelimiter(",");
        String result = cswPrefix + ":Record";
        while (commaSeparator.hasNext()) {
            String typeName = commaSeparator.next();
            typeName = typeName.trim();
            if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
                Log.debug(Geonet.CSW_SEARCH, "checking typename in query:" + typeName);
            }
            if (!(typeName.equals(cswPrefix + ":Record") || typeName.equals(gmdPrefix + ":MD_Metadata"))) {
                throw new InvalidParameterValueEx("typeNames", "invalid value");
            }
            if (typeName.equals(gmdPrefix + ":MD_Metadata")) {
                return typeName;
            }
        }
        return result;
    }
    // missing typeNames element
    else {
        return cswPrefix + ":Record";
    }
}
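
In the method above, useDelimiter(",") splits the raw typeNames attribute value into tokens that are then trimmed one by one. The same comma-separated idiom in isolation, as a minimal sketch (the input value is illustrative):

Scanner commaSeparator = new Scanner("csw:Record, gmd:MD_Metadata");
commaSeparator.useDelimiter(",");
while (commaSeparator.hasNext()) {
    // trim() removes the whitespace that follows each comma
    String typeName = commaSeparator.next().trim();
    System.out.println(typeName);
}
commaSeparator.close();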

From source file:com.github.pffy.chinese.freq.ChineseFrequency.java

private void analyze() {

    int inputCount = 0;
    int removedCount = 0;
    int hanziCount = 0;
    int uniqueHanziCount = 0;
    int processedCount = 0;

    int freq = 0;

    String csvOutput = this.HEADER_ROW_CSV;
    String tsvOutput = this.HEADER_ROW_TSV;
    String txtOutput = this.HEADER_ROW_TXT;

    String csv, tsv, txt;
    String str, input, pinyin, hanzi;
    Scanner sc;
    List<String> hanziList;
    Map<String, Integer> freqMap;
    JSONObject hpdx;
    String[] arr;

    Set<String> unmappedCharacters;

    hpdx = this.hpdx;

    input = this.input;
    inputCount = input.length();

    input = retainHanzi(input);
    removedCount = inputCount - input.length();

    hanziCount = input.length();

    sc = new Scanner(input);
    sc.useDelimiter("");

    hanziList = new ArrayList<String>();
    freqMap = new HashMap<String, Integer>();

    // counts occurrences
    while (sc.hasNext()) {

        str = sc.next();
        hanziList.add(str);

        if (freqMap.containsKey(str)) {
            freqMap.put(str, (Integer) freqMap.get(str).intValue() + 1);
        } else {
            freqMap.put(str, 1);
        }
    }

    // done with Scanner
    sc.close();

    uniqueHanziCount = freqMap.keySet().size();

    SortedMap<String, String> freqTreeMap = new TreeMap<String, String>(Collections.reverseOrder());

    unmappedCharacters = new HashSet<String>();
    for (Entry<String, Integer> counts : freqMap.entrySet()) {

        try {

            hanzi = counts.getKey();
            pinyin = hpdx.getString(hanzi);

        } catch (JSONException je) {

            // add this unmapped character to the list
            unmappedCharacters.add(counts.getKey());

            // not idx mapped yet. that's ok. move on.
            continue;
        }

        if (pinyin.isEmpty()) {
            // if character is unmapped in idx, do not process.
            continue;
        }

        freq = counts.getValue();

        freqTreeMap.put(String.format("%" + this.PADSIZE_FREQ + "s", freq).replace(' ', '0') + "-" + hanzi + "-"
                + pinyin, hanzi + "," + pinyin + "," + freq);
        processedCount++;
    }

    // outputs
    for (Entry<String, String> outputs : freqTreeMap.entrySet()) {

        csv = this.CRLF + outputs.getValue();
        csvOutput += csv;

        tsv = csv.replaceAll(",", "\t");
        tsvOutput += tsv;

        arr = csv.split(",");

        // arr[0] is hanzi. arr[1] is pinyin. arr[2] is freq.
        txt = padSummary(arr[0] + " [" + arr[1] + "]", this.PADSIZE_SUMMARY + 1) + arr[2];
        txtOutput += txt;
    }

    // cleanup
    csvOutput = csvOutput.trim();
    tsvOutput = tsvOutput.trim();
    txtOutput = txtOutput.trim();

    // post-process
    this.csvOutput = csvOutput;
    this.tsvOutput = tsvOutput;
    this.txtOutput = txtOutput;

    // counts
    this.inputCount = inputCount;
    this.removedCount = removedCount;
    this.hanziCount = hanziCount;
    this.uniqueHanziCount = uniqueHanziCount;
    this.processedCount = processedCount;

    this.unmappedCharacters = unmappedCharacters;

    // summary
    String summaryString = "";

    summaryString += padSummary(this.MSG_TOTAL_COUNT, this.PADSIZE_SUMMARY) + inputCount;
    summaryString += this.CRLF + padSummary(this.MSG_REMOVED_COUNT, this.PADSIZE_SUMMARY) + removedCount;
    summaryString += this.CRLF + padSummary(this.MSG_HANZI_COUNT, this.PADSIZE_SUMMARY) + hanziCount;
    summaryString += this.CRLF + padSummary(this.MSG_UNIQUE_COUNT, this.PADSIZE_SUMMARY) + uniqueHanziCount;
    summaryString += this.CRLF + padSummary(this.MSG_PROCESSED_COUNT, this.PADSIZE_SUMMARY) + processedCount;

    this.summary = summaryString;
}
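
The empty-string delimiter used above makes every position between characters a token boundary, so next() returns the input one character at a time, which is what enables the per-character frequency count. The idiom in isolation, as a minimal sketch (the input is illustrative):

Scanner chars = new Scanner("abc");
chars.useDelimiter("");
while (chars.hasNext()) {
    System.out.println(chars.next()); // prints "a", "b", "c" on separate lines
}
chars.close();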

From source file:org.fao.geonet.component.csw.GetRecords.java

/**
 * If the request contains a Query element, it must have attribute typeNames.
 *
 * The OGC 07-045 spec is more restrictive than OGC 07-006.
 *
 * OGC 07-006 10.8.4.8: The typeNames parameter is a list of one or more names of queryable
 * entities in the catalogue's information model that may be constrained in the predicate of the
 * query. In the case of XML realization of the OGC core metadata properties (Subclause 10.2.5),
 * the element csw:Record is the only queryable entity. Other information models may include
 * more than one queryable component. For example, queryable components for the XML realization
 * of the ebRIM include rim:Service, rim:ExtrinsicObject and rim:Association. In such cases the
 * application profile shall describe how multiple typeNames values should be processed. In
 * addition, all or some of these queryable entity names may be specified in the query to
 * define which metadata record elements the query should present in the response to the
 * GetRecords operation.
 *
 * OGC 07-045 8.2.2.1.1: Mandatory: Must support *one* of csw:Record or
 * gmd:MD_Metadata in a query. Default value is csw:Record.
 *
 * (note how OGC 07-045 mixes up a mandatory parameter that has a default value !!)
 *
 * We'll go for the default value option rather than the mandatory-ness. So: if typeNames is not
 * present or empty, "csw:Record" is used.
 *
 * If the request does not contain exactly one (or comma-separated, both) of the values
 * specified in OGC 07-045, an exception is thrown. If both are present "gmd:MD_Metadata" is
 * preferred.
 *
 * @param query    query element
 * @param isStrict enable strict error message to comply with GDI-DE Testsuite test
 *                 csw:InterfaceBindings.GetRecords-InvalidRequest
 * @return typeName
 * @throws MissingParameterValueEx if typeNames is missing
 * @throws InvalidParameterValueEx if typeNames does not have one of the mandated values
 */
private String checkTypenames(Element query, boolean isStrict)
        throws MissingParameterValueEx, InvalidParameterValueEx {
    if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
        Log.debug(Geonet.CSW_SEARCH, "checking typenames in query:\n" + Xml.getString(query));
    }
    //
    // get the prefix used for CSW namespace used in this input document
    //
    String cswPrefix = getPrefixForNamespace(query, Csw.NAMESPACE_CSW);
    if (cswPrefix == null) {
        if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
            Log.debug(Geonet.CSW_SEARCH,
                    "checktypenames: csw prefix not found, using " + Csw.NAMESPACE_CSW.getPrefix());
        }
        cswPrefix = Csw.NAMESPACE_CSW.getPrefix();
    }
    //
    // get the prefix used for GMD namespace used in this input document
    //
    String gmdPrefix = getPrefixForNamespace(query, Csw.NAMESPACE_GMD);
    if (gmdPrefix == null) {
        if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
            Log.debug(Geonet.CSW_SEARCH,
                    "checktypenames: gmd prefix not found, using " + Csw.NAMESPACE_GMD.getPrefix());
        }
        gmdPrefix = Csw.NAMESPACE_GMD.getPrefix();
    }
    if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
        Log.debug(Geonet.CSW_SEARCH,
                "checktypenames: csw prefix set to " + cswPrefix + ", gmd prefix set to " + gmdPrefix);
    }

    Attribute typeNames = query.getAttribute("typeNames", query.getNamespace());
    typeNames = query.getAttribute("typeNames");
    if (typeNames != null) {
        String typeNamesValue = typeNames.getValue();
        // empty typenames element
        if (StringUtils.isEmpty(typeNamesValue)) {
            return cswPrefix + ":Record";
        }
        // not empty: scan space-separated string
        @SuppressWarnings("resource")
        Scanner spaceScanner = new Scanner(typeNamesValue);
        spaceScanner.useDelimiter(" ");
        String result = cswPrefix + ":Record";
        while (spaceScanner.hasNext()) {
            String typeName = spaceScanner.next();
            typeName = typeName.trim();
            if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) {
                Log.debug(Geonet.CSW_SEARCH, "checking typename in query:" + typeName);
            }

            if (!_schemaManager.getListOfTypeNames().contains(typeName)) {
                throw new InvalidParameterValueEx("typeNames",
                        String.format("'%s' typename is not valid. Supported values are: %s", typeName,
                                _schemaManager.getListOfTypeNames()));
            }
            if (typeName.equals(gmdPrefix + ":MD_Metadata")) {
                return typeName;
            }
        }
        return result;
    }
    // missing typeNames element
    else {
        if (isStrict) {
            //Mandatory check if strict.
            throw new MissingParameterValueEx("typeNames", String.format(
                    "Attribute 'typeNames' is missing. Supported values are: %s. Default is csw:Record according to OGC 07-045.",
                    _schemaManager.getListOfTypeNames()));
        } else {
            //Return default value according to OGC 07-045.
            return cswPrefix + ":Record";
        }
    }
}

From source file:gdsc.smlm.ij.plugins.pcpalm.PCPALMClusters.java

/**
 * Load the histogram from the file. Assumes the histogram is [int, float] format and creates a contiguous histogram
 * from zero
 * 
 * @param filename the file to read the histogram from
 * @return the histogram data, or null if the file could not be read
 */
private HistogramData loadHistogram(String filename) {
    BufferedReader input = null;
    try {
        int f = 0;
        double a = 0;
        String u = "";

        FileInputStream fis = new FileInputStream(filename);
        input = new BufferedReader(new UnicodeReader(fis, null));

        String line;
        int count = 0;

        ArrayList<float[]> data = new ArrayList<float[]>();

        // Read the header and store the calibration if present
        while ((line = input.readLine()) != null) {
            count++;
            if (line.length() == 0)
                continue;
            if (Character.isDigit(line.charAt(0)))
                // This is the first record
                break;
            String[] fields = line.split("[\t, ]+");
            if (fields[0].equalsIgnoreCase("frames"))
                f = Integer.parseInt(fields[1]);
            if (fields[0].equalsIgnoreCase("area"))
                a = Double.parseDouble(fields[1]);
            if (fields[0].equalsIgnoreCase("units"))
                u = fields[1];
        }

        final Pattern pattern = Pattern.compile("[\t, ]+");
        while (line != null) {
            if (line.length() == 0)
                continue;
            if (!Character.isDigit(line.charAt(0)))
                continue;

            // Extract the first 2 fields
            Scanner scanner = new Scanner(line);
            scanner.useLocale(Locale.US);
            scanner.useDelimiter(pattern);

            try {
                int molecules = scanner.nextInt();
                float frequency = scanner.nextFloat();

                // Check for duplicates
                for (float[] d : data) {
                    if (d[0] == molecules) {
                        error("Duplicate molecules field on line " + count);
                        return null;
                    }
                }

                data.add(new float[] { molecules, frequency });
            } catch (InputMismatchException e) {
                error("Incorrect fields on line " + count);
                return null;
            } catch (NoSuchElementException e) {
                error("Incorrect fields on line " + count);
                return null;
            } finally {
                scanner.close();
            }

            // Get the next line
            line = input.readLine();
            count++;
        }

        if (data.isEmpty()) {
            error("No data in file " + filename);
            return null;
        }

        // Create a contiguous histogram from zero
        int maxN = 0;
        for (float[] d : data) {
            if (maxN < d[0])
                maxN = (int) d[0];
        }

        float[][] hist = new float[2][maxN + 1];
        for (int n = 0; n <= maxN; n++) {
            hist[0][n] = n;
            for (float[] d : data) {
                if (n == d[0])
                    hist[1][n] = d[1];
            }
        }
        HistogramData histogramData = new HistogramData(hist, f, a, u);
        histogramData.filename = filename;
        return histogramData;
    } catch (IOException e) {
        IJ.error(TITLE, "Unable to read from file " + filename);
    } finally {
        try {
            if (input != null)
                input.close();
        } catch (IOException e) {
            // Ignore
        }
    }
    return null;
}
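
Two configuration calls matter in the snippet above: useLocale(Locale.US) fixes the number format so nextFloat() accepts a decimal point regardless of the platform default, and the useDelimiter(Pattern) overload takes a precompiled java.util.regex.Pattern so one pattern covers tabs, commas and spaces. A minimal sketch of that combination (the input line is illustrative; java.util.Locale, java.util.Scanner and java.util.regex.Pattern imports assumed):

Pattern fieldSeparator = Pattern.compile("[\t, ]+");
Scanner scanner = new Scanner("12\t3.5");
scanner.useLocale(Locale.US);           // decimal point, independent of the default locale
scanner.useDelimiter(fieldSeparator);   // useDelimiter(Pattern) overload
int molecules = scanner.nextInt();      // 12
float frequency = scanner.nextFloat();  // 3.5f
scanner.close();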

From source file:org.aprilis.jrest.compile.Compile.java

/**
 *
 */
private void loadDefinitions(short shRepositoryDepth) {
    try {
        for (File definitionFile : mfDefinitionFile.listFiles()) {
            if (definitionFile.getName().endsWith(Constants.gsDefFileExtension)) {
                // Read the contents of the definition file only upto the
                // first "!" character is noticed; which would be
                // corresponding to a definition, and repeat the same until
                // the time EOF is reached
                //
                boolean hasEncounteredErrors = false;
                FileInputStream fInputStream = new FileInputStream(definitionFile);
                Scanner defFileScanner = new Scanner(fInputStream);

                defFileScanner.useDelimiter(Constants.gsJsonDefinitionDelimiter);

                if (definitionFile.getName().equalsIgnoreCase(Constants.gsJrestDefinitionFileName) == false) {

                    while (defFileScanner.hasNext()) {
                        String jsonContent = defFileScanner.next();

                        if (loadJsonDefinition(jsonContent) == false) {
                            // Something went wrong with the JSON we attempted
                            // to parse, so, tell the user that JSON is screwed
                            mLogger.error(String.format(Exceptions.gsParseError, definitionFile.getName(),
                                    jsonContent));

                            hasEncounteredErrors = true;
                        } // if(loadJsonDefinition(jsonContent) == false)
                    } // while(defFileScanner.hasNext())
                } else {
                    while (defFileScanner.hasNext()) {
                        String jsonContent = defFileScanner.next();

                        if (loadJrestDefinition(jsonContent) == false) {
                            // Something went wrong with the JSON we attempted
                            // to parse, so, tell the user that JSON is screwed
                            mLogger.error(String.format(Exceptions.gsParseError, definitionFile.getName(),
                                    jsonContent));

                            hasEncounteredErrors = true;
                        } // if(loadJrestDefinition(jsonContent) == false)
                    } // while(defFileScanner.hasNext())
                } // if(parseJsonDefinition(jsonContent) == false)

                defFileScanner.close();
                fInputStream.close();

                if (shRepositoryDepth == Constants.gshMinRepoDepth) {
                    if (hasEncounteredErrors == false) {
                        File repoFile = new File(msPathToJrestRepo, definitionFile.getName());

                        if ((repoFile.exists() == true) && (repoFile.isDirectory() == false)) {
                            repoFile.delete();
                        } // if ((repoFile.exists() == true) && (repoFile.isDirectory() ==
                          // false))

                        definitionFile.renameTo(new File(msPathToJrestRepo, definitionFile.getName()));
                    } else {
                        definitionFile.renameTo(
                                new File(definitionFile.getAbsoluteFile() + Constants.gsJsonDefErrorFileExtn));
                    } // if (hasEncounteredErrors == false)
                } // if (shRepositoryDepth == Constants.gshMinRepoDepth)
            } // if(definitionFile.getName().endsWith(..)
        } // for(File definitionFile : mfDefinitionFile.listFiles())

        if (shRepositoryDepth == Constants.gshMaxRepoDepth) {
            mbJrestRepositoryRead = true;
        }
    } catch (Exception e) {
        e.printStackTrace(moPrintWriter);

        mLogger.error(moStringWriter.toString());
    } // end of try ... catch block
}

From source file:com.aurel.track.dbase.InitDatabase.java

private static void insertPostLoadData(String file) {
    Connection coni = null;
    Connection cono = null;
    try {
        coni = getConnection();
        cono = getConnection();
        Statement ostmt = cono.createStatement();
        if (isFirstStartEver) {
            LOGGER.info("Filling some post load data...");
            try {

                URL populateURL = ApplicationBean.getInstance().getServletContext().getResource(file);
                InputStream in = populateURL.openStream();
                java.util.Scanner s = new java.util.Scanner(in, "UTF-8");
                s.useDelimiter(";");
                String st = null;
                StringBuilder stb = new StringBuilder();
                int line = 0;

                while (s.hasNext()) {
                    stb.append(s.nextLine().trim());
                    st = stb.toString();
                    ++line;
                    if (!st.isEmpty() && !st.startsWith("--") && !st.startsWith("/*")) {
                        if (st.endsWith(";")) {
                            stb = new StringBuilder(); // clear buffer
                            st = st.substring(0, st.length() - 1); // remove the semicolon
                            try {
                                ostmt.executeUpdate(st);
                                LOGGER.info(st);
                            } catch (Exception exc) {
                                LOGGER.error("Problem inserting post-load data: " + exc.getMessage());
                                LOGGER.error("Line " + line + ": " + st);
                            }
                        } else {
                            stb.append(" ");
                        }
                    } else {
                        stb = new StringBuilder();
                    }
                }
                s.close();
                in.close();

            } catch (Exception e) {
                System.err.println(ExceptionUtils.getStackTrace(e));
            }
            LOGGER.info("Post-load data is okay.");
        }
        ApplicationStarter.getInstance().actualizePercentComplete(
                ApplicationStarter.getInstance().INIT_DB_DATA_STEP, ApplicationStarter.INIT_DB_DATA_TEXT);
    } catch (Exception e) {
        LOGGER.error("Problem inserting post-load objects: " + e.getMessage());
        LOGGER.debug(STACKTRACE, e);
    } finally {
        if (coni != null) {
            try {
                coni.close();
            } catch (SQLException e) {
                LOGGER.info("Closing connection failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
        }
        if (cono != null) {
            try {
                cono.close();
            } catch (SQLException e) {
                LOGGER.info("Closing connection failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
        }
    }
}
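
Note that the scanner above is given ";" as its delimiter but is then read with nextLine(), which ignores the delimiter; statement boundaries are actually detected with endsWith(";"). A purely delimiter-driven variant would take one statement per token. A minimal sketch that could sit in place of the while loop inside the same try block, reusing in and ostmt from the surrounding method (it assumes the script contains no semicolons inside string literals and no comment lines):

java.util.Scanner s = new java.util.Scanner(in, "UTF-8");
s.useDelimiter(";");
while (s.hasNext()) {
    String st = s.next().trim();   // one statement per token, semicolon already consumed
    if (!st.isEmpty()) {
        ostmt.executeUpdate(st);   // in and ostmt come from the surrounding method
    }
}
s.close();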

From source file:org.araqne.docxcod.test.FreeMarkerTest.java

@Test
public void UserDefMethodTest() throws Exception {
    InputStreamReader templateReader = null;
    InputStreamReader inputReader = null;
    Scanner scanner = null;
    try {
        Configuration cfg = new Configuration();
        cfg.setObjectWrapper(new DefaultObjectWrapper());

        inputReader = new InputStreamReader(getClass().getResourceAsStream("/nestedListTest.in"));
        JSONTokener tokener = new JSONTokener(inputReader);
        Map<String, Object> rootMap = JsonHelper.parse((JSONObject) tokener.nextValue());

        MakeNewChartFunction makeNewChartFunction = new MakeNewChartFunction();
        rootMap.put("makeNewChart", makeNewChartFunction);

        templateReader = new InputStreamReader(getClass().getResourceAsStream("/userDefMethodTest.fpl"));
        Template t = new Template("UserDefMethodTest", templateReader, cfg);

        StringWriter out = new StringWriter();

        t.process(rootMap, out);

        scanner = new Scanner(getClass().getResourceAsStream("/userDefMethodTest.out"));
        String expectedOutput = scanner.useDelimiter("\\A").next();

        assertEquals(expectedOutput, out.toString());
        assertEquals(3, makeNewChartFunction.callCount);

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        CloseableHelper.safeClose(inputReader);
        if (scanner != null)
            scanner.close();
    }
}
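
The "\\A" delimiter above is the beginning-of-input anchor; it can never match a second time, so a single next() returns the entire stream. This is a common way to slurp an InputStream into a String. A minimal sketch (stream contents are illustrative; java.io.ByteArrayInputStream, java.io.InputStream, java.nio.charset.StandardCharsets and java.util.Scanner imports assumed):

InputStream in = new ByteArrayInputStream("entire stream content".getBytes(StandardCharsets.UTF_8));
Scanner all = new Scanner(in, "UTF-8").useDelimiter("\\A");
String content = all.hasNext() ? all.next() : ""; // hasNext() guards against an empty stream
all.close();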

From source file:com.aurel.track.dbase.InitDatabase.java

private static void insertNullObjectsAndSampleData() {
    ResultSet rs = null;
    Connection coni = null;
    Connection cono = null;
    try {
        coni = getConnection();
        cono = getConnection();
        Statement istmt = coni.createStatement();
        Statement ostmt = cono.createStatement();
        LOGGER.info("Testing for NULL objects...");
        // --------------- T S I T E ----------------------
        rs = istmt.executeQuery("SELECT * FROM TSITE");
        if (rs == null || !rs.next()) {
            try {
                ostmt.execute("INSERT INTO TSITE " + "(OBJECTID) " + "VALUES (1)");
                LOGGER.info("Inserted TSITE");
            } catch (Exception exc) {
                LOGGER.error("Problem inserting TSITE object: " + exc.getMessage());
                LOGGER.debug(STACKTRACE, exc);
            }
        }

        // --------------- T P R O J E C T T Y P E ----------------------
        rs = istmt.executeQuery("SELECT * FROM TPROJECTTYPE WHERE OBJECTID = 0");
        if (rs == null || !rs.next()) {
            try {
                ostmt.execute(
                        "INSERT INTO TPROJECTTYPE " + "(OBJECTID, LABEL, NOTIFYOWNERLEVEL, NOTIFYMANAGERLEVEL) "
                                + "VALUES (0, 'Generic Space', 0, 1)");
                LOGGER.info("Inserted NULL project (PKEY = 0) into TPROJECTTYPE");
            } catch (Exception exc) {
                LOGGER.error("Problem inserting NULL object for TPROJECTTYPE: " + exc.getMessage());
                LOGGER.debug(STACKTRACE, exc);
            }
        }

        rs = istmt.executeQuery("SELECT * FROM TPROJECTTYPE WHERE OBJECTID = -1");
        if (rs == null || !rs.next()) {
            try {
                ostmt.execute("INSERT INTO TPROJECTTYPE " + "(OBJECTID, LABEL, DEFAULTFORPRIVATE) "
                        + "VALUES (-1, 'Private Project', 'Y')");
                LOGGER.info("Inserted Private project (PKEY = -1) into TPROJECTTYPE");
            } catch (Exception exc) {
                LOGGER.error("Problem inserting private space in TPROJECTTYPE: " + exc.getMessage());
                LOGGER.debug(STACKTRACE, exc);
            }
        }

        rs = istmt.executeQuery("SELECT * FROM TPROJECT WHERE PKEY = 0");
        // ------------------- T P R O J E C T  -----------------------
        if (rs == null || !rs.next()) {
            try {
                ostmt.execute("INSERT INTO TPROJECT " + "(PKEY, LABEL, DEFOWNER, DEFMANAGER, PROJECTTYPE) "
                        + "VALUES (0, 'Generic Space', 1, 1, 0)");
                LOGGER.info("Inserted NULL project (PKEY = 0) into TPROJECT");
            } catch (Exception exc) {
                LOGGER.error("Problem inserting NULL object for TPROJECT: " + exc.getMessage());
            }
        }

        // ----------------------- T R O L E ------------------------------
        rs = istmt.executeQuery("SELECT * FROM TROLE WHERE PKEY = -1");
        if (rs == null || !rs.next()) {
            try {
                ostmt.execute("INSERT INTO TROLE " + "(PKEY, LABEL, ACCESSKEY, EXTENDEDACCESSKEY, PROJECTTYPE) "
                        + "VALUES (-1, 'PrivateRole', 0, '111111111111', 0)");
                LOGGER.info("Inserted private role (PKEY = -1) into TROLE");
            } catch (Exception exc) {
                LOGGER.error("Problem inserting NULL object for TROLE: " + exc.getMessage());
                LOGGER.debug(STACKTRACE, exc);
            }
        }

        LOGGER.info("NULL objects are okay.");

        if (isFirstStartEver) {
            LOGGER.info("Filling some sample data...");
            try {

                URL populateURL = ApplicationBean.getInstance().getServletContext().getResource(populateSql);
                if (populateURL == null) {
                    ClassLoader cl = InitDatabase.class.getClassLoader();
                    populateURL = cl.getResource(populateSql);
                }
                InputStream in = populateURL.openStream();
                java.util.Scanner s = new java.util.Scanner(in, "UTF-8");
                s.useDelimiter(";");
                String st = null;
                StringBuffer stb = new StringBuffer();
                int line = 0;

                ApplicationStarter.getInstance().actualizePercentComplete(
                        ApplicationStarter.getInstance().INIT_DB_DATA_STEP,
                        ApplicationStarter.INIT_DB_DATA_TEXT);

                while (s.hasNext()) {
                    stb.append(s.nextLine().trim());
                    st = stb.toString();
                    ++line;
                    if (!st.isEmpty() && !st.startsWith("--") && !st.startsWith("/*")) {
                        if (st.endsWith(";")) {
                            stb = new StringBuffer(); // clear buffer
                            st = st.substring(0, st.length() - 1); // remove the semicolon
                            try {
                                ostmt.executeUpdate(st);
                            } catch (Exception exc) {
                                LOGGER.error("Problem inserting sample data: " + exc.getMessage());
                                LOGGER.error("Line " + line + ": " + st);
                            }
                        } else {
                            stb.append(" ");
                        }
                    } else {
                        stb = new StringBuffer();
                    }
                }
                s.close();
                in.close();

            } catch (Exception e) {
                LOGGER.error(ExceptionUtils.getStackTrace(e));
            }
            LOGGER.info("Sample data is okay.");

            ApplicationStarter.getInstance().actualizePercentComplete(
                    ApplicationStarter.getInstance().INIT_DB_DATA_STEP, ApplicationStarter.INIT_DB_DATA_TEXT);
        }
    } catch (Exception e) {
        LOGGER.error("Problem inserting null objects: " + e.getMessage());
        LOGGER.debug(STACKTRACE, e);
    } finally {
        if (coni != null) {
            try {
                coni.close();
            } catch (SQLException e) {
                LOGGER.info("Closing connection failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
        }
        if (cono != null) {
            try {
                cono.close();
            } catch (SQLException e) {
                LOGGER.info("Closing connection failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
        }
    }
}

From source file:org.olat.core.util.vfs.version.VersionsFileManager.java

private boolean isVersionsXmlFile(VFSLeaf fVersions) {
    if (fVersions == null || !fVersions.exists()) {
        return false;
    }
    InputStream in = fVersions.getInputStream();
    if (in == null) {
        return false;
    }

    Scanner scanner = new Scanner(in);
    scanner.useDelimiter(TAG_PATTERN);

    boolean foundVersionsTag = false;
    while (scanner.hasNext()) {
        String tag = scanner.next();
        if ("versions".equals(tag)) {
            foundVersionsTag = true;
            break;
        }
    }

    scanner.close();
    IOUtils.closeQuietly(in);
    return foundVersionsTag;
}

From source file:org.meveo.service.billing.impl.InvoiceService.java

public String getXMLInvoice(Invoice invoice, String invoiceNumber, User currentUser, boolean refreshInvoice)
        throws BusinessException, FileNotFoundException {
    String brPath = getBillingRunPath(invoice.getBillingRun(), invoice.getAuditable().getCreated(),
            currentUser.getProvider().getCode());
    File billingRundir = new File(brPath);
    xmlInvoiceCreator.createXMLInvoice(invoice.getId(), billingRundir,
            invoice.getInvoiceType().getCode().equals(invoiceTypeService.getAdjustementCode()), refreshInvoice);
    String thePrefix = "";
    if (invoice.getInvoiceType().getCode().equals(invoiceTypeService.getAdjustementCode())) {
        thePrefix = paramBean.getProperty("invoicing.invoiceAdjustment.prefix", "_IA_");
    }
    String xmlCanonicalPath = brPath + File.separator + thePrefix + invoiceNumber + ".xml";
    Scanner scanner = new Scanner(new File(xmlCanonicalPath));
    String xmlContent = scanner.useDelimiter("\\Z").next();
    scanner.close();
    log.debug("getXMLInvoice  invoiceNumber:{} done.", invoiceNumber);

    return xmlContent;
}
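
Here the delimiter "\\Z" is the end-of-input anchor, so the single next() call returns essentially the whole file; unlike the hasNext()-guarded "\\A" variant shown earlier, it throws NoSuchElementException if the file is empty. A try-with-resources sketch of the same read (the path is illustrative, and FileNotFoundException must still be declared or caught, as in the method above):

String xmlContent;
try (Scanner scanner = new Scanner(new File("/tmp/invoice.xml")).useDelimiter("\\Z")) {
    xmlContent = scanner.next();   // the whole file in one token
} // Scanner implements Closeable, so it is closed automatically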