Example usage for java.util Vector isEmpty

List of usage examples for java.util Vector isEmpty

Introduction

On this page you can find example usage for java.util Vector isEmpty.

Prototype

public synchronized boolean isEmpty() 

Document

Tests if this vector has no components.
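
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of how isEmpty is typically used as a guard before reading from a Vector:

import java.util.Vector;

public class VectorIsEmptyDemo {
    public static void main(String[] args) {
        Vector<String> names = new Vector<>();

        // isEmpty() returns true while the vector has no components
        if (names.isEmpty()) {
            System.out.println("No names collected yet");
        }

        names.add("first");

        // after adding an element, isEmpty() returns false
        if (!names.isEmpty()) {
            System.out.println("First name: " + names.firstElement());
        }
    }
}

Compared with checking size() == 0, isEmpty() states the intent directly; on Vector both methods are synchronized.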

Usage

From source file:org.schabi.newpipe.extractor.services.youtube.YoutubeStreamExtractor.java

@Override
public List<VideoStream> getVideoStreams() throws ParsingException {
    Vector<VideoStream> videoStreams = new Vector<>();

    try {
        String encodedUrlMap;
        // playerArgs could be null if the video is age restricted
        if (playerArgs == null) {
            encodedUrlMap = videoInfoPage.get("url_encoded_fmt_stream_map");
        } else {
            encodedUrlMap = playerArgs.getString("url_encoded_fmt_stream_map");
        }
        for (String url_data_str : encodedUrlMap.split(",")) {
            try {
                // This loop iterates through multiple streams, therefore tags
                // relate to one and the same stream at a time.
                Map<String, String> tags = Parser
                        .compatParseMap(org.jsoup.parser.Parser.unescapeEntities(url_data_str, true));

                int itag = Integer.parseInt(tags.get("itag"));

                if (itagIsSupported(itag)) {
                    ItagItem itagItem = getItagItem(itag);
                    if (itagItem.itagType == ItagType.VIDEO) {
                        String streamUrl = tags.get("url");
                        // if video has a signature: decrypt it and add it to the url
                        if (tags.get("s") != null) {
                            streamUrl = streamUrl + "&signature="
                                    + decryptSignature(tags.get("s"), decryptionCode);
                        }
                        videoStreams.add(
                                new VideoStream(streamUrl, itagItem.mediaFormatId, itagItem.resolutionString));
                    }
                }
            } catch (Exception e) {
                // TODO: don't log; throw an error instead
                System.err.println("Could not get Video stream.");
                e.printStackTrace();
            }
        }

    } catch (Exception e) {
        throw new ParsingException("Failed to get video streams", e);
    }

    if (videoStreams.isEmpty()) {
        throw new ParsingException("Failed to get any video stream");
    }
    return videoStreams;
}
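
In this extractor, isEmpty() is a final sanity check: streams are accumulated in the Vector and a ParsingException is thrown only if nothing at all could be parsed. A stripped-down, hypothetical sketch of that collect-then-fail-if-empty pattern (the entry filtering below is a placeholder, not NewPipe's actual parsing logic):

import java.util.Vector;

class EmptyResultGuard {
    // collects non-blank entries; throws only if nothing usable was found,
    // mirroring the videoStreams.isEmpty() check above
    static Vector<String> parseStreams(String[] rawEntries) {
        Vector<String> result = new Vector<>();
        for (String entry : rawEntries) {
            if (entry != null && !entry.trim().isEmpty()) {
                result.add(entry.trim());
            }
        }
        if (result.isEmpty()) {
            throw new IllegalStateException("Failed to get any stream");
        }
        return result;
    }
}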

From source file:org.unitime.timetable.test.BatchStudentSectioningLoader.java

public Student loadStudent(org.unitime.timetable.model.Student s, Hashtable courseTable, Hashtable classTable) {
    sLog.debug("Loading student " + s.getUniqueId() + " (id=" + s.getExternalUniqueId() + ", name="
            + s.getFirstName() + " " + s.getMiddleName() + " " + s.getLastName() + ")");
    Student student = new Student(s.getUniqueId().longValue());
    if (iLoadStudentInfo)
        loadStudentInfo(student, s);
    int priority = 0;
    for (Iterator i = new TreeSet(s.getCourseDemands()).iterator(); i.hasNext();) {
        CourseDemand cd = (CourseDemand) i.next();
        if (cd.getFreeTime() != null) {
            Request request = new FreeTimeRequest(cd.getUniqueId().longValue(), priority++,
                    cd.isAlternative().booleanValue(), student,
                    new TimeLocation(cd.getFreeTime().getDayCode().intValue(),
                            cd.getFreeTime().getStartSlot().intValue(), cd.getFreeTime().getLength().intValue(),
                            0, 0, s.getSession().getDefaultDatePattern().getUniqueId(), "",
                            s.getSession().getDefaultDatePattern().getPatternBitSet(), 0));
            sLog.debug("  -- added request " + request);
        } else if (!cd.getCourseRequests().isEmpty()) {
            Vector courses = new Vector();
            HashSet selChoices = new HashSet();
            HashSet wlChoices = new HashSet();
            HashSet assignedSections = new HashSet();
            Config assignedConfig = null;
            for (Iterator j = new TreeSet(cd.getCourseRequests()).iterator(); j.hasNext();) {
                org.unitime.timetable.model.CourseRequest cr = (org.unitime.timetable.model.CourseRequest) j
                        .next();
                Course course = (Course) courseTable.get(cr.getCourseOffering().getUniqueId());
                if (course == null) {
                    sLog.warn("  -- course " + cr.getCourseOffering().getCourseName() + " not loaded");
                    continue;
                }
                for (Iterator k = cr.getClassWaitLists().iterator(); k.hasNext();) {
                    ClassWaitList cwl = (ClassWaitList) k.next();
                    Section section = course.getOffering().getSection(cwl.getClazz().getUniqueId().longValue());
                    if (section != null) {
                        if (cwl.getType().equals(ClassWaitList.TYPE_SELECTION))
                            selChoices.add(section.getChoice());
                        else if (cwl.getType().equals(ClassWaitList.TYPE_WAITLIST))
                            wlChoices.add(section.getChoice());
                    }
                }
                if (assignedConfig == null) {
                    for (Iterator k = cr.getClassEnrollments().iterator(); k.hasNext();) {
                        StudentClassEnrollment sce = (StudentClassEnrollment) k.next();
                        Section section = course.getOffering()
                                .getSection(sce.getClazz().getUniqueId().longValue());
                        if (section != null) {
                            assignedSections.add(section);
                            assignedConfig = section.getSubpart().getConfig();
                        }
                    }
                }
                courses.addElement(course);
            }
            if (courses.isEmpty())
                continue;
            CourseRequest request = new CourseRequest(cd.getUniqueId().longValue(), priority++,
                    cd.isAlternative().booleanValue(), student, courses, cd.isWaitlist().booleanValue(),
                    cd.getTimestamp().getTime());
            request.getSelectedChoices().addAll(selChoices);
            request.getWaitlistedChoices().addAll(wlChoices);
            if (assignedConfig != null && assignedSections.size() == assignedConfig.getSubparts().size()) {
                Enrollment enrollment = new Enrollment(request, 0, assignedConfig, assignedSections,
                        getAssignment());
                request.setInitialAssignment(enrollment);
            }
            sLog.debug("  -- added request " + request);
        } else {
            sLog.warn("  -- course demand " + cd.getUniqueId() + " has no course requests");
        }
    }

    return student;
}
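
Here courses.isEmpty() is used to skip a course demand entirely when none of its course offerings could be resolved. A minimal, hypothetical sketch of that skip-when-empty loop pattern (the lookup is simulated with a null check):

import java.util.List;
import java.util.Vector;

class SkipEmptyDemo {
    // builds one Vector per group and skips groups that resolve to nothing
    static void processGroups(List<List<String>> groups) {
        for (List<String> group : groups) {
            Vector<String> resolved = new Vector<>();
            for (String item : group) {
                if (item != null) {          // stand-in for a real lookup
                    resolved.add(item);
                }
            }
            if (resolved.isEmpty()) {
                continue;                    // nothing usable in this group
            }
            System.out.println("Processing " + resolved.size() + " items");
        }
    }
}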

From source file:org.ecoinformatics.seek.datasource.eml.eml2.Eml200DataSource.java

/**
 * Reads a row vector from the data source, either from the result set
 * produced by the data query or from the delimited reader that reads the
 * data input stream (_reader). This method is called when the actor is
 * fired.
 */
public Vector gotRowVectorFromSource() throws Exception {
    Vector rowVector = new Vector();
    if (_resultSet != null) {

        ResultSet rs = _resultSet.getResultSet();
        ResultSetMetaData metadata = rs.getMetaData();
        int columnSize = metadata.getColumnCount();

        if (rs.next()) {
            for (int i = 0; i < columnSize; i++) {
                String str = rs.getString(i + 1);
                rowVector.add(str);
            }
        }
    } else if (_reader != null && (!_hasSQLCommand || _numberOfEntities == 1)) {
        if (_selectedTableEntity.isSimpleDelimited()) {
            _simpleDelimitedReader.setCollapseDelimiter(_selectedTableEntity.getCollapseDelimiter());
            _simpleDelimitedReader.setNumFooterLines(_selectedTableEntity.getNumFooterLines());
            rowVector = _simpleDelimitedReader.getRowDataVectorFromStream();
        } else {
            rowVector = _complexFormatReader.getRowDataVectorFromStream();
        }

    }

    if (rowVector.isEmpty()) {
        _endOfResultSet = true;
    }
    return rowVector;
}
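
In this reader, an empty row Vector doubles as an end-of-data signal: when rowVector.isEmpty() returns true, the _endOfResultSet flag is set. A small, hypothetical sketch of the same sentinel pattern over a plain iterator:

import java.util.Iterator;
import java.util.Vector;

class EndOfDataDemo {
    private boolean endOfData = false;

    // returns the next row, or an empty Vector once the source is exhausted
    Vector<String> nextRow(Iterator<String[]> source) {
        Vector<String> row = new Vector<>();
        if (source.hasNext()) {
            for (String cell : source.next()) {
                row.add(cell);
            }
        }
        if (row.isEmpty()) {
            endOfData = true;   // empty row marks the end of the stream
        }
        return row;
    }

    boolean isEndOfData() {
        return endOfData;
    }
}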

From source file:edu.ku.brc.specify.conversion.ConvertTaxonHelper.java

/**
 * Converts the taxonomy tree definition from the old taxonomicunittype table to the new table
 * pair: TaxonTreeDef & TaxonTreeDefItems. The resulting TaxonTreeDef is attached to the given
 * CollectionInfo and cached in the internal hash tables.
 * 
 * @param colInfo the collection whose old tree definition (identified by its taxonomyTypeId)
 *            should be converted
 */
public void convertTaxonTreeDefinition(final CollectionInfo colInfo) {
    if (!colInfo.isInUse()) {
        return;
    }

    TaxonTreeDef taxonTreeDef = newTaxonInfoHash.get(colInfo.getTaxonNameId());
    if (taxonTreeDef != null) {
        colInfo.setTaxonTreeDef(taxonTreeDef);
        return;
    }

    Integer oldTaxonRootId = colInfo.getTaxonNameId();
    Integer taxonomyTypeId = colInfo.getTaxonomyTypeId();

    try {
        Statement st = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);

        taxonTreeDef = new TaxonTreeDef();
        taxonTreeDef.initialize();

        String sql = "SELECT TaxonomyTypeName, KingdomID FROM taxonomytype WHERE TaxonomyTypeID = "
                + taxonomyTypeId;
        log.debug(sql);
        ResultSet rs = st.executeQuery(sql);
        rs.next();
        String taxonomyTypeName = rs.getString(1);
        int kingdomID = rs.getInt(2);
        rs.close();

        taxonTreeDef.setName(taxonomyTypeName + " taxonomy tree");
        taxonTreeDef.setRemarks("Tree converted from " + oldDBName);
        taxonTreeDef.setFullNameDirection(TreeDefIface.FORWARD);

        sql = String.format(
                "SELECT RankID, RankName, RequiredParentRankID, TaxonomicUnitTypeID FROM taxonomicunittype "
                        + "WHERE TaxonomyTypeID = %d AND (Kingdom = %d  OR RankID = 0) ORDER BY RankID",
                taxonomyTypeId, kingdomID);
        log.debug(sql);
        rs = st.executeQuery(sql);

        Hashtable<Integer, Integer> rankId2TxnUntTypId = new Hashtable<Integer, Integer>();
        int rank;
        String name;
        int requiredRank;

        Vector<TaxonTreeDefItem> items = new Vector<TaxonTreeDefItem>();
        Vector<Integer> enforcedRanks = new Vector<Integer>();

        while (rs.next()) {
            rank = rs.getInt(1);
            name = rs.getString(2);
            requiredRank = rs.getInt(3);

            int taxUnitTypeId = rs.getInt(4);

            if (StringUtils.isEmpty(name) || (rank > 0 && requiredRank == 0)) {
                continue;
            }

            if (rankId2TxnUntTypId.get(rank) != null) {
                String msg = String.format(
                        "Old TreeDef has two of the same Rank %d, throwing it out.\n\nYou must fix this before proceeding.",
                        rank);
                tblWriter.logError(msg);
                log.debug(msg);
                UIRegistry.displayErrorDlg(msg);
                System.exit(0);
            }
            rankId2TxnUntTypId.put(rank, taxUnitTypeId);

            log.debug(rank + "  " + name + "  TaxonomicUnitTypeID: " + taxUnitTypeId);

            TaxonTreeDefItem ttdi = new TaxonTreeDefItem();
            ttdi.initialize();
            ttdi.setName(name);
            ttdi.setFullNameSeparator(" ");
            ttdi.setRankId(rank);
            ttdi.setTreeDef(taxonTreeDef);
            taxonTreeDef.getTreeDefItems().add(ttdi);

            ttdi.setIsInFullName(rank >= TaxonTreeDef.GENUS);

            // setup the parent/child relationship
            if (items.isEmpty()) {
                ttdi.setParent(null);
            } else {
                ttdi.setParent(items.lastElement());
            }
            items.add(ttdi);
            enforcedRanks.add(requiredRank);
        }
        rs.close();

        for (TaxonTreeDefItem i : items) {
            i.setIsEnforced(enforcedRanks.contains(i.getRankId()));
        }

        try {
            Session session = HibernateUtil.getNewSession();
            Transaction trans = session.beginTransaction();
            session.save(taxonTreeDef);
            trans.commit();
            session.close();

        } catch (Exception ex) {
            ex.printStackTrace();
            throw new RuntimeException(ex);
        }

        IdMapperMgr idMapperMgr = IdMapperMgr.getInstance();
        IdMapperIFace tutMapper = idMapperMgr.get("TaxonomicUnitType", "TaxonomicUnitTypeID");
        IdMapperIFace taxonomyTypeMapper = idMapperMgr.get("TaxonomyType", "TaxonomyTypeID");

        //tutMapper.reset();

        //if (taxonomyTypeMapper.get(taxonomyTypeId) == null)
        //{
        taxonomyTypeMapper.put(taxonomyTypeId, taxonTreeDef.getId());
        //}

        for (TaxonTreeDefItem ttdi : taxonTreeDef.getTreeDefItems()) {
            int ttdiId = rankId2TxnUntTypId.get(ttdi.getRankId());
            log.debug("Mapping " + ttdiId + " -> " + ttdi.getId() + "  RankId: " + ttdi.getRankId());
            tutMapper.put(ttdiId, ttdi.getId());
        }

        newTaxonInfoHash.put(oldTaxonRootId, taxonTreeDef);

        CollectionInfo ci = getCIByTaxonTypeId(taxonomyTypeId);
        ci.setTaxonTreeDef(taxonTreeDef);

        taxonTreeDefHash.put(taxonomyTypeId, taxonTreeDef);
        log.debug("Hashing taxonomyTypeId: " + taxonomyTypeId + " ->  taxonTreeDefId:" + taxonTreeDef.getId());

    } catch (SQLException ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    }
}
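
The items.isEmpty() check above decides whether the item being built is the tree root (no parent) or a child of the previously added item, taken with items.lastElement(). A hypothetical, stripped-down sketch of that root-versus-child chaining pattern:

import java.util.Vector;

class ParentChainDemo {
    static class Node {
        final String name;
        final Node parent;
        Node(String name, Node parent) {
            this.name = name;
            this.parent = parent;
        }
    }

    // builds a simple parent chain; the first node has no parent
    static Vector<Node> buildChain(String[] names) {
        Vector<Node> items = new Vector<>();
        for (String name : names) {
            Node parent = items.isEmpty() ? null : items.lastElement();
            items.add(new Node(name, parent));
        }
        return items;
    }
}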

From source file:org.executequery.gui.importexport.ImportDelimitedWorker.java

private Object doWork() {

    // the process result
    String processResult = null;

    // are we halting on any error
    int onError = getParent().getOnError();
    haltOnError = (onError == ImportExportProcess.STOP_TRANSFER);

    boolean isBatch = getParent().runAsBatchProcess();

    appendProgressText("Beginning import from delimited file process...");
    appendProgressText("Using connection: " + getParent().getDatabaseConnection().getName());

    // ---------------------------------------
    // table specific counters

    // the table statement result
    int tableInsertCount = 0;

    // the records processed for this table
    int tableRowCount = 0;

    // the table commit count
    int tableCommitCount = 0;

    // ---------------------------------------
    // total import process counters

    // the current commit block size
    int commitCount = 0;

    // the total records inserted
    int totalInsertCount = 0;

    // the total records processed
    int totalRecordCount = 0;

    // the error count
    int errorCount = 0;

    // the current line number
    int lineNumber = 0;

    int rollbackSize = getParent().getRollbackSize();
    int rollbackCount = 0;

    FileReader fileReader = null;
    BufferedReader reader = null;
    DateFormat dateFormat = null;

    try {
        // retrieve the import files
        Vector files = getParent().getDataFileVector();
        int fileCount = files.size();

        // whether to trim whitespace
        boolean trimWhitespace = getParent().trimWhitespace();

        // whether this table has a date/time field
        boolean hasDate = false;

        // whether we are parsing date formats
        boolean parsingDates = parseDateValues();

        // column names are first row
        boolean hasColumnNames = getParent().includeColumnNames();

        // currently bound variables in the prepared statement
        Map<ColumnData, String> boundVariables = null;

        // ignored indexes of columns from the file
        List<Integer> ignoredIndexes = null;

        if (hasColumnNames) {
            boundVariables = new HashMap<ColumnData, String>();
            ignoredIndexes = new ArrayList<Integer>();
            appendProgressText("Using column names from input file's first row.");
        }

        // columns to be imported that are in the file
        Map<ColumnData, String> fileImportedColumns = new HashMap<ColumnData, String>();

        // whether the data format failed (switch structure)
        boolean failed = false;

        // define the delimiter
        String delim = getParent().getDelimiter();

        // ---------------------------
        // --- initialise counters ---
        // ---------------------------

        // the table's column count
        int columnCount = -1;

        // the length of each line in the file
        int rowLength = -1;

        // progress bar values
        int progressStatus = -1;

        // ongoing progress value
        int progressCheck = -1;

        // the import file size
        long fileSize = -1;

        // set the date format

        if (parseDateValues()) {

            try {

                dateFormat = createDateFormatter();

            } catch (IllegalArgumentException e) {

                errorCount++;
                outputExceptionError("Error applying date mask", e);

                return FAILED;
            }

        }

        // record the start time
        start();

        // setup the regex matcher for delims

        // ----------------------------------------------------------------
        // below was the original pattern from the O'Reilly book.
        // discovered issues when parsing values with quotes
        // in them - not only around them.
        /*
        String regex =
            "(?:^|\\" +
            delim +
            ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \" | ( [^\"\\" +
            delim + "]*+ ) )";
        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");
        */
        // ----------------------------------------------------------------

        // modified above to regex below
        // added the look-ahead after the close quote
        // and removed the quote from the last regex pattern

        String escapedDelim = escapeDelim(delim);

        String regex = "(?:^|" + escapedDelim + ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \"(?="
                + escapedDelim + "?) | ( [^" + escapedDelim + "]*+ ) )";

        // ----------------------------------------------------------------
        // changed above to the following - seems to work for now
        // regex pattern in full - where <delim> is the delimiter to use
        //      \"([^\"]+?)\"<delim>?|([^<delim>]+)<delim>?|<delim>
        //
        // fixed oreilly one - not running this one
        // ----------------------------------------------------------------

        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");

        // ----------------------------------------
        // --- begin looping through the tables ---
        // ----------------------------------------

        // ensure the connection has auto-commit to false
        conn = getConnection();
        conn.setAutoCommit(false);

        int currentRowLength = 0;
        boolean insertLine = false;

        // the number of columns actually available in the file
        int filesColumnCount = 0;

        for (int i = 0; i < fileCount; i++) {

            lineNumber = 0;
            tableInsertCount = 0;
            tableCommitCount = 0;
            rollbackCount = 0;
            tableRowCount = 0;
            rowLength = 0;

            if (Thread.interrupted()) {
                setProgressStatus(100);
                throw new InterruptedException();
            }

            tableCount++;

            DataTransferObject dto = (DataTransferObject) files.elementAt(i);

            // initialise the file object
            File inputFile = new File(dto.getFileName());

            outputBuffer.append("---------------------------\nTable: ");
            outputBuffer.append(dto.getTableName());
            outputBuffer.append("\nImport File: ");
            outputBuffer.append(inputFile.getName());
            appendProgressText(outputBuffer);

            // setup the reader objects
            fileReader = new FileReader(inputFile);
            reader = new BufferedReader(fileReader);

            // retrieve the columns to be imported (or all)
            Vector<ColumnData> columns = getColumns(dto.getTableName());
            columnCount = columns.size();
            filesColumnCount = columnCount;

            // the entire row read
            String row = null;

            // the current delimited value
            String value = null;

            // the ignored column count
            int ignoredCount = 0;

            // clear the file columns cache
            fileImportedColumns.clear();

            // if the first row in the file has the column
            // names compare these with the columns selected
            if (hasColumnNames) {

                // init the bound vars cache with the selected columns
                boundVariables.clear();

                for (int k = 0; k < columnCount; k++) {

                    boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                }

                row = reader.readLine();
                lineNumber++;

                String[] _columns = MiscUtils.splitSeparatedValues(row, delim);
                if (_columns != null && _columns.length > 0) {

                    filesColumnCount = _columns.length;

                    // --------------------------------------
                    // first determine if we have any columns in the
                    // input file that were not selected for import

                    // reset the ignored columns
                    ignoredIndexes.clear();

                    // set up another list to re-add the columns in
                    // the order in which they appear in the file.
                    // all other columns will be added to the end
                    Vector<ColumnData> temp = new Vector<ColumnData>(columnCount);

                    ColumnData cd = null;
                    int ignoredIndex = -1;
                    for (int j = 0; j < _columns.length; j++) {
                        ignoredIndex = j;
                        String column = _columns[j];

                        for (int k = 0; k < columnCount; k++) {
                            cd = columns.get(k);
                            String _column = cd.getColumnName();

                            if (_column.equalsIgnoreCase(column)) {
                                temp.add(cd);
                                fileImportedColumns.put(cd, INCLUDED_COLUMN);
                                ignoredIndex = -1;
                                break;
                            }

                        }

                        if (ignoredIndex != -1) {

                            ignoredIndexes.add(Integer.valueOf(ignoredIndex));
                        }

                    }
                    ignoredCount = ignoredIndexes.size();

                    // if we didn't find any columns at all, show warning
                    if (temp.isEmpty()) {

                        String message = "No matching column names were "
                                + "found within the specified file's first line.\n"
                                + "The current file will be ignored.";

                        outputBuffer.append(message);
                        appendProgressWarningText(outputBuffer);

                        int yesNo = GUIUtilities.displayYesNoDialog(message + "\nDo you wish to continue?",
                                "Warning");

                        if (yesNo == JOptionPane.YES_OPTION) {
                            continue;
                        } else {
                            throw new InterruptedException();
                        }

                    } else {

                        // add any other selected columns to the
                        // end of the temp list with the columns
                        // available in the file
                        boolean addColumn = false;
                        for (int k = 0; k < columnCount; k++) {
                            addColumn = false;
                            cd = columns.get(k);
                            for (int j = 0, n = temp.size(); j < n; j++) {
                                addColumn = true;
                                if (temp.get(j) == cd) {
                                    addColumn = false;
                                    break;
                                }
                            }

                            if (addColumn) {
                                temp.add(cd);
                            }

                        }
                        columns = temp; // note: size should not have changed
                    }

                }
            }
            // otherwise just populate the columns in the file
            // with all the selected columns
            else {

                for (int j = 0; j < columnCount; j++) {

                    fileImportedColumns.put(columns.get(j), INCLUDED_COLUMN);
                }

            }

            /*
            Log.debug("ignored count: " + ignoredCount);
            for (int j = 0; j < columnCount; j++) {
            Log.debug("column: " + columns.get(j));
            }
            */

            fileSize = inputFile.length();
            progressStatus = 10;
            progressCheck = (int) (fileSize / progressStatus);

            // prepare the statement
            prepareStatement(dto.getTableName(), columns);

            if (parsingDates && dateFormat == null) {

                // check for a date data type
                for (int j = 0; j < columnCount; j++) {

                    if (dateFormat == null && !hasDate) {

                        ColumnData cd = columns.get(j);

                        if (fileImportedColumns.containsKey(cd)) {

                            if (cd.isDateDataType()) {

                                hasDate = true;
                                break;
                            }

                        }

                    }
                }

                if (hasDate && dateFormat == null) {

                    String pattern = verifyDate();

                    if (StringUtils.isNotBlank(pattern)) {

                        fileReader.close();
                        setProgressStatus(100);
                        throw new InterruptedException();
                    }

                    dateFormat = createDateFormatter(pattern);
                }

            }

            rowLength = 0;

            while ((row = reader.readLine()) != null) {

                insertLine = true;
                lineNumber++;
                tableRowCount++;
                totalRecordCount++;

                if (Thread.interrupted()) {

                    fileReader.close();
                    printTableResult(tableRowCount, tableCommitCount, dto.getTableName());

                    setProgressStatus(100);
                    throw new InterruptedException();
                }

                currentRowLength = row.length();

                if (currentRowLength == 0) {

                    outputBuffer.append("Line ");
                    outputBuffer.append(lineNumber);
                    outputBuffer.append(" contains no delimited values");
                    appendProgressWarningText(outputBuffer);

                    int yesNo = GUIUtilities.displayYesNoDialog("No values provided from line " + lineNumber
                            + " - the row is blank.\n" + "Do you wish to continue?", "Warning");

                    if (yesNo == JOptionPane.YES_OPTION) {
                        continue;
                    } else {
                        throw new InterruptedException();
                    }
                }

                rowLength += currentRowLength;
                if (progressCheck < rowLength) {

                    setProgressStatus(progressStatus);
                    progressStatus += 10;
                    rowLength = 0;
                }

                // reset matcher with current row
                matcher.reset(row);

                int index = 0;
                int lastIndex = -1;
                int loopIgnoredCount = 0;

                //Log.debug(row);

                for (int j = 0; j < filesColumnCount; j++) {

                    if (matcher.find(index)) {

                        String first = matcher.group(2);

                        if (first != null) {

                            value = first;

                        } else {

                            qMatcher.reset(matcher.group(1));
                            value = qMatcher.replaceAll("\"");
                        }

                        index = matcher.end();

                        // check if its an ignored column
                        if (ignoredCount > 0) {

                            if (isIndexIgnored(ignoredIndexes, j)) {

                                loopIgnoredCount++;
                                continue;
                            }

                        }

                    } else {

                        // not enough delims check
                        if (j < (filesColumnCount - 1) && index > (currentRowLength - 1)) {

                            outputBuffer.append("Insufficient number of column ");
                            outputBuffer.append("values provided at line ");
                            outputBuffer.append(lineNumber);
                            appendProgressErrorText(outputBuffer);

                            int yesNo = GUIUtilities
                                    .displayYesNoDialog("Insufficient number of values provided from line "
                                            + lineNumber + ".\n" + "Do you wish to continue?", "Warning");

                            if (yesNo == JOptionPane.YES_OPTION) {

                                insertLine = false;
                                break;

                            } else {

                                throw new InterruptedException();
                            }

                        } else {

                            // check if we're on a delim the matcher didn't pick up

                            int delimLength = delim.length();

                            if (row.substring(index, index + delimLength).equals(delim)) {

                                // increment index
                                index++;
                                // assign as null value
                                value = null;
                            }

                        }

                    }

                    // check if we landed on the same index - likely null value
                    if (index == lastIndex) {
                        index++;
                    }
                    lastIndex = index;

                    if (value != null && value.trim().length() == 0) {
                        value = null;
                    }

                    try {
                        ColumnData cd = columns.get(j - loopIgnoredCount);
                        setValue(value, getIndexOfColumn(columns, cd) + 1, cd.getSQLType(), trimWhitespace,
                                dateFormat);

                        if (hasColumnNames) {
                            boundVariables.put(cd, VARIABLE_BOUND);
                        }

                    } catch (ParseException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing date value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;

                    } catch (NumberFormatException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;
                    }

                }

                if (!insertLine) {

                    prepStmnt.clearParameters();
                    continue;
                }

                if (failed && haltOnError) {

                    processResult = FAILED;
                    break;
                }

                // execute the statement
                try {

                    // check all variables are bound if we used
                    // the column names from the first row
                    if (hasColumnNames) {

                        index = 0;
                        // check all variables are bound - insert NULL otherwise

                        for (Map.Entry<ColumnData, String> entry : boundVariables.entrySet()) {

                            ColumnData cd = entry.getKey();

                            if (VARIABLE_NOT_BOUND.equals(entry.getValue())) {

                                index = getIndexOfColumn(columns, cd);
                                prepStmnt.setNull(index + 1, cd.getSQLType());
                            }

                        }

                    }

                    if (isBatch) {
                        prepStmnt.addBatch();
                    } else {
                        int result = prepStmnt.executeUpdate();
                        tableInsertCount += result;
                        commitCount += result;
                    }

                    rollbackCount++;
                    // check the rollback segment
                    if (rollbackCount == rollbackSize) {
                        if (isBatch) {
                            int result = getBatchResult(prepStmnt.executeBatch())[0];
                            tableInsertCount += result;
                            commitCount += result;
                            prepStmnt.clearBatch();
                        }
                        conn.commit();
                        totalInsertCount += commitCount;
                        tableCommitCount = tableInsertCount;
                        rollbackCount = 0;
                        commitCount = 0;
                    }

                    // reset bound variables
                    if (hasColumnNames) {
                        for (int k = 0; k < columnCount; k++) {
                            boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                        }
                    }

                } catch (SQLException e) {
                    logException(e);
                    errorCount++;

                    if (!isBatch) {
                        outputBuffer.append("Error inserting data from line ");
                        outputBuffer.append(lineNumber);
                        outputExceptionError(null, e);
                    } else {
                        outputBuffer.append("Error on last batch execution");
                        outputExceptionError(null, e);
                    }

                    if (haltOnError) {
                        processResult = FAILED;
                        conn.rollback();
                        getParent().cancelTransfer();
                        throw new InterruptedException();
                    }

                }

            }

            // ----------------------------
            // file/table has ended here

            if (isBatch) {

                int[] batchResult = null;

                try {
                    batchResult = getBatchResult(prepStmnt.executeBatch());
                    int result = batchResult[0];
                    tableInsertCount += result;
                    commitCount += result;
                    tableCommitCount = tableInsertCount;
                } catch (BatchUpdateException e) {
                    logException(e);
                    int[] updateCounts = e.getUpdateCounts();
                    batchResult = getBatchResult(updateCounts);
                    errorCount += batchResult[1];
                    if (errorCount == 0) {
                        errorCount = 1;
                    }

                    outputBuffer.append("An error occured during the batch process: ");
                    outputBuffer.append(e.getMessage());

                    SQLException _e = e.getNextException();
                    while (_e != null) {
                        outputBuffer.append("\nNext Exception: ");
                        outputBuffer.append(_e.getMessage());
                        _e = _e.getNextException();
                    }

                    outputBuffer.append("\n\nRecords processed to the point ");
                    outputBuffer.append("where this error occurred: ");
                    outputBuffer.append(updateCounts.length);
                    appendProgressErrorText(outputBuffer);
                    processResult = FAILED;
                }

                //  Log.debug("commitCount: " + commitCount +
                //                      " batch: " + batchResult[0]);

                if (tableRowCount != tableInsertCount) {
                    conn.rollback();

                    if (onError == ImportExportProcess.STOP_TRANSFER) {
                        getParent().cancelTransfer();
                        processResult = FAILED;
                        throw new InterruptedException();
                    }

                }

            }

            boolean doCommit = true;
            if (failed && !isBatch && rollbackSize != ImportExportProcess.COMMIT_END_OF_ALL_FILES) {

                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the last block?",
                        "Confirm commit");

                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            // do the commit if ok from above
            // and if rollback size selected is end of file
            if (rollbackSize == ImportExportProcess.COMMIT_END_OF_FILE) {
                if (doCommit) {
                    conn.commit();
                    totalInsertCount += commitCount;
                    tableCommitCount = tableInsertCount;
                    commitCount = 0;
                } else {
                    conn.rollback();
                }
            }

            // update the progress display
            printTableResult(tableRowCount, tableInsertCount, dto.getTableName());
            setProgressStatus(100);

            // reset the checks
            hasDate = false;
            failed = false;

        }

        // commit the last remaining block or where
        // set to commit at the end of all files
        if (rollbackSize != ImportExportProcess.COMMIT_END_OF_FILE) {
            setProgressStatus(100);
            boolean doCommit = true;
            if (errorCount > 0 && errorCount != totalRecordCount) {
                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the changes?",
                        "Confirm commit");
                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }

        }

        processResult = SUCCESS;
    } catch (InterruptedException e) {

        if (processResult != FAILED) {
            processResult = CANCELLED;
        }

        try {
            if (prepStmnt != null) {
                prepStmnt.cancel();
            }
            if (conn != null) {
                conn.rollback();
            }
        } catch (SQLException e2) {
            outputExceptionError("Error rolling back transaction", e);
        }

    } catch (Exception e) {
        logException(e);
        outputBuffer.append("Error processing data from line ");
        outputBuffer.append(lineNumber);
        outputExceptionError("\nUnrecoverable error importing table data from file", e);

        int yesNo = GUIUtilities.displayYesNoDialog(
                "The process encountered errors.\n" + "Do you wish to commit the last transaction block?",
                "Confirm commit");
        boolean doCommit = (yesNo == JOptionPane.YES_OPTION);

        try {
            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }
        } catch (SQLException e2) {
            logException(e2);
            outputExceptionError("Error processing last transaction block", e2);
        }
        processResult = FAILED;
    } finally {
        finish();
        releaseResources(getParent().getDatabaseConnection());

        if (totalRecordCount == 0 || errorCount > 0) {
            processResult = FAILED;
        }

        setTableCount(tableCount);
        setRecordCount(totalRecordCount);
        setRecordCountProcessed(totalInsertCount);
        setErrorCount(errorCount);

        setProgressStatus(100);
        GUIUtilities.scheduleGC();

        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
            }
        }
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException e) {
            }
        }
        if (prepStmnt != null) {
            try {
                prepStmnt.close();
            } catch (SQLException e) {
            }
        }

    }

    return processResult;
}
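
Within the header-matching step, temp.isEmpty() is what detects that none of the column names in the file's first row matched the selected columns, prompting the warning dialog. A compact, hypothetical sketch of that match-then-check step (column names and the warning text are placeholders):

import java.util.Vector;

class ColumnMatchDemo {
    // keeps only the selected columns that also appear in the file header;
    // an empty result means the header matched nothing
    static Vector<String> matchColumns(String[] header, Vector<String> selected) {
        Vector<String> matched = new Vector<>();
        for (String fileColumn : header) {
            for (String candidate : selected) {
                if (candidate.equalsIgnoreCase(fileColumn)) {
                    matched.add(candidate);
                    break;
                }
            }
        }
        if (matched.isEmpty()) {
            System.err.println("No matching column names were found in the first line");
        }
        return matched;
    }
}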

From source file:net.sf.jabref.JabRef.java

public Optional<Vector<ParserResult>> processArguments(String[] args, boolean initialStartup) {

    cli = new JabRefCLI(args);

    if (!cli.isBlank() && cli.isDebugLogging()) {
        JabRefLogger.setDebug();
    }

    if (initialStartup && cli.isShowVersion()) {
        cli.displayVersion();
    }

    if (initialStartup && cli.isHelp()) {
        cli.printUsage();
        return Optional.empty();
    }

    // Check if we should reset all preferences to default values:
    if (cli.isPreferencesReset()) {
        String value = cli.getPreferencesReset();
        if ("all".equals(value.trim())) {
            try {
                System.out.println(Localization.lang("Setting all preferences to default values."));
                Globals.prefs.clear();
            } catch (BackingStoreException e) {
                System.err.println(Localization.lang("Unable to clear preferences."));
                e.printStackTrace();
            }
        } else {
            String[] keys = value.split(",");
            for (String key : keys) {
                if (Globals.prefs.hasKey(key.trim())) {
                    System.out.println(Localization.lang("Resetting preference key '%0'", key.trim()));
                    Globals.prefs.clear(key.trim());
                } else {
                    System.out.println(Localization.lang("Unknown preference key '%0'", key.trim()));
                }
            }
        }

    }

    // Check if we should import preferences from a file:
    if (cli.isPreferencesImport()) {
        try {
            Globals.prefs.importPreferences(cli.getPreferencesImport());
            CustomEntryTypesManager.loadCustomEntryTypes(Globals.prefs);
            ExportFormats.initAllExports();
        } catch (JabRefException ex) {
            LOGGER.error("Cannot import preferences", ex);
        }
    }

    // Vector to put imported/loaded database(s) in.
    Vector<ParserResult> loaded = new Vector<>();
    Vector<String> toImport = new Vector<>();
    if (!cli.isBlank() && (cli.getLeftOver().length > 0)) {
        for (String aLeftOver : cli.getLeftOver()) {
            // Leftover arguments that have a "bib" extension are interpreted as
            // bib files to open. Other files, and files that could not be opened
            // as bib, we try to import instead.
            boolean bibExtension = aLeftOver.toLowerCase().endsWith("bib");
            ParserResult pr = null;
            if (bibExtension) {
                pr = JabRef.openBibFile(aLeftOver, false);
            }

            if ((pr == null) || (pr == ParserResult.INVALID_FORMAT)) {
                // We will try to import this file. Normally we
                // will import it into a new tab, but if this import has
                // been initiated by another instance through the remote
                // listener, we will instead import it into the current database.
                // This will enable easy integration with web browsers that can
                // open a reference file in JabRef.
                if (initialStartup) {
                    toImport.add(aLeftOver);
                } else {
                    loaded.add(JabRef.importToOpenBase(aLeftOver).orElse(ParserResult.INVALID_FORMAT));
                }
            } else if (pr != ParserResult.FILE_LOCKED) {
                loaded.add(pr);
            }

        }
    }

    if (!cli.isBlank() && cli.isFileImport()) {
        toImport.add(cli.getFileImport());
    }

    for (String filenameString : toImport) {
        importFile(filenameString).ifPresent(loaded::add);
    }

    if (!cli.isBlank() && cli.isImportToOpenBase()) {
        importToOpenBase(cli.getImportToOpenBase()).ifPresent(loaded::add);
    }

    if (!cli.isBlank() && cli.isFetcherEngine()) {
        fetch(cli.getFetcherEngine()).ifPresent(loaded::add);
    }

    if (cli.isExportMatches()) {
        if (!loaded.isEmpty()) {
            String[] data = cli.getExportMatches().split(",");
            String searchTerm = data[0].replace("\\$", " "); //enables blanks within the search term:
            //? stands for a blank
            ParserResult pr = loaded.elementAt(loaded.size() - 1);
            BibDatabase dataBase = pr.getDatabase();

            SearchQuery query = new SearchQuery(searchTerm,
                    Globals.prefs.getBoolean(JabRefPreferences.SEARCH_CASE_SENSITIVE),
                    Globals.prefs.getBoolean(JabRefPreferences.SEARCH_REG_EXP));
            BibDatabase newBase = new DatabaseSearcher(query, dataBase).getDatabaseFromMatches(); //newBase contains only match entries

            //export database
            if ((newBase != null) && (newBase.getEntryCount() > 0)) {
                String formatName;

                //read in the export format, take default format if no format entered
                switch (data.length) {
                case 3:
                    formatName = data[2];
                    break;
                case 2:
                    //default ExportFormat: HTML table (with Abstract & BibTeX)
                    formatName = "tablerefsabsbib";
                    break;
                default:
                    System.err.println(Localization.lang("Output file missing").concat(". \n \t ")
                            .concat(Localization.lang("Usage")).concat(": ")
                            + JabRefCLI.getExportMatchesSyntax());
                    return Optional.empty();
                } //end switch

                //export new database
                IExportFormat format = ExportFormats.getExportFormat(formatName);
                if (format == null) {
                    System.err.println(Localization.lang("Unknown export format") + ": " + formatName);
                } else {
                    // We have an ExportFormat instance:
                    try {
                        System.out.println(Localization.lang("Exporting") + ": " + data[1]);
                        format.performExport(newBase, pr.getMetaData(), data[1], pr.getEncoding(), null);
                    } catch (Exception ex) {
                        System.err.println(Localization.lang("Could not export file") + " '" + data[1] + "': "
                                + ex.getMessage());
                    }
                }
            } /*end if newBase != null*/ else {
                System.err.println(Localization.lang("No search matches."));
            }
        } else {
            System.err.println(Localization.lang("The output option depends on a valid input option."));
        } //end if(loaded.size > 0)
    }

    if (cli.isGenerateBibtexKeys()) {
        regenerateBibtexKeys(loaded);
    }

    if (cli.isAutomaticallySetFileLinks()) {
        automaticallySetFileLinks(loaded);
    }

    if (cli.isFileExport()) {
        if (!loaded.isEmpty()) {
            String[] data = cli.getFileExport().split(",");

            if (data.length == 1) {
                // This signals that the latest import should be stored in BibTeX
                // format to the given file.
                if (!loaded.isEmpty()) {
                    ParserResult pr = loaded.elementAt(loaded.size() - 1);
                    if (!pr.isInvalid()) {
                        try {
                            System.out.println(Localization.lang("Saving") + ": " + data[0]);
                            SavePreferences prefs = SavePreferences.loadForSaveFromPreferences(Globals.prefs);
                            Defaults defaults = new Defaults(BibDatabaseMode.fromPreference(
                                    Globals.prefs.getBoolean(JabRefPreferences.BIBLATEX_DEFAULT_MODE)));
                            BibDatabaseWriter databaseWriter = new BibDatabaseWriter();
                            SaveSession session = databaseWriter.saveDatabase(
                                    new BibDatabaseContext(pr.getDatabase(), pr.getMetaData(), defaults),
                                    prefs);

                            // Show just a warning message if encoding didn't work for all characters:
                            if (!session.getWriter().couldEncodeAll()) {
                                System.err.println(Localization.lang("Warning") + ": " + Localization.lang(
                                        "The chosen encoding '%0' could not encode the following characters:",
                                        session.getEncoding().displayName()) + " "
                                        + session.getWriter().getProblemCharacters());
                            }
                            session.commit(new File(data[0]));
                        } catch (SaveException ex) {
                            System.err.println(Localization.lang("Could not save file.") + "\n"
                                    + ex.getLocalizedMessage());
                        }
                    }
                } else {
                    System.err
                            .println(Localization.lang("The output option depends on a valid import option."));
                }
            } else if (data.length == 2) {
                // This signals that the latest import should be stored in the given
                // format to the given file.
                ParserResult pr = loaded.elementAt(loaded.size() - 1);

                // Set the global variable for this database's file directory before exporting,
                // so formatters can resolve linked files correctly.
                // (This is an ugly hack!)
                File theFile = pr.getFile();
                if (!theFile.isAbsolute()) {
                    theFile = theFile.getAbsoluteFile();
                }
                MetaData metaData = pr.getMetaData();
                metaData.setFile(theFile);
                Globals.prefs.fileDirForDatabase = metaData.getFileDirectory(Globals.FILE_FIELD)
                        .toArray(new String[0]);
                Globals.prefs.databaseFile = metaData.getFile();
                System.out.println(Localization.lang("Exporting") + ": " + data[0]);
                IExportFormat format = ExportFormats.getExportFormat(data[1]);
                if (format == null) {
                    System.err.println(Localization.lang("Unknown export format") + ": " + data[1]);
                } else {
                    // We have an ExportFormat instance:
                    try {
                        format.performExport(pr.getDatabase(), pr.getMetaData(), data[0], pr.getEncoding(),
                                null);
                    } catch (Exception ex) {
                        System.err.println(Localization.lang("Could not export file") + " '" + data[0] + "': "
                                + ex.getMessage());
                    }
                }

            }
        } else {
            System.err.println(Localization.lang("The output option depends on a valid import option."));
        }
    }

    LOGGER.debug("Finished export");

    if (cli.isPreferencesExport()) {
        try {
            Globals.prefs.exportPreferences(cli.getPreferencesExport());
        } catch (JabRefException ex) {
            LOGGER.error("Cannot export preferences", ex);
        }
    }

    if (!cli.isBlank() && cli.isAuxImport()) {
        boolean usageMsg = false;

        if (!loaded.isEmpty()) // bibtex file loaded
        {
            String[] data = cli.getAuxImport().split(",");

            if (data.length == 2) {
                ParserResult pr = loaded.firstElement();
                AuxCommandLine acl = new AuxCommandLine(data[0], pr.getDatabase());
                BibDatabase newBase = acl.perform();

                boolean notSavedMsg = false;

                // write an output, if something could be resolved
                if (newBase != null) {
                    if (newBase.getEntryCount() > 0) {
                        String subName = StringUtil.getCorrectFileName(data[1], "bib");

                        try {
                            System.out.println(Localization.lang("Saving") + ": " + subName);
                            SavePreferences prefs = SavePreferences.loadForSaveFromPreferences(Globals.prefs);
                            BibDatabaseWriter databaseWriter = new BibDatabaseWriter();
                            Defaults defaults = new Defaults(BibDatabaseMode.fromPreference(
                                    Globals.prefs.getBoolean(JabRefPreferences.BIBLATEX_DEFAULT_MODE)));
                            SaveSession session = databaseWriter
                                    .saveDatabase(new BibDatabaseContext(newBase, defaults), prefs);

                            // Show just a warning message if encoding didn't work for all characters:
                            if (!session.getWriter().couldEncodeAll()) {
                                System.err.println(Localization.lang("Warning") + ": " + Localization.lang(
                                        "The chosen encoding '%0' could not encode the following characters:",
                                        session.getEncoding().displayName()) + " "
                                        + session.getWriter().getProblemCharacters());
                            }
                            session.commit(new File(subName));
                        } catch (SaveException ex) {
                            System.err.println(Localization.lang("Could not save file.") + "\n"
                                    + ex.getLocalizedMessage());
                        }

                        notSavedMsg = true;
                    }
                }

                if (!notSavedMsg) {
                    System.out.println(Localization.lang("no database generated"));
                }
            } else {
                usageMsg = true;
            }
        } else {
            usageMsg = true;
        }

        if (usageMsg) {
            System.out.println(Localization.lang("no base-BibTeX-file specified") + "!");
            System.out.println(Localization.lang("usage") + " :");
            System.out.println("jabref --aux infile[.aux],outfile[.bib] base-BibTeX-file");
        }
    }

    return Optional.of(loaded);
}
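
The JabRef example above calls loaded.firstElement() only after checking !loaded.isEmpty(), since firstElement() throws NoSuchElementException on an empty Vector. A minimal, self-contained sketch of that guard (the class and element names are illustrative, not taken from the original source):

import java.util.Vector;

public class IsEmptyGuardExample {
    public static void main(String[] args) {
        Vector<String> loaded = new Vector<>();
        loaded.add("references.bib"); // hypothetical loaded file

        // Guard against an empty Vector before reading its first element.
        if (!loaded.isEmpty()) {
            System.out.println("First loaded file: " + loaded.firstElement());
        } else {
            System.out.println("Nothing was loaded.");
        }
    }
}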

From source file:org.zaproxy.zap.extension.pscanrulesAlpha.StrictTransportSecurityScanner.java

@Override
public void scanHttpResponseReceive(HttpMessage msg, int id, Source source) {
    long start = System.currentTimeMillis();
    Vector<String> stsOption = msg.getResponseHeader().getHeaders(STS_HEADER);
    String metaHSTS = getMetaHSTSEvidence(source);

    if (msg.getRequestHeader().isSecure()) { // No point reporting missing for non-SSL resources
        // Content available via both HTTPS and HTTP is a separate though related issue
        if (stsOption == null) { // Header NOT found
            boolean report = true;
            if (!this.getAlertThreshold().equals(AlertThreshold.LOW)
                    && HttpStatusCode.isRedirection(msg.getResponseHeader().getStatusCode())) {
                // Only report https redirects to the same domain at low threshold
                try {
                    String redirStr = msg.getResponseHeader().getHeader(HttpHeader.LOCATION);
                    URI srcUri = msg.getRequestHeader().getURI();
                    URI redirUri = new URI(redirStr, false);
                    if (redirUri.isRelativeURI() || (redirUri.getScheme().equalsIgnoreCase("https")
                            && redirUri.getHost().equals(srcUri.getHost())
                            && redirUri.getPort() == srcUri.getPort())) {
                        report = false;
                    }
                } catch (Exception e) {
                    // Ignore, so report the missing header
                }
            }
            if (report) {
                raiseAlert(VulnType.HSTS_MISSING, null, msg, id);
            }
        } else if (stsOption.size() > 1) { // More than one header found
            raiseAlert(VulnType.HSTS_MULTIPLE_HEADERS, null, msg, id);
        } else { // Single HSTS header entry
            String stsOptionString = stsOption.get(0);
            Matcher badAgeMatcher = BAD_MAX_AGE_PATT.matcher(stsOptionString);
            Matcher maxAgeMatcher = MAX_AGE_PATT.matcher(stsOptionString);
            Matcher malformedMaxAgeMatcher = MALFORMED_MAX_AGE.matcher(stsOptionString);
            Matcher wellformedMatcher = WELL_FORMED_PATT.matcher(stsOptionString);
            if (!wellformedMatcher.matches()) {
                // Well formed pattern didn't match (perhaps curly quotes or some other unwanted
                // character(s))
                raiseAlert(VulnType.HSTS_MALFORMED_CONTENT, STS_HEADER, msg, id);
            } else if (badAgeMatcher.find()) {
                // Matched BAD_MAX_AGE_PATT, max-age is zero
                raiseAlert(VulnType.HSTS_MAX_AGE_DISABLED, badAgeMatcher.group(), msg, id);
            } else if (!maxAgeMatcher.find()) {
                // Didn't find a digit value associated with max-age
                raiseAlert(VulnType.HSTS_MAX_AGE_MISSING, stsOption.get(0), msg, id);
            } else if (malformedMaxAgeMatcher.find()) {
                // Found max-age but it was malformed
                raiseAlert(VulnType.HSTS_MALFORMED_MAX_AGE, stsOption.get(0), msg, id);
            }
        }
    } else if (AlertThreshold.LOW.equals(this.getAlertThreshold()) && stsOption != null
            && !stsOption.isEmpty()) {
        // isSecure is false at this point
        // HSTS Header found on non-HTTPS response (technically there could be more than one
        // but we only care that there is one or more)
        raiseAlert(VulnType.HSTS_ON_PLAIN_RESP, stsOption.get(0), msg, id);
    }

    if (metaHSTS != null) {
        // HSTS found defined by META tag
        raiseAlert(VulnType.HSTS_META, metaHSTS, msg, id);
    }

    if (logger.isDebugEnabled()) {
        logger.debug("\tScan of record " + id + " took " + (System.currentTimeMillis() - start) + " ms");
    }
}
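
The scanner above combines a null check with isEmpty() before reading stsOption.get(0), because the header lookup returns null when the header is not present. A minimal sketch of that combined check (the lookupHeader helper and header value are hypothetical):

import java.util.Vector;

public class HeaderCheckExample {
    public static void main(String[] args) {
        Vector<String> headerValues = lookupHeader("Strict-Transport-Security");

        // Only read the first value when the Vector exists and has elements.
        if (headerValues != null && !headerValues.isEmpty()) {
            System.out.println("First value: " + headerValues.get(0));
        } else {
            System.out.println("Header absent or empty.");
        }
    }

    // Hypothetical lookup that may return null when the header is missing.
    private static Vector<String> lookupHeader(String name) {
        Vector<String> values = new Vector<>();
        values.add("max-age=31536000");
        return values;
    }
}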

From source file:edu.umn.cs.sthadoop.operations.STRangeQuery.java

public static void rangeQueryOperation(OperationsParams parameters) throws Exception {
    final OperationsParams params = parameters;

    final Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length >= 2 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }
    if (params.get("rect") == null) {
        String x1 = "-" + Double.toString(Double.MAX_VALUE);
        String y1 = "-" + Double.toString(Double.MAX_VALUE);
        String x2 = Double.toString(Double.MAX_VALUE);
        String y2 = Double.toString(Double.MAX_VALUE);
        System.out.println(x1 + "," + y1 + "," + x2 + "," + y2);
        params.set("rect", x1 + "," + y1 + "," + x2 + "," + y2);
        //         System.err.println("You must provide a query range");
        //         printUsage();
        //         System.exit(1);
    }

    if (params.get("interval") == null) {
        System.err.println("Temporal range missing");
        printUsage();
        System.exit(1);
    }

    TextSerializable inObj = params.getShape("shape");
    if (!(inObj instanceof STPoint) && !(inObj instanceof STRectangle)) {
        LOG.error("Shape is not instance of STPoint or STRectangle");
        printUsage();
        System.exit(1);
    }

    // Get spatio-temporal slices.
    List<Path> STPaths = getIndexedSlices(params);
    final Path outPath = params.getOutputPath();
    final Rectangle[] queryRanges = params.getShapes("rect", new Rectangle());

    // All running jobs
    final Vector<Long> resultsCounts = new Vector<Long>();
    Vector<Job> jobs = new Vector<Job>();
    Vector<Thread> threads = new Vector<Thread>();

    long t1 = System.currentTimeMillis();
    for (Path stPath : STPaths) {
        final Path inPath = stPath;
        for (int i = 0; i < queryRanges.length; i++) {
            final OperationsParams queryParams = new OperationsParams(params);
            OperationsParams.setShape(queryParams, "rect", queryRanges[i]);
            if (OperationsParams.isLocal(new JobConf(queryParams), inPath)) {
                // Run in local mode
                final Rectangle queryRange = queryRanges[i];
                final Shape shape = queryParams.getShape("shape");
                final Path output = outPath == null ? null
                        : (queryRanges.length == 1 ? outPath : new Path(outPath, String.format("%05d", i)));
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        FSDataOutputStream outFile = null;
                        final byte[] newLine = System.getProperty("line.separator", "\n").getBytes();
                        try {
                            ResultCollector<Shape> collector = null;
                            if (output != null) {
                                FileSystem outFS = output.getFileSystem(queryParams);
                                final FSDataOutputStream foutFile = outFile = outFS.create(output);
                                collector = new ResultCollector<Shape>() {
                                    final Text tempText = new Text2();

                                    @Override
                                    public synchronized void collect(Shape r) {
                                        try {
                                            tempText.clear();
                                            r.toText(tempText);
                                            foutFile.write(tempText.getBytes(), 0, tempText.getLength());
                                            foutFile.write(newLine);
                                        } catch (IOException e) {
                                            e.printStackTrace();
                                        }
                                    }
                                };
                            } else {
                                outFile = null;
                            }
                            long resultCount = rangeQueryLocal(inPath, queryRange, shape, queryParams,
                                    collector);
                            resultsCounts.add(resultCount);
                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } finally {
                            try {
                                if (outFile != null)
                                    outFile.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                };
                thread.start();
                threads.add(thread);
            } else {
                // Run in MapReduce mode
                Path outTempPath = outPath == null ? null
                        : new Path(outPath, String.format("%05d", i) + "-" + inPath.getName());
                queryParams.setBoolean("background", true);
                Job job = rangeQueryMapReduce(inPath, outTempPath, queryParams);
                jobs.add(job);
            }
        }
    }

    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob);
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            System.exit(1);
        }
        Counters counters = firstJob.getCounters();
        Counter outputRecordCounter = counters.findCounter(Task.Counter.MAP_OUTPUT_RECORDS);
        resultsCounts.add(outputRecordCounter.getValue());
        jobs.remove(0);
    }
    while (!threads.isEmpty()) {
        try {
            Thread thread = threads.firstElement();
            thread.join();
            threads.remove(0);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long t2 = System.currentTimeMillis();
    System.out.println("QueryPlan:");
    for (Path stPath : STPaths) {
        System.out.println(stPath.getName());
    }
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.println("Results counts: " + resultsCounts);
}
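
The range-query example above drains its jobs and threads Vectors with a while (!vector.isEmpty()) loop, taking firstElement() and then removing index 0. A minimal sketch of that drain pattern using plain strings (the job names are illustrative):

import java.util.Vector;

public class DrainLoopExample {
    public static void main(String[] args) {
        Vector<String> jobs = new Vector<>();
        jobs.add("job-00000");
        jobs.add("job-00001");

        // Process the first element and remove it until the Vector is empty.
        while (!jobs.isEmpty()) {
            String job = jobs.firstElement();
            System.out.println("Waiting for " + job);
            jobs.remove(0);
        }
    }
}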

From source file:net.sourceforge.floggy.persistence.Weaver.java

/**
 * Regenerates the <code>init()</code> method of
 * <code>PersistableMetadataManager</code> from the processed persistable
 * metadata and adds the rebuilt class to the output class pool.
 *
 * @throws CannotCompileException if the generated method body cannot be compiled
 * @throws IOException if an I/O error occurs while handling the class
 * @throws NotFoundException if the metadata manager class is not found in the classpath pool
 */
protected void addPersistableMetadataManagerClass()
        throws CannotCompileException, IOException, NotFoundException {
    alreadyProcessedMetadatas.addAll(configuration.getPersistableMetadatas());

    Set metadatas = alreadyProcessedMetadatas;
    StringBuffer buffer = new StringBuffer();

    buffer.append("public static void init() throws Exception {\n");
    buffer.append("rmsBasedMetadatas = new java.util.Hashtable();\n");
    buffer.append("classBasedMetadatas = new java.util.Hashtable();\n");
    buffer.append("java.util.Hashtable persistableImplementations = null;\n");
    buffer.append("java.util.Vector indexMetadatas = null;\n");
    buffer.append("java.util.Vector fields = null;\n");

    for (Iterator iterator = metadatas.iterator(); iterator.hasNext();) {
        PersistableMetadata metadata = (PersistableMetadata) iterator.next();
        boolean isAbstract = metadata.isAbstract();
        String className = metadata.getClassName();
        String superClassName = metadata.getSuperClassName();
        String[] fieldNames = metadata.getFieldNames();
        int[] fieldTypes = metadata.getFieldTypes();
        Hashtable persistableImplementations = metadata.getPersistableImplementations();
        String recordStoreName = metadata.getRecordStoreName();
        int persistableStrategy = metadata.getPersistableStrategy();
        Vector indexMetadatas = metadata.getIndexMetadatas();
        String[] descendents = metadata.getDescendents();

        StringBuffer fieldNamesBuffer = new StringBuffer("new String[");
        StringBuffer fieldTypesBuffer = new StringBuffer("new int[");
        boolean addHeader = true;

        for (int i = 0; i < fieldNames.length; i++) {
            if (addHeader) {
                fieldNamesBuffer.append("]{");
                fieldTypesBuffer.append("]{");
                addHeader = false;
            }

            fieldNamesBuffer.append("\"");
            fieldNamesBuffer.append(fieldNames[i]);
            fieldNamesBuffer.append("\",");

            fieldTypesBuffer.append(fieldTypes[i]);
            fieldTypesBuffer.append(",");
        }

        if (addHeader) {
            fieldNamesBuffer.append("0]");
            fieldTypesBuffer.append("0]");
        } else {
            fieldNamesBuffer.deleteCharAt(fieldNamesBuffer.length() - 1);
            fieldNamesBuffer.append("}");
            fieldTypesBuffer.deleteCharAt(fieldTypesBuffer.length() - 1);
            fieldTypesBuffer.append("}");
        }

        if ((persistableImplementations != null) && !persistableImplementations.isEmpty()) {
            buffer.append("persistableImplementations = new java.util.Hashtable();\n");

            Enumeration enumeration = persistableImplementations.keys();

            while (enumeration.hasMoreElements()) {
                String fieldName = (String) enumeration.nextElement();
                String classNameOfField = (String) persistableImplementations.get(fieldName);
                buffer.append("persistableImplementations.put(\"");
                buffer.append(fieldName);
                buffer.append("\", \"");
                buffer.append(classNameOfField);
                buffer.append("\");\n");
            }
        } else {
            buffer.append("persistableImplementations = null;\n");
        }

        if ((indexMetadatas != null) && !indexMetadatas.isEmpty()) {
            buffer.append("indexMetadatas = new java.util.Vector();\n");

            Enumeration enumeration = indexMetadatas.elements();

            while (enumeration.hasMoreElements()) {
                IndexMetadata indexMetadata = (IndexMetadata) enumeration.nextElement();

                buffer.append("fields = new java.util.Vector();\n");

                Vector fields = indexMetadata.getFields();

                for (int j = 0; j < fields.size(); j++) {
                    buffer.append("fields.addElement(\"");
                    buffer.append(fields.elementAt(j));
                    buffer.append("\");\n");
                }

                buffer.append(
                        "indexMetadatas.addElement(new net.sourceforge.floggy.persistence.impl.IndexMetadata(\"");
                buffer.append(indexMetadata.getRecordStoreName());
                buffer.append("\", \"");
                buffer.append(indexMetadata.getName());
                buffer.append("\", fields));\n");
            }
        } else {
            buffer.append("indexMetadatas = null;\n");
        }

        StringBuffer descendentsBuffer = new StringBuffer("new String[");
        addHeader = true;

        if (descendents != null) {
            for (int i = 0; i < descendents.length; i++) {
                if (addHeader) {
                    descendentsBuffer.append("]{");
                    addHeader = false;
                }

                descendentsBuffer.append("\"");
                descendentsBuffer.append(descendents[i]);
                descendentsBuffer.append("\",");
            }
        }

        if (addHeader) {
            descendentsBuffer.append("0]");
        } else {
            descendentsBuffer.deleteCharAt(descendentsBuffer.length() - 1);
            descendentsBuffer.append("}");
        }

        buffer.append("classBasedMetadatas.put(\"" + className
                + "\", new net.sourceforge.floggy.persistence.impl.PersistableMetadata(" + isAbstract + ", \""
                + className + "\", "
                + ((superClassName != null) ? ("\"" + superClassName + "\", ") : ("null, "))
                + fieldNamesBuffer.toString() + ", " + fieldTypesBuffer.toString()
                + ", persistableImplementations, indexMetadatas, " + "\"" + recordStoreName + "\", "
                + persistableStrategy + ", " + descendentsBuffer.toString() + "));\n");
    }

    buffer.append("load();\n");
    buffer.append("}\n");

    CtClass ctClass = this.classpathPool
            .get("net.sourceforge.floggy.persistence.impl.PersistableMetadataManager");
    CtMethod[] methods = ctClass.getMethods();

    for (int i = 0; i < methods.length; i++) {
        if (methods[i].getName().equals("init")) {
            ctClass.removeMethod(methods[i]);
        }
    }

    ctClass.addMethod(CtNewMethod.make(buffer.toString(), ctClass));
    embeddedClassesOutputPool.addClass(ctClass);
}
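
The weaver above emits code only when a Vector is both non-null and non-empty, as in its (indexMetadatas != null) && !indexMetadatas.isEmpty() branch. A minimal sketch of that pattern (the loadIndexMetadatas helper and its contents are hypothetical):

import java.util.Enumeration;
import java.util.Vector;

public class EnumerateIfPresentExample {
    public static void main(String[] args) {
        Vector<String> indexMetadatas = loadIndexMetadatas();

        // Enumerate only when the Vector is present and has elements.
        if ((indexMetadatas != null) && !indexMetadatas.isEmpty()) {
            Enumeration<String> names = indexMetadatas.elements();
            while (names.hasMoreElements()) {
                System.out.println(names.nextElement());
            }
        } else {
            System.out.println("No index metadata.");
        }
    }

    // Hypothetical loader that may return null when nothing is configured.
    private static Vector<String> loadIndexMetadatas() {
        Vector<String> metadatas = new Vector<>();
        metadatas.add("byName");
        return metadatas;
    }
}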

From source file:com.duroty.application.chat.manager.ChatManager.java

/**
 * Refreshes the user's presence, collects any pending conversation messages
 * and the buddy list (online buddies first), and returns the result as a
 * JSON string.
 *
 * @param hsession Hibernate session used for the queries
 * @param username name of the user sending the ping
 * @param away away state reported by the client
 *
 * @return a JSON string with the user's state, pending messages and buddies
 *
 * @throws ChatException if the user is not logged in or the update fails
 */
public String ping(Session hsession, String username, int away) throws ChatException {
    try {
        Vector buddiesOnline = new Vector();
        Vector buddiesOffline = new Vector();
        Vector buddies = new Vector();
        Vector messages = new Vector();
        JSONObject json = new JSONObject();

        Users user = getUser(hsession, username);

        if (user.getUseIsOnline() >= 3) {
            user.setUseLastPing(new Date());
            hsession.update(user);
            hsession.flush();
        } else if (user.getUseLastState() > 0) {
            user.setUseIsOnline(user.getUseLastState());
            user.setUseLastPing(new Date());
            hsession.update(user);
            hsession.flush();
        } else {
            user.setUseIsOnline(0);
            user.setUseCustomMessage(null);
            user.setUseLastPing(new Date());
            hsession.update(user);
            hsession.flush();

            throw new NotLoggedInException();
        }

        json.put("state", user.getUseIsOnline());
        json.put("lastState", user.getUseLastState());
        json.put("awayMessage", user.getUseCustomMessage());

        Criteria crit2 = hsession.createCriteria(Conversations.class);
        crit2.add(Restrictions.eq("usersByConvRecipientIdint", user));

        ScrollableResults scroll2 = crit2.scroll();

        int numMessages = 0;

        while (scroll2.next()) {
            Conversations conversations = (Conversations) scroll2.get(0);
            messages.addElement(new ConversationsObj(conversations.getUsersByConvSenderIdint().getUseUsername(),
                    conversations.getConvMessage()));

            hsession.delete(conversations);

            numMessages++;
        }

        hsession.flush();

        json.put("numMessages", new Integer(numMessages));
        json.put("messages", messages);

        Criteria crit1 = hsession.createCriteria(BuddyList.class);
        crit1.add(Restrictions.eq("usersByBuliOwnerIdint", user));
        crit1.add(Restrictions.eq("buliActive", new Boolean(true)));
        crit1.addOrder(Order.desc("buliLastDate"));

        ScrollableResults scroll1 = crit1.scroll();

        while (scroll1.next()) {
            BuddyList buddyList = (BuddyList) scroll1.get(0);
            Users buddy = buddyList.getUsersByBuliBuddyIdint();

            if (buddy.isUseActive()) {
                String name = buddy.getUseName();

                if (StringUtils.isBlank(name)) {
                    name = buddy.getUseUsername();
                }

                if (buddy.getUseIsOnline() == 0) {
                    buddiesOffline
                            .addElement(new BuddyObj(name, buddy.getUseUsername(), buddy.getUseIsOnline()));
                } else {
                    buddiesOnline
                            .addElement(new BuddyObj(name, buddy.getUseUsername(), buddy.getUseIsOnline()));
                }
            }
        }

        if (!buddiesOnline.isEmpty()) {
            buddies.addAll(buddiesOnline);
        }

        if (!buddiesOffline.isEmpty()) {
            buddies.addAll(buddiesOffline);
        }

        json.put("buddy", buddies);

        return json.toString();
    } catch (Exception e) {
        throw new ChatException(e);
    } finally {
        GeneralOperations.closeHibernateSession(hsession);
    }
}
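
The chat example above appends each buddy Vector to the combined list only when it has elements, keeping online buddies ahead of offline ones. A minimal sketch of that isEmpty()-guarded merge (the buddy names are illustrative):

import java.util.Vector;

public class MergeIfNotEmptyExample {
    public static void main(String[] args) {
        Vector<String> buddiesOnline = new Vector<>();
        Vector<String> buddiesOffline = new Vector<>();
        Vector<String> buddies = new Vector<>();

        buddiesOnline.add("alice");
        buddiesOffline.add("bob");

        // Append non-empty groups in order: online first, then offline.
        if (!buddiesOnline.isEmpty()) {
            buddies.addAll(buddiesOnline);
        }
        if (!buddiesOffline.isEmpty()) {
            buddies.addAll(buddiesOffline);
        }

        System.out.println(buddies); // prints [alice, bob]
    }
}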