Example usage for java.util.Vector.clear()

List of usage examples for java.util.Vector.clear()

Introduction

On this page you can find example usage of java.util.Vector.clear().

Prototype

public void clear() 

Document

Removes all of the elements from this Vector.
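Before the project examples below, a minimal self-contained sketch of the basic behavior (class and variable names here are illustrative):

import java.util.Vector;

public class VectorClearDemo {
    public static void main(String[] args) {
        Vector<String> items = new Vector<String>();
        items.add("a");
        items.add("b");
        System.out.println(items.size());    // prints 2

        // Removes all elements; the size becomes 0, the underlying capacity is unchanged.
        items.clear();
        System.out.println(items.size());    // prints 0
        System.out.println(items.isEmpty()); // prints true
    }
}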

Usage

From source file:org.zywx.wbpalmstar.engine.universalex.EUExWindow.java

/**
 *
 * @param params
 */
private void closePluginViewContainerMsg(String[] params) {
    if (params == null || params.length < 1) {
        errorCallback(0, 0, "error params!");
        return;
    }
    try {
        JSONObject json = new JSONObject(params[0]);
        String opid = json.getString("id");

        EBrowserWindow mWindow = mBrwView.getBrowserWindow();
        int count = mWindow.getChildCount();
        for (int i = 0; i < count; i++) {
            View view = mWindow.getChildAt(i);
            if (view instanceof ContainerViewPager) {
                ContainerViewPager pager = (ContainerViewPager) view;
                if (opid.equals(pager.getContainerVO().getId())) {
                    removeViewFromCurrentWindow(pager);
                    ContainerAdapter adapter = (ContainerAdapter) pager.getAdapter();
                    Vector<FrameLayout> views = adapter.getViewList();
                    int size = views.size();
                    for (int j = 0; j < size; j++) {
                        views.get(j).removeAllViews();
                    }
                    views.clear();
                    pager = null;
                    String js = SCRIPT_HEADER + "if(" + function_cbClosePluginViewContainer + "){"
                            + function_cbClosePluginViewContainer + "(" + opid + "," + EUExCallback.F_C_TEXT
                            + ",'" + "success" + "'" + SCRIPT_TAIL;
                    onCallback(js);
                    return;
                }
            } //end instance
        } //end for
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.commoncrawl.hadoop.mergeutils.SequenceFileMerger.java

/**
 * merge the inputs
 * 
 * @param reporter
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public void mergeAndSpill(final Reporter reporter) throws IOException {
    long sortStartTime = System.currentTimeMillis();

    // allocate our sort array
    MergeResultSegment<KeyType, ValueType> sortArray[] = new MergeResultSegment[_segmentList.size() + 1];

    KeyType lastCombinerKey = null;
    Vector<ValueType> valueBuffer = new Vector<ValueType>();

    while (_segmentList.getHead() != null) {
        MergeResultSegment<KeyType, ValueType> spillSegment = null;
        try {
            // get the head element
            spillSegment = _segmentList.removeHead();
            // and spill its current key/value pair ...
            // LOG.info("Spilling Segment:" + spillSegment.getName() + " Key:" +
            // spillSegment.getKey().toString());
            // LOG.info("Spilling Segment:" + spillSegment.getName() + " Key:" +
            // spillSegment.getKey().toString());
            // if no combiner spill directly ...
            if (_optionalCombiner == null) {
                _mergedRecordCount++;
                // ok in the case of optimized keys ...
                if (_optimizedKeyGenerator != null) {
                    // spill only the raw key, skipping the optimized key part ...

                    /*
                     * LOG.info("Spilling Record From Segment:" + spillSegment.getName()
                     * + " OptKeyValue:" +
                     * spillSegment.getOptimizedKey().getLongKeyValue() + " HeaderSize:"
                     * + spillSegment.getOptimizedKey().getHeaderSize() + " KeySize:" +
                     * (spillSegment.getRawKeyData().getLength() -
                     * spillSegment.getOptimizedKey().getHeaderSize() - 4) +
                     * " KeyDataLength:" + spillSegment.getRawKeyData().getLength() );
                     */

                    // ok segments with optimized keys have {optimized key header} +
                    // {original-key-len} preceding the actual key bytes
                    // and optional buffer data at tail end of value

                    _rawWriter.spillRawRecord(spillSegment.getRawKeyData().getData(),
                            spillSegment.getOptimizedKey().getHeaderSize() + 4,
                            spillSegment.getRawKeyData().getLength()
                                    - spillSegment.getOptimizedKey().getHeaderSize() - 4,
                            spillSegment.getRawValueData().getData(), 0,
                            spillSegment.getRawValueData().getLength()
                                    - spillSegment.getOptimizedKey().getDataBufferSize());

                } else if (_rawComparator != null) {
                    _rawWriter.spillRawRecord(spillSegment.getRawKeyData().getData(), 0,
                            spillSegment.getRawKeyData().getLength(), spillSegment.getRawValueData().getData(),
                            0, spillSegment.getRawValueData().getLength());
                } else {
                    _writer.spillRecord(spillSegment.getKey(), spillSegment.getValue());
                }
            } else {
                if (valueBuffer.size() != 0 && lastCombinerKey.compareTo(spillSegment.getKey()) != 0) {

                    // LOG.info("DEBUG:Spilling Combined Values for Key:" +
                    // lastCombinerKey.toString() + " Value Count:" +
                    // valueBuffer.size());
                    // combine and flush last set of values ...
                    _mergedRecordCount++;
                    _writer.spillRecord(lastCombinerKey,
                            _optionalCombiner.combineValues(lastCombinerKey, valueBuffer));
                    // clear accumulation buffer
                    valueBuffer.clear();
                }
                if (valueBuffer.size() == 0) {
                    // set current key as lastKey
                    lastCombinerKey = spillSegment.getKey();
                }
                // add value to buffer
                valueBuffer.add(spillSegment.getValue());
            }
            // and see if there is a next item for the spilled segment
            if (spillSegment.next()) {
                _inputRecordCount++;
                // yes, ok insert it back into the list at the appropriate position
                // ...
                if (_segmentList.size() == 0) {
                    _segmentList.addHead(spillSegment);
                } else {
                    // first convert existing list to array
                    addItemsToArray(sortArray, _segmentList);
                    // next find insertion position
                    MergeResultSegment<KeyType, ValueType> insertionPos = _findInsertionPos(spillSegment,
                            sortArray, _segmentList.size());
                    // if null, add to head ...
                    if (insertionPos == null) {
                        // LOG.info("DEBUG:Adding Key:" + spillSegment.getKey().toString()
                        // + " Before:" + _segmentList.getHead().getKey().toString());
                        _segmentList.addHead(spillSegment);
                    } else {
                        // LOG.info("DEBUG:Adding Key:" + spillSegment.getKey().toString()
                        // + " After:" + insertionPos.getKey().toString());
                        _segmentList.insertAfter(insertionPos, spillSegment);
                    }
                }
            }
            // otherwise ...
            else {
                // close the segment
                // LOG.info("Segment:" + spillSegment.getName() +
                // " Exhausted. Closing");
                try {
                    spillSegment.close();
                } catch (IOException e) {
                    LOG.error("Segment:" + spillSegment.getName() + " Exception:"
                            + CCStringUtils.stringifyException(e));
                } finally {
                    _completedSegmentCount++;
                }
            }
        } catch (IOException e) {
            LOG.error(CCStringUtils.stringifyException(e));
            if (spillSegment != null) {
                LOG.error("Error during splill of segment:" + spillSegment.getName() + " Exception:"
                        + CCStringUtils.stringifyException(e));
            }
        }

        if (_mergedRecordCount % 100000 == 0) {
            updateProgress(reporter);
            LOG.info("Merged " + _mergedRecordCount + " Items");
        }
    }

    updateProgress(reporter);

    // now, if combiner is not null and there is a value buffered up ..
    if (_optionalCombiner != null && valueBuffer.size() != 0) {
        _mergedRecordCount++;
        // combine and flush last set of values ...
        _writer.spillRecord(lastCombinerKey, _optionalCombiner.combineValues(lastCombinerKey, valueBuffer));
        // clear combiner buffer ..
        valueBuffer.clear();
    }
    LOG.info("Merge took:" + (System.currentTimeMillis() - sortStartTime) + " InputRecordCount:"
            + _inputRecordCount + " MergedRecordCount:" + _mergedRecordCount);
}
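The merger above reuses a single Vector as the combiner's accumulation buffer, calling clear() between key groups rather than allocating a new list for each key. A stripped-down sketch of that accumulate-then-clear pattern follows; the key grouping and the "flush" step are simplified placeholders, not the project's API:

import java.util.List;
import java.util.Vector;

public class AccumulateAndClear {
    // Sums values per key from a (key, value) stream that is already sorted by key.
    public static void process(List<String> keys, List<Integer> values) {
        Vector<Integer> buffer = new Vector<Integer>();
        String lastKey = null;
        for (int i = 0; i < keys.size(); i++) {
            String key = keys.get(i);
            if (!buffer.isEmpty() && !key.equals(lastKey)) {
                flush(lastKey, buffer);
                buffer.clear(); // reuse the same Vector for the next key group
            }
            if (buffer.isEmpty()) {
                lastKey = key;
            }
            buffer.add(values.get(i));
        }
        if (!buffer.isEmpty()) {
            flush(lastKey, buffer); // final group
        }
    }

    private static void flush(String key, Vector<Integer> groupValues) {
        int sum = 0;
        for (int v : groupValues) {
            sum += v;
        }
        System.out.println(key + " -> " + sum);
    }
}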

From source file:org.unitime.timetable.solver.exam.ui.ExamInfoModel.java

protected Vector<ExamRoomInfo> findRooms(ExamPeriod period, int minRoomSize, int maxRoomSize, String filter,
        boolean allowConflicts) {
    Vector<ExamRoomInfo> rooms = new Vector<ExamRoomInfo>();
    boolean reqRoom = false;
    boolean reqBldg = false;
    boolean reqGroup = false;

    Exam exam = getExam().getExam(new ExamDAO().getSession());
    Set<Long> canShareRoom = getCanShareRoomExams(getExam().getExamId());

    Set groupPrefs = exam.getPreferences(RoomGroupPref.class);
    Set roomPrefs = exam.getPreferences(RoomPref.class);
    Set bldgPrefs = exam.getPreferences(BuildingPref.class);
    Set featurePrefs = exam.getPreferences(RoomFeaturePref.class);

    TreeSet locations = findAllExamLocations(period.getSession().getUniqueId(),
            period.getExamType().getUniqueId());
    Hashtable<Long, Set<Long>> locationTable = Location.findExamLocationTable(period.getUniqueId());

    if (getExamAssignment() != null) {
        if (getExamAssignment().getPeriod().equals(period) && getExamAssignment().getRooms() != null)
            for (ExamRoomInfo room : getExamAssignment().getRooms()) {
                Set<Long> exams = locationTable.get(room.getLocationId());
                if (exams != null)
                    exams.remove(getExam().getExamId());
            }
    }
    if (iChange != null) {
        for (ExamAssignment conflict : iChange.getConflicts()) {
            if (conflict.getPeriod().equals(period) && conflict.getRooms() != null)
                for (ExamRoomInfo room : conflict.getRooms()) {
                    Set<Long> exams = locationTable.get(room.getLocationId());
                    if (exams != null)
                        exams.remove(conflict.getExamId());
                }
        }
        for (ExamAssignment current : iChange.getAssignments()) {
            ExamAssignment initial = iChange.getInitial(current);
            if (initial != null && initial.getPeriod().equals(period) && initial.getRooms() != null)
                for (ExamRoomInfo room : initial.getRooms()) {
                    Set<Long> exams = locationTable.get(room.getLocationId());
                    if (exams != null)
                        exams.remove(initial.getExamId());
                }
        }
        for (ExamAssignment current : iChange.getAssignments()) {
            if (!iExam.getExamId().equals(current.getExamId()) && current.getPeriod().equals(period)
                    && current.getRooms() != null)
                for (ExamRoomInfo room : current.getRooms()) {
                    Set<Long> exams = locationTable.get(room.getLocationId());
                    if (exams == null) {
                        exams = new HashSet<Long>();
                        locationTable.put(room.getLocationId(), exams);
                    }
                    exams.add(current.getExamId());
                }
        }
    }

    rooms: for (Iterator i1 = locations.iterator(); i1.hasNext();) {
        Location room = (Location) i1.next();

        boolean shouldNotBeUsed = PreferenceLevel.sStronglyDiscouraged
                .equals(room.getExamPreference(period).getPrefProlog());

        boolean add = true;

        PreferenceCombination pref = new SumPreferenceCombination();

        // --- group preference ----------
        PreferenceCombination groupPref = PreferenceCombination.getDefault();
        for (Iterator i2 = groupPrefs.iterator(); i2.hasNext();) {
            RoomGroupPref p = (RoomGroupPref) i2.next();
            if (p.getRoomGroup().getRooms().contains(room))
                groupPref.addPreferenceProlog(p.getPrefLevel().getPrefProlog());
        }

        if (groupPref.getPreferenceProlog().equals(PreferenceLevel.sProhibited))
            add = false;

        if (reqGroup && !groupPref.getPreferenceProlog().equals(PreferenceLevel.sRequired))
            add = false;

        if (!reqGroup && (groupPref.getPreferenceProlog().equals(PreferenceLevel.sRequired))) {
            reqGroup = true;
            rooms.clear();
        }

        if (!groupPref.getPreferenceProlog().equals(PreferenceLevel.sProhibited)
                && !groupPref.getPreferenceProlog().equals(PreferenceLevel.sRequired))
            pref.addPreferenceProlog(groupPref.getPreferenceProlog());

        // --- room preference ------------
        String roomPref = room.getExamPreference(period).getPrefProlog();

        for (Iterator i2 = roomPrefs.iterator(); i2.hasNext();) {
            RoomPref p = (RoomPref) i2.next();
            if (room.equals(p.getRoom())) {
                roomPref = p.getPrefLevel().getPrefProlog();
                shouldNotBeUsed = false;
                break;
            }
        }

        if (roomPref != null && roomPref.equals(PreferenceLevel.sProhibited))
            add = false;

        if (reqRoom && (roomPref == null || !roomPref.equals(PreferenceLevel.sRequired)))
            add = false;

        if (!reqRoom && (roomPref != null && roomPref.equals(PreferenceLevel.sRequired))) {
            reqRoom = true;
            rooms.clear();
        }

        if (roomPref != null && !roomPref.equals(PreferenceLevel.sProhibited)
                && !roomPref.equals(PreferenceLevel.sRequired))
            pref.addPreferenceProlog(roomPref);

        // --- building preference ------------
        Building bldg = (room instanceof Room ? ((Room) room).getBuilding() : null);

        String bldgPref = null;
        for (Iterator i2 = bldgPrefs.iterator(); i2.hasNext();) {
            BuildingPref p = (BuildingPref) i2.next();
            if (bldg != null && bldg.equals(p.getBuilding())) {
                bldgPref = p.getPrefLevel().getPrefProlog();
                break;
            }
        }

        if (bldgPref != null && bldgPref.equals(PreferenceLevel.sProhibited))
            add = false;

        if (reqBldg && (bldgPref == null || !bldgPref.equals(PreferenceLevel.sRequired)))
            add = false;

        if (!reqBldg && (bldgPref != null && bldgPref.equals(PreferenceLevel.sRequired))) {
            reqBldg = true;
            rooms.clear();
        }

        if (bldgPref != null && !bldgPref.equals(PreferenceLevel.sProhibited)
                && !bldgPref.equals(PreferenceLevel.sRequired))
            pref.addPreferenceProlog(bldgPref);

        // --- room features preference --------  
        boolean acceptableFeatures = true;
        PreferenceCombination featurePref = new MinMaxPreferenceCombination();
        for (Iterator i2 = featurePrefs.iterator(); i2.hasNext();) {
            RoomFeaturePref roomFeaturePref = (RoomFeaturePref) i2.next();
            RoomFeature feature = roomFeaturePref.getRoomFeature();
            String p = roomFeaturePref.getPrefLevel().getPrefProlog();

            boolean hasFeature = feature.getRooms().contains(room);
            if (p.equals(PreferenceLevel.sProhibited) && hasFeature) {
                acceptableFeatures = false;
            }
            if (p.equals(PreferenceLevel.sRequired) && !hasFeature) {
                acceptableFeatures = false;
            }
            if (p != null && hasFeature && !p.equals(PreferenceLevel.sProhibited)
                    && !p.equals(PreferenceLevel.sRequired))
                featurePref.addPreferenceProlog(p);
        }
        pref.addPreferenceInt(featurePref.getPreferenceInt());

        if (!acceptableFeatures)
            add = false;

        if (!add || shouldNotBeUsed)
            continue;

        Set<Long> exams = locationTable.get(room.getUniqueId());
        boolean roomConflict = false;
        if (exams != null && !exams.isEmpty()) {
            for (Long other : exams) {
                if (!canShareRoom.contains(other)) {
                    roomConflict = true;
                    if (!allowConflicts)
                        continue rooms;
                    if (iChange != null && iChange.getCurrent(other) != null)
                        continue rooms;
                }
            }
        }

        int cap = (getExam().getSeatingType() == Exam.sSeatingTypeExam ? room.getExamCapacity()
                : room.getCapacity());
        if (minRoomSize >= 0 && cap < minRoomSize)
            continue;
        if (maxRoomSize >= 0 && cap > maxRoomSize)
            continue;

        if (PreferenceLevel.sProhibited.equals(room.getExamPreference(period).getPrefProlog()))
            continue;

        if (!match(room.getLabel(), filter))
            continue;

        if (RoomAvailability.getInstance() != null) {
            Collection<TimeBlock> times = RoomAvailability.getInstance().getRoomAvailability(room.getUniqueId(),
                    period.getStartTime(), period.getEndTime(),
                    period.getExamType().getType() == ExamType.sExamTypeFinal
                            ? RoomAvailabilityInterface.sFinalExamType
                            : RoomAvailabilityInterface.sMidtermExamType);
            if (times != null)
                for (TimeBlock time : times) {
                    if (period.overlap(time)) {
                        sLog.info("Room " + room.getLabel() + " is not avaiable due to " + time);
                        continue rooms;
                    }
                }
        }

        rooms.add(new ExamRoomInfo(room, (roomConflict ? 1000 : 0) + pref.getPreferenceInt()));
    }

    return rooms;
}
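In the example above, rooms.clear() discards every candidate collected so far whenever a required room, building, or group preference is first encountered, so that only locations satisfying the requirement remain. A minimal sketch of that reset-on-requirement pattern; the Candidate type and its "required" flag are illustrative, not part of the original code:

import java.util.Vector;

public class RequiredPreferenceFilter {
    static class Candidate {
        final String name;
        final boolean required;
        Candidate(String name, boolean required) {
            this.name = name;
            this.required = required;
        }
    }

    public static Vector<Candidate> select(Vector<Candidate> input) {
        Vector<Candidate> accepted = new Vector<Candidate>();
        boolean requiredSeen = false;
        for (Candidate c : input) {
            if (requiredSeen && !c.required) {
                continue; // once a required candidate exists, ignore non-required ones
            }
            if (!requiredSeen && c.required) {
                requiredSeen = true;
                accepted.clear(); // drop everything accepted before the requirement appeared
            }
            accepted.add(c);
        }
        return accepted;
    }
}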

From source file:edu.stanford.cfuller.imageanalysistools.clustering.ObjectClustering.java

/**
 * Applies the complex clustering to an Image with objects that have already been grouped into initial guesses of clusters.
 *
 * This will use the cluster guesses as a starting point and attempt to subdivide these clusters using Gaussian mixture model clustering on each cluster individually.
 *
 * @param input             An Image mask labeled such that each object in the Image is assigned a unique nonzero greylevel value.  These should start at 1 and be consecutive.
 * @param original          The original image (not currently used... this is here to maintain the interface with a previous version that used this image)
 * @param maxClusters       A rough upper bound to the number of clusters expected in the Image.  More clusters than this may be found, but if a clustering iteration finds more clusters than this as the best solution, it will terminate the clustering.
 * @param clusterImage      A version of the Image mask relabeled such that each object in the Image is assigned a greylevel value corresponding to its cluster.  Each cluster should have a unique value, these should start at 1, and they should be consecutive.
 */
public static void doClusteringWithInitializedClusters(WritableImage input, Image original, int maxClusters,
        Image clusterImage) {

    //input.writeToFile("/Users/cfuller/Desktop/filter_intermediates/input.ome.tif");
    //original.writeToFile("/Users/cfuller/Desktop/filter_intermediates/original.ome.tif");
    //clusterImage.writeToFile("/Users/cfuller/Desktop/filter_intermediates/clusterImage.ome.tif");

    final double interdist_cutoff = 0.89;

    Vector<ClusterObject> clusterObjects = new Vector<ClusterObject>();
    Vector<Cluster> clusters = new Vector<Cluster>();

    double bestRatio = 1.0;

    int bestK = 0;

    Image bestImage = null;

    double overallMaxL = 1.0 * Double.MAX_VALUE;

    int repeatThis = 0;
    int numRepeats = 3;

    RelabelFilter rlf = new RelabelFilter();

    WritableImage origCopy = ImageFactory.createWritable(clusterImage);

    Histogram h_ssf = new Histogram(origCopy);

    int k_init = h_ssf.getMaxValue();

    int numAttempts = 0;

    int lastBestK = 0;

    for (int k = 1; k <= maxClusters; k++) {

        numAttempts++;

        int orig_k = k;

        double interdist;

        double currMaxL = -1.0 * Double.MAX_VALUE;

        int n = 0;

        double L = -1.0 * Double.MAX_VALUE;

        Image candidateNewBestImage = null;

        if (numAttempts == 1 || bestImage == null) {

            k = k_init;

            bestImage = ImageFactory.create(origCopy);

            bestK = k_init;

        }

        candidateNewBestImage = ImageFactory.createShallow(bestImage);

        Histogram h = new Histogram(bestImage);

        int currentMaxImageValue = h.getMaxValue();

        double sumL = 0;

        WritableImage singleCluster = ImageFactory.createWritable(input);

        WritableImage dividedClusterTemp = ImageFactory.createWritable(singleCluster);

        for (int clusterNumber = 1; clusterNumber <= h.getMaxValue(); clusterNumber++) {

            singleCluster.copy(input);

            dividedClusterTemp.copy(singleCluster);

            ImageCoordinate clusterMin = ImageCoordinate.cloneCoord(singleCluster.getDimensionSizes());
            ImageCoordinate clusterMax = ImageCoordinate.createCoordXYZCT(0, 0, 0, 0, 0);

            for (ImageCoordinate i : singleCluster) {

                if (bestImage.getValue(i) != clusterNumber) {

                    singleCluster.setValue(i, 0);

                } else {

                    //find the min and max bounds of this cluster

                    for (Integer dim : i) {
                        if (i.get(dim) < clusterMin.get(dim)) {
                            clusterMin.set(dim, i.get(dim));
                        }
                        if (i.get(dim) >= clusterMax.get(dim)) {
                            clusterMax.set(dim, i.get(dim) + 1);
                        }
                    }

                }
            }

            singleCluster.setBoxOfInterest(clusterMin, clusterMax);

            rlf.apply(singleCluster);

            Histogram hSingleCluster = new Histogram(singleCluster);

            int nSingleCluster = hSingleCluster.getMaxValue();

            boolean accepted = false;

            double tempBestRatio = Double.MAX_VALUE;

            double tempBestL = 0;

            int origCurrentMaxImageValue = currentMaxImageValue;

            WritableImage tempCandidateNewBestImage = ImageFactory.createWritable(candidateNewBestImage);

            int kMax = ((bestK < 3) ? 6 : 4);

            if (kMax > nSingleCluster) {
                kMax = nSingleCluster;
            }

            for (int tempK = 2; tempK < kMax; tempK++) {

                //java.util.logging.Logger.getLogger("edu.stanford.cfuller.imageanalysistools").info("tempK: " + Integer.toString(tempK));

                for (int repeatCounter = 0; repeatCounter < numRepeats; repeatCounter++) {

                    boolean accept = false;

                    int tempCurrentMaxImageValue = origCurrentMaxImageValue;

                    n = initializeObjectsAndClustersFromImage(singleCluster, clusterObjects, clusters, tempK);

                    L = DEGaussianMixtureModelClustering.go(singleCluster, clusterObjects, clusters, tempK, n);

                    interdist = getInterClusterDistances(clusterObjects, clusters, tempK, n);

                    if (interdist < interdist_cutoff && interdist < tempBestRatio) {

                        accept = true;
                        accepted = true;
                        tempBestRatio = interdist;

                        tempBestL = L;

                    }

                    if (accept) {

                        dividedClusterTemp.copy(singleCluster);

                        dividedClusterTemp.setBoxOfInterest(clusterMin, clusterMax);

                        clustersToMask(dividedClusterTemp, clusterObjects, clusters, tempK);

                        //dividedClusterTemp.writeToFile("/Users/cfuller/Desktop/filter_intermediates/divided_" + clusterNumber + ".ome.tif");

                        int newClusterValue = tempCurrentMaxImageValue;

                        tempCandidateNewBestImage.copy(candidateNewBestImage);

                        for (ImageCoordinate i : singleCluster) {

                            if (dividedClusterTemp.getValue(i) > 1) {

                                tempCandidateNewBestImage.setValue(i,
                                        newClusterValue + dividedClusterTemp.getValue(i) - 1);

                            }
                        }

                        tempCurrentMaxImageValue = newClusterValue + tempK - 1;

                        currentMaxImageValue = tempCurrentMaxImageValue;
                    }

                    clusterObjects.clear();
                    clusters.clear();

                }

            }

            if (accepted) {

                sumL += tempBestL;
                candidateNewBestImage = tempCandidateNewBestImage;
            } else {

                if (nSingleCluster > 0) {

                    n = initializeObjectsAndClustersFromImage(singleCluster, clusterObjects, clusters, 1);
                    sumL += DEGaussianMixtureModelClustering.go(singleCluster, clusterObjects, clusters, 1, n);

                }

                clusterObjects.clear();

                clusters.clear();

            }

            dividedClusterTemp.clearBoxOfInterest();
            singleCluster.clearBoxOfInterest();
            clusterMin.recycle();
            clusterMax.recycle();

        }

        k = currentMaxImageValue;

        n = initializeObjectsAndClustersFromClusterImage(input, candidateNewBestImage, clusterObjects, clusters,
                k);
        L = sumL;

        double tempL = -1.0 * L;

        if (numAttempts == 1) {

        } else {
        }

        interdist = getInterClusterDistances(clusterObjects, clusters, clusters.size(), clusterObjects.size());

        if (interdist == -1) {
            interdist = 1;
        }

        double ratio = interdist;

        if (numAttempts == 1) {

            overallMaxL = tempL;

            bestRatio = Double.MAX_VALUE;

        }

        if (tempL >= overallMaxL && ratio < bestRatio) {

            bestRatio = ratio;

            lastBestK = bestK;
            bestK = k;

            repeatThis = 0;

            WritableImage newBestImage = ImageFactory.createWritable(input);

            clustersToMask(newBestImage, clusterObjects, clusters, bestK);

            rlf.apply(newBestImage);

            bestImage = newBestImage;

            overallMaxL = tempL;

        }

        if (tempL > currMaxL) {
            currMaxL = tempL;
        }

        clusters.clear();
        clusterObjects.clear();

        if (++repeatThis < numRepeats) {
            k = orig_k;
        } else {
            repeatThis = 0;
        }

        if (orig_k > k) {

            k = orig_k;
        }

        candidateNewBestImage = null;

        if (k > maxClusters)
            break;
        if (repeatThis == 0 && bestK == lastBestK)
            break;
        if (numAttempts >= maxClusters)
            break;

    }

    input.copy(bestImage);

}

From source file:edu.ku.brc.specify.tasks.subpane.qb.QueryBldrPane.java

/**
 * @param rootTable
 * @param distinct
 * @param qfps
 * @param tblTree
 * @param keysToRetrieve
 * @return HQLSpecs for the current fields and settings.
 */
public static HQLSpecs buildHQL(final TableQRI rootTable, final boolean distinct,
        final Vector<QueryFieldPanel> qfps, final TableTree tblTree, final RecordSetIFace keysToRetrieve,
        final boolean searchSynonymy, final boolean isSchemaExport, final Timestamp lastExportTime,
        final boolean disjunct) throws ParseException {
    if (qfps.size() == 0)
        return null;

    if (keysToRetrieve != null && keysToRetrieve.getNumItems() == 0)
        return null;

    StringBuilder fieldsStr = new StringBuilder();
    Vector<BaseQRI> list = new Vector<BaseQRI>();
    StringBuilder criteriaStr = new StringBuilder();
    StringBuilder orderStr = new StringBuilder();
    LinkedList<SortElement> sortElements = new LinkedList<SortElement>();
    boolean postSortPresent = false;
    boolean debug = false;
    ProcessNode root = new ProcessNode();
    int fldPosition = distinct ? 0 : 1;

    for (QueryFieldPanel qfi : qfps) {
        if (qfi.getFieldQRI() == null) {
            continue;
        }

        qfi.updateQueryField();

        if (qfi.isForDisplay()) {
            fldPosition++;
        }

        if (debug) {
            log.debug("\nNode: " + qfi.getFieldName());
        }

        SortElement orderSpec = qfi.getOrderSpec(distinct ? fldPosition - 1 : fldPosition - 2);
        if (orderSpec != null) {
            boolean isPostSortSpec = qfi.getFieldQRI() instanceof TreeLevelQRI
                    || qfi.getFieldQRI() instanceof RelQRI;
            //disregard post sorts that may have been saved before
            //fix for bug #9407
            if (!isSchemaExport) {
                postSortPresent |= isPostSortSpec;
            }
            if (!isPostSortSpec || !isSchemaExport) {
                sortElements.add(orderSpec);
            }
        }

        // Create a Stack (list) of parent from
        // the current node up to the top
        // basically we are creating a path of nodes
        // to determine if we need to create a new node in the tree
        list.clear();
        FieldQRI pqri = qfi.getFieldQRI();
        TableTree parent = pqri.getTableTree();
        if (qfi.isForDisplay() || qfi.hasCriteria() || orderSpec != null || pqri instanceof RelQRI) {
            boolean addToList = true;
            if (pqri instanceof RelQRI) {
                RelQRI relQRI = (RelQRI) pqri;
                RelationshipType relType = relQRI.getRelationshipInfo().getType();

                // XXX Formatter.getSingleField() checks for ZeroOrOne and
                // OneToOne rels.

                if (!relType.equals(RelationshipType.ManyToOne)
                        && !relType.equals(RelationshipType.ManyToMany) /* treat many-to-many as one-to-many */) {
                    // Maybe need to consider some types of OneToOne also?
                    parent = parent.getParent();
                    if (isSchemaExport && lastExportTime != null) {
                        addToList = true;
                    } else {
                        // parent will initially point to the related table,
                        // and we don't need to add the related table unless it has
                        // children displayed/queried,
                        addToList = false;
                    }
                } else {
                    DataObjDataFieldFormatIFace formatter = relQRI.getDataObjFormatter(qfi.getFormatName());
                    if (formatter != null) {
                        boolean isSingleSimpleFormat = formatter.getSingleField() != null
                                && formatter.getFields()[0].getSep() == null;
                        addToList = isSingleSimpleFormat || (isSchemaExport && lastExportTime != null);
                    } else {
                        addToList = false;
                    }
                }
            }
            if (addToList) {
                list.insertElementAt(pqri, 0);
            }
            while (parent != tblTree) {
                list.insertElementAt(parent.getTableQRI(), 0);
                parent = parent.getParent();
            }

            if (debug) {
                log.debug("Path From Top Down:");
                for (BaseQRI qri : list) {
                    log.debug("  " + qri.getTitle());
                }
            }

            // Now walk the stack top (the top most parent)
            // down and if the path form the top down doesn't
            // exist then add a new node
            ProcessNode parentNode = root;
            int q = 0;
            for (BaseQRI qri : list) {
                if (debug) {
                    log.debug("ProcessNode[" + qri.getTitle() + "]");
                }
                q++;
                if (!parentNode.contains(qri) && (qri instanceof TableQRI || q == list.size())) {
                    ProcessNode newNode = new ProcessNode(qri);
                    parentNode.getKids().add(newNode);
                    if (debug) {
                        log.debug("Adding new node[" + newNode.getQri().getTitle() + "] to Node["
                                + (parentNode.getQri() == null ? "root" : parentNode.getQri().getTitle())
                                + "]");
                    }
                    parentNode = newNode;
                } else {
                    for (ProcessNode kidNode : parentNode.getKids()) {
                        if (kidNode.getQri().equals(qri)) {
                            parentNode = kidNode;
                            break;
                        }
                    }
                }
            }

            if (debug) {
                log.debug("Current Tree:");
                printTree(root, 0);
            }
        }
    }

    if (debug) {
        printTree(root, 0);
    }

    StringBuilder fromStr = new StringBuilder();
    TableAbbreviator tableAbbreviator = new TableAbbreviator();
    List<Pair<DBTableInfo, String>> fromTbls = new LinkedList<Pair<DBTableInfo, String>>();
    boolean hqlHasSynJoins = processTree(root, fromStr, fromTbls, 0, tableAbbreviator, tblTree, qfps,
            searchSynonymy, isSchemaExport, lastExportTime);

    StringBuilder sqlStr = new StringBuilder();
    sqlStr.append("select ");
    //if (distinct /*|| hqlHasSynJoins*/)
    {
        sqlStr.append("distinct ");
    }
    if (!distinct) {
        fieldsStr.append(tableAbbreviator.getAbbreviation(rootTable.getTableTree()));
        fieldsStr.append(".");
        fieldsStr.append(rootTable.getTableInfo().getIdFieldName());
    }

    List<Pair<String, Object>> paramsToSet = new LinkedList<Pair<String, Object>>();
    boolean visibleFldExists = false;
    for (QueryFieldPanel qfi : qfps) {
        if (qfi.getFieldQRI() == null) {
            continue;
        }

        if (qfi.isForDisplay()) {
            visibleFldExists = true;
            String fldSpec = qfi.getFieldQRI().getSQLFldSpec(tableAbbreviator, false, isSchemaExport,
                    qfi.getFormatName());
            if (StringUtils.isNotEmpty(fldSpec)) {
                if (fieldsStr.length() > 0) {
                    fieldsStr.append(", ");
                }
                fieldsStr.append(fldSpec);
            }
        }
        if (keysToRetrieve == null || qfi.isEnforced()) {
            String criteria = qfi.getCriteriaFormula(tableAbbreviator, paramsToSet);
            boolean isDisplayOnly = StringUtils.isEmpty(criteria);
            if (!isDisplayOnly) {
                if (criteria.equals("2+2=2") && qfi.isNegated()) {
                    criteria = "";
                }
                if (criteria.length() > 0 && hqlHasSynJoins && isSynSearchable(qfi.getFieldQRI())
                        && !qfi.isEmptyCriterion()) {
                    criteria = adjustForSynSearch(
                            tableAbbreviator.getAbbreviation(qfi.getFieldQRI().getTable().getTableTree()),
                            criteria, qfi.isNegated());
                }
                if (!isDisplayOnly && criteriaStr.length() > 0 && criteria.length() > 0) {
                    criteriaStr.append(disjunct ? " OR " : " AND ");
                }
                criteriaStr.append(criteria);
            }
        }
    }
    if (!visibleFldExists) {
        throw new ParseException(getResourceString("QueryBldrPane.NoVisibleColumns"), -1);
    }

    sqlStr.append(fieldsStr);

    sqlStr.append(" from ");
    sqlStr.append(fromStr);

    if (keysToRetrieve != null) {
        if (!StringUtils.isEmpty(criteriaStr.toString())) {
            criteriaStr.append(" and ");
        }
        criteriaStr.append("(");
        criteriaStr.append(tableAbbreviator.getAbbreviation(rootTable.getTableTree()) + "."
                + rootTable.getTableInfo().getIdFieldName() + " in(");
        boolean comma = false;
        int maxInClauseLen = 2500;
        int inClauseLen = 0;
        for (RecordSetItemIFace item : keysToRetrieve.getOrderedItems()) {
            if (inClauseLen == maxInClauseLen) {
                criteriaStr.append(") or ");
                criteriaStr.append(tableAbbreviator.getAbbreviation(rootTable.getTableTree()) + "."
                        + rootTable.getTableInfo().getIdFieldName() + " in(");
                inClauseLen = 0;
            } else if (comma) {
                criteriaStr.append(",");
            } else {
                comma = true;
            }
            criteriaStr.append(item.getRecordId());
            inClauseLen++;
        }
        criteriaStr.append("))");
    } else {
        //Assuming that this is not necessary when keysToRetrieve is non-null because
        //the keys will already have been filtered properly. (???)

        // Add extra where's for system fields for root table only, see notes below at end of for block
        boolean isRootTbl = true;
        for (Pair<DBTableInfo, String> fromTbl : fromTbls) {
            String specialColumnWhere = null;
            if (fromTbl.getFirst().getTableId() == Attachment.getClassTableId()) {
                String prefix = fromTbl.getSecond() + ".";
                specialColumnWhere = "((" + prefix + "scopeType = 0 and " + prefix + "scopeID = "
                        + AppContextMgr.getInstance()
                                .getClassObject(edu.ku.brc.specify.datamodel.Collection.class).getCollectionId()
                        + ") or" + "(" + prefix + "scopeType = 1 and " + prefix + "scopeID = "
                        + AppContextMgr.getInstance().getClassObject(Discipline.class).getDisciplineId()
                        + ") or" + "(" + prefix + "scopeType = 2 and " + prefix + "scopeID = "
                        + AppContextMgr.getInstance().getClassObject(Division.class).getDivisionId() + ") or"
                        + "(" + prefix + "scopeType = 3 and " + prefix + "scopeID = "
                        + AppContextMgr.getInstance().getClassObject(Institution.class).getInstitutionId()
                        + "))";
            } else {
                specialColumnWhere = QueryAdjusterForDomain.getInstance().getSpecialColumns(fromTbl.getFirst(),
                        true, !isRootTbl && true/* XXX should only use left join when necessary */,
                        fromTbl.getSecond());
            }
            isRootTbl = false;
            if (StringUtils.isNotEmpty(specialColumnWhere)) {
                if (criteriaStr.length() > 0) {
                    criteriaStr.append(" AND ");
                }
                criteriaStr.append(specialColumnWhere);
            }
            //Actually, assuming data is valid, it should only be necessary to add the Adjustments for the root table?
            //XXX if this works, fix this loop. Also, join parameter code in getSpecialColumns will probably be irrelevant.
            break;
        }
        //...done adding system where clauses

        //get only records modified/added since last export of the schema...
        if (isSchemaExport && lastExportTime != null) {
            if (criteriaStr.length() > 0) {
                criteriaStr.append(" AND (");
            }
            String timestampParam = "spparam" + paramsToSet.size();
            paramsToSet.add(new Pair<String, Object>(timestampParam, lastExportTime));
            criteriaStr.append(getTimestampWhere(fromTbls, timestampParam, lastExportTime));
            criteriaStr.append(") ");
        }
    }

    if (criteriaStr.length() > 0) {
        sqlStr.append(" where ");
        sqlStr.append(criteriaStr);
    }

    if (sortElements.size() > 0 && !postSortPresent) {
        for (SortElement se : sortElements) {
            if (!StringUtils.isEmpty(orderStr.toString())) {
                orderStr.append(", ");
            }
            orderStr.append(distinct ? se.getColumn() + 1 : se.getColumn() + 2);
            if (se.getDirection() == SortElement.DESCENDING) {
                orderStr.append(" DESC");
            }
        }
        sortElements.clear();
    }

    if (orderStr.length() > 0) {
        sqlStr.append(" order by ");
        sqlStr.append(orderStr);
    }

    if (debug) {
        log.debug(sqlStr.toString());
        log.debug("sort:");
        for (SortElement s : sortElements) {
            log.debug("  " + s.getColumn() + " - " + s.getDirection());
        }
    }

    String result = sqlStr.toString();
    if (!checkHQL(result))
        return null;

    log.info(result);
    return new HQLSpecs(result, paramsToSet, sortElements, hqlHasSynJoins);
}

From source file:edu.ku.brc.specify.tasks.subpane.wb.wbuploader.Uploader.java

/**
 * @return true if the dataset can be uploaded.
 *
 * Checks that the import mapping and graph are OK. Checks that all required data (TreeDefs,
 * TreeDefItems, DeterminationStatuses, etc) is present in the database.
 * 
 * Saves messages for each problem.
 */
public Vector<UploadMessage> verifyUploadability() throws UploaderException, ClassNotFoundException {
    Vector<UploadMessage> errors = new Vector<UploadMessage>();
    try {
        Vector<Vector<Table>> missingTbls = new Vector<Vector<Table>>();

        //check that parents exist for one-to-one children (which are required to be defined as many-to-one parents 
        //in hibernate)
        for (UploadTable t : uploadTables) {
            if (t.isOneToOneChild() && !t.getHasChildren() && !(t.getTblClass().equals(LocalityDetail.class)
                    || t.getTblClass().equals(GeoCoordDetail.class))) {
                Vector<Vertex<Table>> vs = db.getGraph()
                        .getAdjacentVertices(new Vertex<Table>(t.getTable().getName(), t.getTable()));
                Vector<Table> tbls = new Vector<Table>();
                for (Vertex<Table> vertex : vs) {
                    tbls.add(vertex.getData());
                }
                missingTbls.add(tbls);
            }
        }
        if (!uploadGraph.isConnected()) {
            missingTbls.addAll(getMissingTbls());
        }
        if (missingTbls.size() > 0) {
            Vector<Pair<String, Vector<Table>>> missingTblHints = new Vector<Pair<String, Vector<Table>>>();
            int h = 1;
            for (Vector<Table> tbls : missingTbls) {
                String msg = "";
                if (tbls != null && tbls.size() > 0) {
                    msg += " ";
                    for (int t = 0; t < tbls.size(); t++) {
                        if (t > 0) {
                            msg += ", ";
                        }
                        msg += tbls.get(t).getTableInfo().getTitle();
                    }
                }
                if (!msg.equals("")) {
                    missingTblHints.add(new Pair<String, Vector<Table>>(
                            String.format(getResourceString("WB_UPLOAD_MISSING_TBL_HINT"), h++, msg), tbls));
                }
            }
            if (missingTblHints.size() > 0) {
                errors.add(new BaseUploadMessage(getResourceString("WB_UPLOAD_MISSING_TBL_HINTS")));
                for (Pair<String, Vector<Table>> hint : missingTblHints) {
                    errors.add(new InvalidStructure("   " + hint.getFirst(), hint.getSecond()));
                }
            } else {
                errors.add(new BaseUploadMessage(getResourceString("WB_UPLOAD_MISSING_TBL_NO_HINTS")));
            }
        }
    } catch (DirectedGraphException ex) {
        throw new UploaderException(ex, UploaderException.ABORT_IMPORT);
    }

    errors.addAll(validateConsistency());

    if (!verifyAttachments()) {
        String msg = String.format(UIRegistry.getResourceString("WB_UPLOAD_NO_ATTACHABLES"),
                getAttachableStr());
        errors.add(new BaseUploadMessage(msg));
    }

    //if tables are missing return now, because spurious errors may be generated.
    if (errors.size() != 0) {
        return errors;
    }

    // now find out what data is not available in the dataset and not available in the database
    // Considering such issues 'structural' for now.
    missingRequiredClasses.clear();
    missingRequiredFields.clear();
    Iterator<RelatedClassSetter> rces;
    Iterator<DefaultFieldEntry> dfes;
    for (UploadTable t : uploadTables) {
        try {
            rces = t.getRelatedClassDefaults();
        } catch (ClassNotFoundException ex) {
            log.error(ex);
            return null;
        }
        while (rces.hasNext()) {
            missingRequiredClasses.add(rces.next());
        }

        try {
            dfes = t.getMissingRequiredFlds();
        } catch (NoSuchMethodException ex) {
            log.error(ex);
            return null;
        }
        while (dfes.hasNext()) {
            missingRequiredFields.add(dfes.next());
        }
    }
    resolver = new MissingDataResolver(missingRequiredClasses, missingRequiredFields);
    for (RelatedClassSetter rcs : missingRequiredClasses) {
        if (!rcs.isDefined()) {
            // Assume it is undefined because no related data exists in the database.
            // Also assuming (currently erroneously) that definition problems related to
            // choosing
            // from multiple existing related data have been resolved through user interaction.
            String tblName = DBTableIdMgr.getInstance()
                    .getByShortClassName(rcs.getRelatedClass().getSimpleName()).getTitle();
            // a very vague message...
            String msg = getResourceString("WB_UPLOAD_MISSING_DBDATA") + ": " + tblName;
            errors.add(new InvalidStructure(msg, this));
        }
    }

    Vector<DefaultFieldEntry> undefinedDfes = new Vector<DefaultFieldEntry>();
    for (DefaultFieldEntry dfe : missingRequiredFields) {
        if (!dfe.isDefined()) {
            undefinedDfes.add(dfe);
        }
    }
    //now remove possibly confusing or redundant dfes.
    Collections.sort(undefinedDfes, new Comparator<DefaultFieldEntry>() {

        /* (non-Javadoc)
         * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
         */
        @Override
        public int compare(DefaultFieldEntry o1, DefaultFieldEntry o2) {
            int result = o1.getUploadTbl().getTable().getName()
                    .compareTo(o2.getUploadTbl().getTable().getName());
            if (result != 0) {
                return result;
            }
            boolean o1IsUserFld = o1.getUploadFld() == null || o1.getUploadFld().getIndex() != -1;
            boolean o2IsUserFld = o2.getUploadFld() == null || o2.getUploadFld().getIndex() != -1;
            if (o1IsUserFld == o2IsUserFld) {
                return (o1.getFldName().compareTo(o2.getFldName()));
            }
            if (o1IsUserFld) {
                return -1;
            }
            return 1;
        }

    });
    UploadTable currentTbl = null;
    Vector<DefaultFieldEntry> dfes4Tbl = new Vector<DefaultFieldEntry>();
    Vector<DefaultFieldEntry> dfes2Remove = new Vector<DefaultFieldEntry>();
    for (DefaultFieldEntry dfe : undefinedDfes) {
        if (dfe.getUploadTbl() != currentTbl) {
            if (dfes4Tbl.size() > 1) {
                boolean gotAUserFld = false;
                for (DefaultFieldEntry tblDfe : dfes4Tbl) {
                    boolean isAUserFld = tblDfe.getUploadFld() == null
                            || tblDfe.getUploadFld().getIndex() != -1;
                    gotAUserFld = gotAUserFld || isAUserFld;
                    if (!isAUserFld && gotAUserFld) {
                        //remove weird fields if there are other non-weird fields from the table
                        dfes2Remove.add(tblDfe);
                    }
                }
            }
            dfes4Tbl.clear();
            currentTbl = dfe.getUploadTbl();
        }
        dfes4Tbl.add(dfe);
    }
    if (dfes4Tbl.size() > 1) {
        boolean gotAUserFld = false;
        for (DefaultFieldEntry tblDfe : dfes4Tbl) {
            boolean isAUserFld = tblDfe.getUploadFld() == null || tblDfe.getUploadFld().getIndex() != -1;
            gotAUserFld = gotAUserFld || isAUserFld;
            if (!isAUserFld && gotAUserFld) {
                //remove weird fields if there are other non-weird(or weird) fields from the table
                dfes2Remove.add(tblDfe);
            }
        }
    }
    for (DefaultFieldEntry dfe : dfes2Remove) {
        undefinedDfes.remove(dfe);
    }
    for (DefaultFieldEntry dfe : undefinedDfes) {
        // see note above for the missingRequiredClasses iteration
        // another very vague message...
        String msg = getResourceString("WB_UPLOAD_MISSING_DBDATA") + ": "
                + dfe.getUploadTbl().getTable().getTableInfo().getTitle() + "." + dfe.getFldName();
        // i18n (dfe.getFldName() is not using title nor wb column header)
        errors.add(new InvalidStructure(msg, this));
    }

    for (UploadTable t : uploadTables) {
        errors.addAll(t.verifyUploadability());
    }

    return errors;
}

From source file:com.truledger.client.Client.java

/**
 * Compute the balance hash of all the server-signed messages in subdirs of balanceKey of db.
 * @param db
 * @param unpacker Parses and matches a server-signed message string into a Parser.Dict instance 
 * @param balancekey
 * @param acctbals if non-null, maps acct names to maps of assetids to non-server-signed balance messages.
 * @return
 * @throws ClientException
 */
public Utility.DirHash balancehash(FSDB db, Utility.MsgUnpacker unpacker, String balancekey,
        StringMapMap acctbals) throws ClientException {
    String hash = null;
    int hashcnt = 0;
    String[] accts = db.contents(balancekey);
    if (acctbals != null) {
        Vector<String> acctsv = new Vector<String>();
        Set<String> keys = acctbals.keySet();
        for (String key : keys) {
            if (Utility.position(key, accts) < 0)
                acctsv.add(key);
        }
        int size = acctsv.size();
        if (size > 0) {
            String[] newaccts = new String[accts.length + size];
            int i = 0;
            for (String acct : accts)
                newaccts[i++] = acct;
            for (String acct : acctsv)
                newaccts[i++] = acct;
            accts = newaccts;
        }
    }
    Vector<String> newitemsv = new Vector<String>();
    Vector<String> removednamesv = new Vector<String>();
    for (String acct : accts) {
        newitemsv.clear();
        removednamesv.clear();
        StringMap newacct = acctbals != null ? acctbals.get(acct) : null;
        if (newacct != null) {
            Set<String> assetids = newacct.keySet();
            for (String assetid : assetids) {
                String msg = newacct.get(assetid);
                newitemsv.add(msg);
                removednamesv.add(assetid);
            }
        }
        int cnt = newitemsv.size();
        String[] newitems = cnt > 0 ? newitemsv.toArray(new String[cnt]) : null;
        cnt = removednamesv.size();
        String[] removednames = cnt > 0 ? removednamesv.toArray(new String[cnt]) : null;
        try {
            Utility.DirHash dirHash = Utility.dirhash(db, balancekey + '.' + acct, unpacker, removednames,
                    newitems);
            if (dirHash != null) {
                hash = hash == null ? dirHash.hash : hash + '.' + dirHash.hash;
                hashcnt += dirHash.count;
            }
        } catch (Exception e) {
            throw new ClientException(e);
        }
    }
    if (hashcnt > 1)
        hash = Crypto.sha1(hash);
    return new Utility.DirHash(hash == null ? "" : hash, hashcnt);
}

From source file:edu.ku.brc.specify.web.SpecifyExplorer.java

/**
 * @param out
 * @param list
 * @param sql
 */
protected void processDataList(final PrintWriter out, final List<?> list, final String sql) {
    if (StringUtils.isEmpty(template)) {
        out.println("The template file is empty!");
    }

    FormDataObjIFace dataObj = (FormDataObjIFace) list.get(0);

    String linkField = "";
    ClassDisplayInfo cdi = classHash.get(dataObj.getClass().getSimpleName());
    if (cdi != null) {
        linkField = cdi.getLinkField();
    }

    int contentInx = template.indexOf(contentTag);
    String subContent = template.substring(0, contentInx);
    out.println(StringUtils.replace(subContent, "<!-- Title -->", dataObj.getIdentityTitle()));

    //fillLabelMap(dataObj, labelMap);

    Hashtable<Integer, String> ordered = new Hashtable<Integer, String>();
    Vector<String> unOrdered = new Vector<String>();

    Hashtable<String, Boolean> hasData = new Hashtable<String, Boolean>();

    out.println("<table border=\"0\" width=\"100%\"<tr><td nowrap=\"nowrap\">select * "
            + StringUtils.replace(sql, packageName, "") + "</td></tr>");
    out.println("<tr><td nowrap=\"nowrap\">Records Returned: " + list.size() + "</td></tr></table><br/>");
    out.println("<table width=\"100%\" cellspacing=\"0\" class=\"brdr\">\n");
    try {
        for (Object dobj : list) {
            dataObj = (FormDataObjIFace) dobj;
            for (Field field : dataObj.getClass().getDeclaredFields()) {
                String fieldName = field.getName();
                FieldDisplayInfo fdi = cdi.getField(fieldName);

                if (fdi != null && (fdi.isSkipped() || !fdi.isForDisplay())) {
                    continue;
                }

                String fldNameLower = fieldName.toLowerCase();
                if (fldNameLower.startsWith(dataObj.getClass().getSimpleName().toLowerCase())
                        && fldNameLower.endsWith("id")) {
                    continue;
                }

                try {
                    Object data = getData(field, dataObj);
                    if (data != null && !(data instanceof Set<?>)) {
                        hasData.put(fieldName, true);
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        }

        out.println("<tr><th class=\"brdr\">Row</th>");
        for (Field field : dataObj.getClass().getDeclaredFields()) {
            String fieldName = field.getName();

            if (hasData.get(fieldName) == null) {
                continue;
            }

            FieldDisplayInfo fdi = cdi.getField(fieldName); // should never be null

            String labelStr = labelMap.get(fieldName);
            if (StringUtils.isEmpty(labelStr)) {
                labelStr = UIHelper.makeNamePretty(fieldName);
            }

            String row = "<th class=\"brdr\">" + labelStr + "</th>";
            Integer inx = fdi != null ? fdi.getOrder() : null;
            if (inx == null) {
                unOrdered.add(row);
            } else {
                ordered.put(inx, row);
            }

        }
        fillRows(out, ordered, unOrdered);

        out.println("</tr>");

        int cnt = 1;
        for (Object dobj : list) {
            ordered.clear();
            unOrdered.clear();

            out.println("<tr><th class=\"brdr\" align=\"center\">" + cnt + "</th>");

            dataObj = (FormDataObjIFace) dobj;

            for (Field field : dataObj.getClass().getDeclaredFields()) {
                String fieldName = field.getName();

                if (hasData.get(fieldName) == null) {
                    continue;
                }

                FieldDisplayInfo fdi = cdi.getField(fieldName);

                String row = null;
                try {
                    Object data = getData(field, dataObj);
                    if (data != null && !(data instanceof Set<?>)) {
                        String val;
                        if (fieldName.equals(linkField)) {
                            val = formatFDI(dataObj, formatValue(data));
                        } else {
                            val = formatValue(data);
                        }
                        row = "<td align=\"center\" class=\"brdr" + ((cnt % 2 == 0) ? "even" : "odd") + "\">"
                                + (StringUtils.isNotEmpty(val) ? val : "&nbsp;") + "</td>";
                    } else {
                        row = "<td align=\"center\" class=\"brdr" + ((cnt % 2 == 0) ? "even" : "odd")
                                + "\">&nbsp;</td>";
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }

                if (row != null) {
                    Integer inx = fdi != null ? fdi.getOrder() : null;
                    if (inx == null) {
                        unOrdered.add(row);
                    } else {
                        ordered.put(inx, row);
                    }
                }
            }
            fillRows(out, ordered, unOrdered);
            out.println("</tr>\n");
            cnt++;
        }
        out.println("</table>\n");

        // This should be externalized
        if (dataObj.getClass() == Locality.class || dataObj.getClass() == CollectingEvent.class
                || dataObj.getClass() == CollectionObject.class || dataObj.getClass() == Taxon.class
                || dataObj.getClass() == Accession.class) {
            createMapLink(out, list, dataObj.getClass());
        }

        out.println(template.substring(contentInx + contentTag.length() + 1, template.length()));

    } catch (Exception ex) {
        ex.printStackTrace();
        System.out.println(ex.toString());
        out.println("Sorry");
    }
}
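
processDataList() clears its per-row buffers (the ordered Hashtable and the unOrdered Vector) before rendering each record so one row's cells never leak into the next. A rough, standalone illustration of that per-row reset; the class name and sample data are made up for the example:

import java.util.Vector;

public class RowBufferReuse {
    public static void main(String[] args) {
        // One Vector of cell strings, cleared before each record is rendered.
        Vector<String> cells = new Vector<String>();
        String[][] records = { { "Bufo", "1901" }, { "Rana", "1987" } };

        StringBuilder html = new StringBuilder("<table>\n");
        for (String[] record : records) {
            cells.clear(); // drop the previous row's cells, keep the allocation
            for (String value : record) {
                cells.add("<td>" + value + "</td>");
            }
            html.append("<tr>");
            for (String cell : cells) {
                html.append(cell);
            }
            html.append("</tr>\n");
        }
        html.append("</table>");
        System.out.println(html);
    }
}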

From source file:initializers.FSInitializer.java

@Override
public void doAnalysis(Federation federation, List<?> dataProviders, boolean fedFlag, String[] elements2Analyze,
        String elmtVoc) throws InstantiationException, IllegalAccessException, ClassNotFoundException,
        SAXException, ParserConfigurationException {
    // TODO Auto-generated method stub

    // Vector<String> xmlElements = new Vector<>();
    HashMap<String, Double> xmlElements = new HashMap<>();
    Vector<String> xmlElementsDistinct = new Vector<>();

    //HashMap<String,HashMap<HashMap<String, String>, Integer>> attributes = new HashMap<>();
    HashMap<String, Integer> attributes = new HashMap<>();

    HashMap<String, Integer> elementDims = new HashMap<>();
    HashMap<String, Integer> elementCompletness = new HashMap<>();
    Vector<String> elementEntropy = new Vector<>();
    HashMap<String, Double> elementImportance = new HashMap<>();

    Properties props = new Properties();
    try {
        props.load(new FileInputStream("configure.properties"));

    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.exit(-1);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.exit(-1);
    }

    String resultsPath = props.getProperty(AnalyticsConstants.resultsPath);
    String[] elementVocs = elmtVoc.split(",");

    ConfigureLogger conf = new ConfigureLogger();
    Logger logger = conf.getLogger("vocAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "vocAnalysis.log");

    Logger loggerAtt = conf.getLogger("attributeAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "attributeAnalysis.log");

    Logger loggerEl = conf.getLogger("elementAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "elementAnalysis.log");

    for (int i = 0; i < dataProviders.size(); i++) {

        String[] extensions = { "xml" };
        //FileUtils utils = new FileUtils();
        Collection<File> xmls = FileUtils.listFiles((File) dataProviders.get(i), extensions, true);

        String filterXMLs = props.getProperty(AnalyticsConstants.filteringEnabled);

        if (filterXMLs.equalsIgnoreCase("true")) {
            Filtering filtering = new Filtering();
            String expression = props.getProperty(AnalyticsConstants.xpathExpression);
            System.out.println("Filtering is enabled.");
            Iterator<File> iterator = xmls.iterator();
            while (iterator.hasNext()) {
                File next = iterator.next();
                if (!filtering.filterXML(next, expression)) {
                    System.out.println("File:" + next.getName() + " is filtered out.");
                    iterator.remove();
                } else
                    System.out.println("File:" + next.getName() + " is kept in xmls' collection.");

            }
        }

        try {

            // Repository repo = new Repository(xmls, elements2Analyze);

            //distinctAtts, 
            Repository repo = new Repository(xmls, attributes, xmlElements, xmlElementsDistinct, elementDims,
                    elementCompletness, elementEntropy, elementImportance, props);

            repo.setRepoName(((File) dataProviders.get(i)).getName());
            repo.setRecordsNum(xmls.size());

            if (fedFlag) {

                federation.addRepoName(((File) dataProviders.get(i)).getName());

                System.out.println("######################################");
                System.out.println("Analysing repository:" + repo.getRepoName());
                System.out.println("Number of records:" + xmls.size());
                repo.parseXMLs(elements2Analyze, elementVocs);

                federation.appendFreqElements(repo.getElementFrequency());

                federation.appendCompletnessElements(repo.getElementCompleteness(), dataProviders.size());
                federation.appendImportanceElements(repo.getElementImportance(), dataProviders.size());

                federation.appendDimensionalityElements(repo.getElementDimensions());

                federation.appendEntropyElements(repo.computeElementEntropy(), dataProviders.size());

                this.logElementAnalysis(loggerEl, repo.getRepoName(), resultsPath);

                repo.computeElementValueFreq(elementVocs, logger);

                // FileUtils.deleteDirectory(new File("buffer"));

                repo.getAttributeFrequency(loggerAtt);

                federation.appendFileSize(repo.getFileSizeDistribution());

                federation.appendNoRecords(repo.getXmls().size());
                repo.storeRepoGeneralInfo(true);
                federation.appendInformativeness(repo.getAvgRepoInformativeness());
                federation.appendSchemas(repo.getSchema(false));
                federation.appendRequirements(repo.getRequirements());

                System.out.println("Repository:" + repo.getRepoName() + " analysis completed.");
                System.out.println("======================================");
            } else {
                System.out.println("######################################");
                System.out.println("Analysing repository:" + repo.getRepoName());
                System.out.println("Number of records:" + repo.getXmls().size());
                repo.parseXMLs(elements2Analyze, elementVocs);
                repo.getElementFrequency();
                repo.getElementCompleteness();
                repo.getElementDimensions();
                repo.getElementImportance();

                repo.computeElementEntropy();

                this.logElementAnalysis(loggerEl, repo.getRepoName(), resultsPath);
                // System.out.println(repo.getVocabularies());

                repo.computeElementValueFreq(elementVocs, logger);

                repo.storeRepoGeneralInfo(false);

                // FileUtils.deleteDirectory(new File("buffer"));

                repo.getAttributeFrequency(loggerAtt);

                System.out.println("======================================");
                System.out.println("Repository:" + repo.getRepoName() + " analysis completed.");
                System.out.println("======================================");

            }
        } catch (IOException ex) {

            ex.printStackTrace();
        }
        xmlElements.clear();
        xmlElementsDistinct.clear();
        attributes.clear();
        //   distinctAtts.clear();
        elementDims.clear();
        elementCompletness.clear();

        elementEntropy.clear();
        elementImportance.clear();

    }

    if (fedFlag) {
        try {
            federation.getElementsSFrequency();
            federation.getElementsMCompletness();
            federation.getElementsMImportance();
            federation.getElementsMaxDimensionality();
            federation.getElementsMEntropy();
            federation.getAttributesSumFreq(loggerAtt);
            federation.getElementValueSumFreq(elmtVoc, logger);
            System.out.println("Average file size:" + federation.getAverageFileSize() + " Bytes");
            System.out.println("Sum number of records:" + federation.getRecordsSum() + " records");
            System.out.println("Sum storage requirements:" + federation.getRequirements() + " bytes");
            System.out.println("AVG informativeness(bits):" + federation.getAVGInformativeness());

            federation.storeGeneralInfo2CSV();
            this.logElementAnalysis(loggerEl, "Federation", resultsPath);

        } catch (IOException ex) {
            ex.printStackTrace();
        }

    }

}
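
Each pass of the provider loop ends by clearing the per-repository accumulators (xmlElements, xmlElementsDistinct, attributes, elementDims, and so on) so the next repository starts from a clean slate. A small self-contained sketch of that reset step, with made-up element names and a simplified pair of accumulators:

import java.util.HashMap;
import java.util.Map;
import java.util.Vector;

public class PerRepositoryReset {
    public static void main(String[] args) {
        // Per-repository accumulators, wiped between providers as in doAnalysis().
        Map<String, Double> elementFreq = new HashMap<String, Double>();
        Vector<String> distinctElements = new Vector<String>();

        String[][] repositories = { { "title", "creator", "title" }, { "subject" } };
        for (String[] records : repositories) {
            for (String element : records) {
                elementFreq.merge(element, 1.0, Double::sum);
                if (!distinctElements.contains(element)) {
                    distinctElements.add(element);
                }
            }
            System.out.println("freq=" + elementFreq + " distinct=" + distinctElements);

            // Clear the accumulators so the next repository starts from an empty state.
            elementFreq.clear();
            distinctElements.clear();
        }
    }
}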

From source file:org.ohmage.db.DbHelper.java

/**
 * Utility method that populates the Survey and SurveyPrompt tables for the
 * campaign identified by campaignUrn and containing the given xml as
 * campaignXML.
 * 
 * Note that this method takes a db handle so that it can be used in a
 * transaction.
 * 
 * @param db
 *            a handle to an existing writable db
 * @param campaignUrn
 *            the urn of the campaign for which we're populating subtables
 * @param campaignXML
 *            the XML for the campaign (not validated by this method)
 * @return true if the surveys and prompts were stored successfully, false if the XML could not be parsed
 * 
 */
public boolean populateSurveysFromCampaignXML(SQLiteDatabase db, String campaignUrn, String campaignXML) {
    try {
        // dump all the surveys (and consequently survey prompts) before we
        // do anything
        // this is (perhaps surprisingly) desired behavior, as the surveys +
        // survey prompts
        // should always reflect the state of the campaign XML, valid or not
        db.delete(Tables.SURVEYS, Surveys.CAMPAIGN_URN + "=?", new String[] { campaignUrn });

        // do a pass over the XML to gather surveys and survey prompts
        XmlPullParser xpp = Xml.newPullParser();
        xpp.setInput(new ByteArrayInputStream(campaignXML.getBytes("UTF-8")), "UTF-8");
        int eventType = xpp.getEventType();
        String tagName;

        // various stacks to maintain state while walking through the xml
        // tree
        Stack<String> tagStack = new Stack<String>();
        Survey curSurvey = null; // valid only within a survey, null otherwise
        Vector<SurveyPrompt> prompts = new Vector<SurveyPrompt>(); // valid only within a survey, empty otherwise
        Vector<JSONObject> properties = new Vector<JSONObject>(); // valid only within a prompt, empty otherwise

        // iterate through the xml, paying attention only to surveys and
        // prompts
        // note that this does no validation outside of preventing itself
        // from crashing catastrophically
        while (eventType != XmlPullParser.END_DOCUMENT) {
            if (eventType == XmlPullParser.START_TAG) {
                tagName = xpp.getName();
                tagStack.push(tagName);

                if (tagName.equalsIgnoreCase("survey")) {
                    if (curSurvey != null)
                        throw new XmlPullParserException("encountered a survey tag inside another survey tag");

                    curSurvey = new Survey();
                    curSurvey.mCampaignUrn = campaignUrn;

                } else if (tagName.equalsIgnoreCase("prompt")) {
                    SurveyPrompt sp = new SurveyPrompt();
                    // FIXME: add the campaign + survey ID to make lookups
                    // easier?
                    prompts.add(sp);
                } else if (tagName.equalsIgnoreCase("property")) {
                    properties.add(new JSONObject());
                }
            } else if (eventType == XmlPullParser.TEXT) {
                if (tagStack.size() >= 2) {
                    // we may be in an entity>property situation, so check
                    // and assign accordingly
                    if (tagStack.get(tagStack.size() - 2).equalsIgnoreCase("survey")) {
                        // populating the current survey object with its
                        // properties here
                        if (tagStack.peek().equalsIgnoreCase("id"))
                            curSurvey.mSurveyID = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("title"))
                            curSurvey.mTitle = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("description"))
                            curSurvey.mDescription = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("submitText"))
                            curSurvey.mSubmitText = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("showSummary"))
                            curSurvey.mShowSummary = xpp.getText().equals("true") ? true : false;
                        else if (tagStack.peek().equalsIgnoreCase("editSummary"))
                            curSurvey.mEditSummary = xpp.getText().equals("true") ? true : false;
                        else if (tagStack.peek().equalsIgnoreCase("summaryText"))
                            curSurvey.mSummaryText = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("introText"))
                            curSurvey.mIntroText = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("anytime"))
                            curSurvey.mAnytime = xpp.getText().equals("true") ? true : false;
                    } else if (tagStack.get(tagStack.size() - 2).equalsIgnoreCase("prompt")) {
                        SurveyPrompt sp = prompts.lastElement();

                        // populating the last encountered survey prompt
                        // with its properties here
                        if (tagStack.peek().equalsIgnoreCase("id"))
                            sp.mPromptID = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("promptText"))
                            sp.mPromptText = xpp.getText();
                        else if (tagStack.peek().equalsIgnoreCase("promptType"))
                            sp.mPromptType = xpp.getText();
                    } else if (tagStack.get(tagStack.size() - 2).equalsIgnoreCase("property")) {
                        JSONObject curProperty = properties.lastElement();

                        // populating the last encountered property
                        if (tagStack.peek().equalsIgnoreCase("key"))
                            curProperty.put("key", xpp.getText());
                        else if (tagStack.peek().equalsIgnoreCase("label"))
                            curProperty.put("label", xpp.getText());
                        else if (tagStack.peek().equalsIgnoreCase("value"))
                            curProperty.put("value", xpp.getText());
                    }
                }
            } else if (eventType == XmlPullParser.END_TAG) {
                tagName = xpp.getName();
                tagStack.pop();

                if (tagName.equalsIgnoreCase("survey")) {
                    // store the current survey to the database
                    long surveyPID = db.insert(Tables.SURVEYS, null, curSurvey.toCV());

                    // also store all the prompts we accumulated for it
                    for (SurveyPrompt sp : prompts) {
                        sp.mSurveyID = curSurvey.mSurveyID;
                        sp.mSurveyPID = surveyPID;
                        sp.mCompositeID = curSurvey.mCampaignUrn + ":" + curSurvey.mSurveyID;
                        db.insert(Tables.SURVEY_PROMPTS, null, sp.toCV());
                    }

                    // flush the prompts we've stored up so far
                    prompts.clear();

                    // Create Streams here
                    OhmagePDVManager.getInstance().createStreamForSurvey(campaignUrn, curSurvey.mSurveyID);
                    // and clear us from being in any survey
                    curSurvey = null;
                } else if (tagName.equalsIgnoreCase("prompt")) {
                    SurveyPrompt sp = prompts.lastElement();

                    // update the current prompt with the collected
                    // properties
                    JSONArray propertyArray = new JSONArray();

                    for (JSONObject property : properties)
                        propertyArray.put(property);

                    // encode it as json and stuff it in the surveyprompt
                    sp.mProperties = propertyArray.toString();

                    // and wipe the properties
                    properties.clear();
                }
            }

            eventType = xpp.next();
        }
    } catch (UnsupportedEncodingException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        return false;
    } catch (XmlPullParserException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        return false;
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        return false;
    } catch (JSONException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        return false;
    }

    return true;
}
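
In the pull-parsing loop above, prompts.clear() and properties.clear() run in the END_TAG branches so each buffer only ever holds data for the survey or prompt currently being read. A minimal sketch of that flush-on-closing-tag idea, using a fake event stream instead of XmlPullParser (all names and events are invented for illustration):

import java.util.Vector;

public class FlushOnEndTag {
    public static void main(String[] args) {
        // Simulated parser events: prompts accumulate until a survey ends, then are flushed.
        String[] events = { "prompt:p1", "prompt:p2", "endSurvey", "prompt:p3", "endSurvey" };

        Vector<String> prompts = new Vector<String>(); // buffer valid only within one survey
        int surveyCount = 0;

        for (String event : events) {
            if (event.startsWith("prompt:")) {
                prompts.add(event.substring("prompt:".length()));
            } else if (event.equals("endSurvey")) {
                surveyCount++;
                System.out.println("survey " + surveyCount + " stored with prompts " + prompts);
                prompts.clear(); // same idea as the </survey> branch above: flush, then reset
            }
        }
    }
}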