Example usage for java.util.TreeSet size()

List of usage examples for java.util.TreeSet.size()

Introduction

This page collects real-world usage examples for java.util.TreeSet.size(), drawn from open-source projects.

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
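
For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) showing that size() reports cardinality: duplicates inserted into the set are not counted.

import java.util.TreeSet;

public class TreeSetSizeDemo {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<String>();
        names.add("alice");
        names.add("bob");
        names.add("alice"); // duplicate, ignored by the set

        System.out.println(names.size()); // prints 2
    }
}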

Usage

From source file:org.dasein.cloud.cloudstack.network.LoadBalancers.java

private void toRule(@Nullable Node node, @Nonnull Map<String, LoadBalancer> current)
        throws InternalException, CloudException {
    NodeList attributes = node.getChildNodes();
    int publicPort = -1, privatePort = -1;
    LbAlgorithm algorithm = null;
    String publicIp = null;
    String vlanId = null;
    String ruleId = null;
    String lbName = null;
    String lbDesc = ""; // can't be null

    for (int i = 0; i < attributes.getLength(); i++) {
        Node n = attributes.item(i);
        String name = n.getNodeName().toLowerCase();
        String value;

        if (n.getChildNodes().getLength() > 0) {
            value = n.getFirstChild().getNodeValue();
        } else {
            value = null;
        }
        if (name.equals("publicip")) {
            publicIp = value;
        } else if (name.equals("networkid")) {
            vlanId = value;
        } else if (name.equals("id")) {
            ruleId = value;
        } else if (name.equals("publicport") && value != null) {
            publicPort = Integer.parseInt(value);
        } else if (name.equals("privateport") && value != null) {
            privatePort = Integer.parseInt(value);
        } else if (name.equals("algorithm")) {
            if (value == null || value.equals("roundrobin")) {
                algorithm = LbAlgorithm.ROUND_ROBIN;
            } else if (value.equals("leastconn")) {
                algorithm = LbAlgorithm.LEAST_CONN;
            } else if (value.equals("")) {
                algorithm = LbAlgorithm.SOURCE;
            } else {
                algorithm = LbAlgorithm.ROUND_ROBIN;
            }
        } else if (name.equals("name")) {
            lbName = value;
        } else if (name.equals("description")) {
            lbDesc = value;
        }
    }
    LbListener listener = LbListener.getInstance(algorithm, LbPersistence.NONE, LbProtocol.RAW_TCP, publicPort,
            privatePort);
    Collection<String> serverIds = getServersAt(ruleId);

    if (current.containsKey(publicIp)) {
        LoadBalancer lb = current.get(publicIp);

        @SuppressWarnings("deprecation")
        String[] currentIds = lb.getProviderServerIds();
        LbListener[] listeners = lb.getListeners();

        // TODO: WTF?
        Set<Integer> ports = new TreeSet<Integer>();

        for (int port : lb.getPublicPorts()) {
            ports.add(port);
        }
        ports.add(publicPort);

        int[] portList = new int[ports.size()];
        int i = 0;

        for (Integer p : ports) {
            portList[i++] = p;
        }
        //noinspection deprecation
        lb.setPublicPorts(portList);

        boolean there = false;

        for (LbListener l : listeners) {
            if (l.getAlgorithm().equals(listener.getAlgorithm())) {
                if (l.getNetworkProtocol().equals(listener.getNetworkProtocol())) {
                    if (l.getPublicPort() == listener.getPublicPort()) {
                        if (l.getPrivatePort() == listener.getPrivatePort()) {
                            there = true;
                            break;
                        }
                    }
                }
            }
        }
        if (!there) {
            lb.withListeners(listener);
        }
        // TODO: WTF?
        TreeSet<String> newIds = new TreeSet<String>();

        Collections.addAll(newIds, currentIds);
        for (String id : serverIds) {
            newIds.add(id);
        }
        //noinspection deprecation
        lb.setProviderServerIds(newIds.toArray(new String[newIds.size()]));
        //noinspection deprecation
        lb.setName(lbName);
        //noinspection deprecation
        lb.setDescription(lbDesc);
    } else {
        Collection<DataCenter> dcs = getProvider().getDataCenterServices()
                .listDataCenters(getProvider().getContext().getRegionId());
        String[] ids = new String[dcs.size()];
        int i = 0;

        for (DataCenter dc : dcs) {
            ids[i++] = dc.getProviderDataCenterId();
        }

        LoadBalancer lb = LoadBalancer.getInstance(getContext().getAccountNumber(), getContext().getRegionId(),
                publicIp, LoadBalancerState.ACTIVE, lbName, lbDesc, LoadBalancerAddressType.IP, publicIp,
                publicPort).withListeners(listener).operatingIn(ids);
        lb.forVlan(vlanId);
        //noinspection deprecation
        lb.setProviderServerIds(serverIds.toArray(new String[serverIds.size()]));
        current.put(publicIp, lb);
    }
}
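
The rule-parsing code above uses size() twice: once to pre-size a primitive int[] of public ports (toArray cannot produce primitive arrays) and once to size the String[] of server ids. Here is the primitive-array idiom in isolation, a minimal sketch with hypothetical port numbers:

import java.util.Arrays;
import java.util.TreeSet;

public class PortArrayDemo {
    public static void main(String[] args) {
        TreeSet<Integer> ports = new TreeSet<Integer>();
        ports.add(443);
        ports.add(80);
        ports.add(443); // duplicate, ignored

        // size() fixes the array length up front; iteration order is ascending
        int[] portList = new int[ports.size()];
        int i = 0;
        for (Integer p : ports) {
            portList[i++] = p;
        }
        System.out.println(Arrays.toString(portList)); // [80, 443]
    }
}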

From source file:net.firejack.platform.generate.tools.Render.java

/**
 * @param params the web-service parameters to render
 * @return the rendered parameter declarations, or an empty string if there are none
 */
public String renderWebServiceParams(TreeSet<ServiceParam> params) {
    if (params == null || params.isEmpty())
        return "";
    StringBuilder builder = new StringBuilder();

    int i = 0;
    for (ServiceParam param : params) {
        String name = param.getName();
        ParameterTransmissionType location = param.getLocation();
        if (location == null) {
            builder.append("@WebParam(name = \"request\") ServiceRequest<");
        } else {
            builder.append("@WebParam(name = \"").append(name).append("\") ");
        }

        builder.append(renderType(param));
        if (location == null)
            builder.append(">");
        builder.append(" ").append(name);
        if (i < params.size() - 1) {
            builder.append(",");
        }
        i++;
    }
    return builder.toString();
}
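
Because a TreeSet offers no positional access, the renderer keeps its own counter and compares it with params.size() - 1 to suppress the trailing separator. The same pattern reduced to a sketch, with placeholder element names:

import java.util.TreeSet;

public class JoinDemo {
    public static void main(String[] args) {
        TreeSet<String> params = new TreeSet<String>();
        params.add("b");
        params.add("a");
        params.add("c");

        StringBuilder builder = new StringBuilder();
        int i = 0;
        for (String p : params) {
            builder.append(p);
            if (i < params.size() - 1) { // not the last element yet
                builder.append(", ");
            }
            i++;
        }
        System.out.println(builder); // a, b, c
    }
}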

From source file:org.ihtsdo.otf.snomed.service.ConceptLookUpServiceImpl.java

@Override
@Cacheable(value = { "conceptIds" })
public Set<String> getConceptIds(int offset, int limit) throws ConceptServiceException {
    LOGGER.debug("getting concept ids with offset {} and limit {} ", offset, limit);

    TreeSet<String> conceptIds = new TreeSet<String>();

    TitanGraph g = null;
    try {

        g = factory.getReadOnlyGraph();

        Iterable<Result<Vertex>> vs = g.indexQuery("concept", "v.sctid:*").offset(offset).limit(limit)
                .vertices();

        for (Result<Vertex> v : vs) {

            String sctid = v.getElement().getProperty(Properties.sctid.toString());

            if (!StringUtils.isEmpty(sctid)) {

                LOGGER.trace("Adding sctid {} to concept id list ", sctid);

                conceptIds.add(sctid);

            }

        }

        RefsetGraphFactory.commit(g);

    } catch (Exception e) {

        LOGGER.error("Error duing concept ids fetch ", e);
        RefsetGraphFactory.rollback(g);

        throw new ConceptServiceException(e);

    } finally {

        RefsetGraphFactory.shutdown(g);

    }
    LOGGER.debug("returning total {} concept ids ", conceptIds.size());

    return Collections.unmodifiableSortedSet(conceptIds);
}
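
Here size() serves as a logging statistic before the set is published as a read-only view. Note that size() behaves identically on the wrapper returned by Collections.unmodifiableSortedSet, as this small sketch illustrates:

import java.util.Collections;
import java.util.SortedSet;
import java.util.TreeSet;

public class UnmodifiableDemo {
    public static void main(String[] args) {
        TreeSet<String> ids = new TreeSet<String>();
        ids.add("123");
        ids.add("456");

        SortedSet<String> view = Collections.unmodifiableSortedSet(ids);
        System.out.println(view.size()); // 2, same cardinality as the backing set
    }
}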

From source file:org.unitime.timetable.onlinesectioning.OnlineSectioningServerImpl.java

@Override
public void update(CourseInfo info) {
    iLock.writeLock().lock();
    try {
        CourseInfo old = iCourseForId.get(info.getUniqueId());
        iCourseForId.put(info.getUniqueId(), info);
        TreeSet<CourseInfo> courses = iCourseForName.get(info.toString());
        if (courses == null) {
            courses = new TreeSet<CourseInfo>();
            iCourseForName.put(info.toString(), courses);
        }
        if (old != null) {
            courses.remove(old);
            iCourses.remove(old);
        }
        courses.add(info);
        iCourses.add(info);
        if (courses.size() == 1)
            for (CourseInfo x : courses)
                x.setHasUniqueName(true);
        else if (courses.size() > 1)
            for (CourseInfo x : courses)
                x.setHasUniqueName(false);
    } finally {
        iLock.writeLock().unlock();
    }
}
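
In this snippet size() encodes a business rule: a course name is unique exactly when the TreeSet registered under that name holds a single entry. The branch reduced to its essentials, with an invented course name:

import java.util.TreeSet;

public class UniqueNameDemo {
    public static void main(String[] args) {
        TreeSet<String> courses = new TreeSet<String>();
        courses.add("MATH 101");

        // one element means the name is unique; more than one means a clash
        boolean hasUniqueName = courses.size() == 1;
        System.out.println(hasUniqueName); // true
    }
}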

From source file:org.apache.bookkeeper.stream.storage.impl.sc.DefaultStorageContainerController.java

@Override
public ClusterAssignmentData computeIdealState(ClusterMetadata clusterMetadata,
        ClusterAssignmentData currentState, Set<BookieSocketAddress> currentCluster) {

    if (currentCluster.isEmpty()) {
        log.info("Current cluster is empty. No alive server is found.");
        return currentState;
    }

    // 1. get current server assignments
    Map<BookieSocketAddress, Set<Long>> currentServerAssignments;
    try {
        currentServerAssignments = currentState.getServersMap().entrySet().stream()
                .collect(Collectors.toMap(e1 -> {
                    try {
                        return new BookieSocketAddress(e1.getKey());
                    } catch (UnknownHostException uhe) {
                        log.error("Invalid cluster ");
                        throw new UncheckedExecutionException(
                                "Invalid server found in current assignment map" + e1.getKey(), uhe);
                    }
                }, e2 -> e2.getValue().getContainersList().stream().collect(Collectors.toSet())));
    } catch (UncheckedExecutionException uee) {
        log.warn("Invalid cluster assignment data is found : {} - {}. Recompute assignment from empty state",
                currentState, uee.getCause().getMessage());
        currentServerAssignments = Maps.newHashMap();
    }
    Set<BookieSocketAddress> currentServersAssigned = currentServerAssignments.keySet();

    // 2. if no servers are assigned, initialize the ideal state
    if (currentServersAssigned.isEmpty()) {
        return initializeIdealState(clusterMetadata, currentCluster);
    }

    // 3. get the cluster diffs
    Set<BookieSocketAddress> serversAdded = Sets.difference(currentCluster, currentServersAssigned)
            .immutableCopy();
    Set<BookieSocketAddress> serversRemoved = Sets.difference(currentServersAssigned, currentCluster)
            .immutableCopy();

    if (serversAdded.isEmpty() && serversRemoved.isEmpty()) {
        // cluster is unchanged, assuming the current state is ideal, no re-assignment is required.
        return currentState;
    }

    log.info(
            "Storage container controller detects cluster changed:\n"
                    + "\t {} servers added: {}\n\t {} servers removed: {}",
            serversAdded.size(), serversAdded, serversRemoved.size(), serversRemoved);

    // 4. compute the containers owned by the removed servers; these need to be reassigned.
    Set<Long> containersToReassign = currentServerAssignments.entrySet().stream()
            .filter(serverEntry -> !currentCluster.contains(serverEntry.getKey()))
            .flatMap(serverEntry -> serverEntry.getValue().stream()).collect(Collectors.toSet());

    // 5. use an ordered set as a priority deque, sorting the servers by the number of assigned containers
    TreeSet<Pair<BookieSocketAddress, LinkedList<Long>>> assignmentQueue = new TreeSet<>(
            new ServerAssignmentDataComparator());
    for (Map.Entry<BookieSocketAddress, Set<Long>> entry : currentServerAssignments.entrySet()) {
        BookieSocketAddress host = entry.getKey();

        if (!currentCluster.contains(host)) {
            if (log.isTraceEnabled()) {
                log.trace("Host {} is not in current cluster anymore", host);
            }
            continue;
        } else {
            if (log.isTraceEnabled()) {
                log.trace("Adding host {} to assignment queue", host);
            }
            assignmentQueue.add(Pair.of(host, Lists.newLinkedList(entry.getValue())));
        }
    }

    // 6. add new servers
    for (BookieSocketAddress server : serversAdded) {
        assignmentQueue.add(Pair.of(server, Lists.newLinkedList()));
    }

    // 7. assign the containers that need to be reassigned.
    for (Long containerId : containersToReassign) {
        Pair<BookieSocketAddress, LinkedList<Long>> leastLoadedServer = assignmentQueue.pollFirst();
        leastLoadedServer.getValue().add(containerId);
        assignmentQueue.add(leastLoadedServer);
    }

    // 8. rebalance the containers if needed
    int diffAllowed;
    if (assignmentQueue.size() > clusterMetadata.getNumStorageContainers()) {
        diffAllowed = 1;
    } else {
        diffAllowed = clusterMetadata.getNumStorageContainers() % assignmentQueue.size() == 0 ? 0 : 1;
    }

    Pair<BookieSocketAddress, LinkedList<Long>> leastLoaded = assignmentQueue.first();
    Pair<BookieSocketAddress, LinkedList<Long>> mostLoaded = assignmentQueue.last();
    while (mostLoaded.getValue().size() - leastLoaded.getValue().size() > diffAllowed) {
        leastLoaded = assignmentQueue.pollFirst();
        mostLoaded = assignmentQueue.pollLast();

        // move container from mostLoaded to leastLoaded
        Long containerId = mostLoaded.getValue().removeFirst();
        // add the container to the end to avoid balancing this container again.
        leastLoaded.getValue().addLast(containerId);

        assignmentQueue.add(leastLoaded);
        assignmentQueue.add(mostLoaded);

        leastLoaded = assignmentQueue.first();
        mostLoaded = assignmentQueue.last();
    }

    // 9. the new ideal state is computed, finalize it
    Map<String, ServerAssignmentData> newAssignmentMap = Maps.newHashMap();
    assignmentQueue.forEach(assignment -> newAssignmentMap.put(assignment.getKey().toString(),
            ServerAssignmentData.newBuilder().addAllContainers(assignment.getValue()).build()));
    return ClusterAssignmentData.newBuilder().putAllServers(newAssignmentMap).build();
}
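
In step 8, assignmentQueue.size() is the number of live servers, and the modulo against it decides whether a one-container imbalance must be tolerated. The arithmetic in isolation, a sketch with made-up counts:

public class DiffAllowedDemo {
    public static void main(String[] args) {
        int numContainers = 32;
        int numServers = 5; // e.g. assignmentQueue.size()

        // with more servers than containers, some servers hold 0 or 1
        // containers and a difference of 1 is unavoidable; otherwise a
        // difference of 1 is allowed only when the division is not even
        int diffAllowed;
        if (numServers > numContainers) {
            diffAllowed = 1;
        } else {
            diffAllowed = numContainers % numServers == 0 ? 0 : 1;
        }
        System.out.println(diffAllowed); // 1, because 32 % 5 != 0
    }
}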

From source file:org.geotools.styling.css.CssTranslator.java

/**
 * Organizes the rules by ascending z-index.
 *
 * @param rules the rules to organize
 * @return a map from z-index to the rules that apply at that level
 */
private Map<Integer, List<CssRule>> organizeByZIndex(List<CssRule> rules) {
    TreeSet<Integer> indexes = getZIndexesForRules(rules);
    Map<Integer, List<CssRule>> result = new HashMap<>();
    if (indexes.size() == 1) {
        result.put(indexes.first(), rules);
    } else {
        // now for each level extract the sub-rules attached to that level,
        // considering that properties not associated to a level, bind to all levels
        int symbolizerPropertyCount = 0;
        for (Integer index : indexes) {
            List<CssRule> rulesByIndex = new ArrayList<>();
            for (CssRule rule : rules) {
                CssRule subRule = rule.getSubRuleByZIndex(index);
                if (subRule != null) {
                    if (subRule.hasSymbolizerProperty()) {
                        symbolizerPropertyCount++;
                    }
                    rulesByIndex.add(subRule);
                }
            }
            // do we have at least one property that will trigger the generation
            // of a symbolizer in here?
            if (symbolizerPropertyCount > 0) {
                result.put(index, rulesByIndex);
            }
        }
    }

    return result;
}
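
The indexes.size() == 1 test is a fast path: when every rule lives at a single z-index there is nothing to split, and the whole rule list maps to indexes.first(). A minimal sketch of that shortcut, with strings standing in for CssRule:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;

public class ZIndexDemo {
    public static void main(String[] args) {
        TreeSet<Integer> indexes = new TreeSet<Integer>();
        indexes.add(0); // every rule sits at z-index 0

        List<String> rules = new ArrayList<String>();
        rules.add("rule1");

        Map<Integer, List<String>> result = new HashMap<Integer, List<String>>();
        if (indexes.size() == 1) {
            result.put(indexes.first(), rules); // single level, no splitting
        }
        System.out.println(result); // {0=[rule1]}
    }
}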

From source file:org.jumpmind.symmetric.util.SnapshotUtil.java

public static File createSnapshot(ISymmetricEngine engine) {

    String dirName = engine.getEngineName().replaceAll(" ", "-") + "-"
            + new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());

    IParameterService parameterService = engine.getParameterService();
    File tmpDir = new File(parameterService.getTempDirectory(), dirName);
    tmpDir.mkdirs();

    File logDir = null;

    String parameterizedLogDir = parameterService.getString("server.log.dir");
    if (isNotBlank(parameterizedLogDir)) {
        logDir = new File(parameterizedLogDir);
    }

    if (logDir != null && logDir.exists()) {
        log.info("Using server.log.dir setting as the location of the log files");
    } else {
        logDir = new File("logs");

        if (!logDir.exists()) {
            File file = findSymmetricLogFile();
            if (file != null) {
                logDir = file.getParentFile();
            }
        }

        if (!logDir.exists()) {
            logDir = new File("../logs");
        }

        if (!logDir.exists()) {
            logDir = new File("target");
        }

        if (logDir.exists()) {
            File[] files = logDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.getName().toLowerCase().endsWith(".log")) {
                        try {
                            FileUtils.copyFileToDirectory(file, tmpDir);
                        } catch (IOException e) {
                            log.warn("Failed to copy " + file.getName() + " to the snapshot directory", e);
                        }
                    }
                }
            }
        }

    }

    ITriggerRouterService triggerRouterService = engine.getTriggerRouterService();
    List<TriggerHistory> triggerHistories = triggerRouterService.getActiveTriggerHistories();
    TreeSet<Table> tables = new TreeSet<Table>();
    for (TriggerHistory triggerHistory : triggerHistories) {
        Table table = engine.getDatabasePlatform().getTableFromCache(triggerHistory.getSourceCatalogName(),
                triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
        if (table != null && !table.getName().toUpperCase()
                .startsWith(engine.getSymmetricDialect().getTablePrefix().toUpperCase())) {
            tables.add(table);
        }
    }

    List<Trigger> triggers = triggerRouterService.getTriggers(true);
    for (Trigger trigger : triggers) {
        Table table = engine.getDatabasePlatform().getTableFromCache(trigger.getSourceCatalogName(),
                trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
        if (table != null) {
            tables.add(table);
        }
    }

    FileWriter fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "config-export.csv"));
        engine.getDataExtractorService().extractConfigurationStandalone(engine.getNodeService().findIdentity(),
                fwriter, TableConstants.SYM_NODE, TableConstants.SYM_NODE_SECURITY,
                TableConstants.SYM_NODE_IDENTITY, TableConstants.SYM_NODE_HOST,
                TableConstants.SYM_NODE_CHANNEL_CTL, TableConstants.SYM_CONSOLE_USER);
    } catch (IOException e) {
        log.warn("Failed to export symmetric configuration", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }

    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "table-definitions.xml"));
        DbExport export = new DbExport(engine.getDatabasePlatform());
        export.setFormat(Format.XML);
        export.setNoData(true);
        export.exportTables(fos, tables.toArray(new Table[tables.size()]));
    } catch (IOException e) {
        log.warn("Failed to export table definitions", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    String tablePrefix = engine.getTablePrefix();

    DbExport export = new DbExport(engine.getDatabasePlatform());
    export.setFormat(Format.CSV);
    export.setNoCreateInfo(true);

    extract(export, new File(tmpDir, "identity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_IDENTITY));

    extract(export, new File(tmpDir, "node.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE));

    extract(export, new File(tmpDir, "nodesecurity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_SECURITY));

    extract(export, new File(tmpDir, "nodehost.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_HOST));

    extract(export, new File(tmpDir, "triggerhist.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_TRIGGER_HIST));

    extract(export, new File(tmpDir, "lock.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_LOCK));

    extract(export, new File(tmpDir, "nodecommunication.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_COMMUNICATION));

    extract(export, 5000, new File(tmpDir, "outgoingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_OUTGOING_BATCH));

    extract(export, 5000, new File(tmpDir, "incomingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_INCOMING_BATCH));

    final int THREAD_INDENT_SPACE = 50;
    fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "threads.txt"));
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
        long[] threadIds = threadBean.getAllThreadIds();
        for (long l : threadIds) {
            ThreadInfo info = threadBean.getThreadInfo(l, 100);
            if (info != null) {
                String threadName = info.getThreadName();
                fwriter.append(StringUtils.rightPad(threadName, THREAD_INDENT_SPACE));
                StackTraceElement[] trace = info.getStackTrace();
                boolean first = true;
                for (StackTraceElement stackTraceElement : trace) {
                    if (!first) {
                        fwriter.append(StringUtils.rightPad("", THREAD_INDENT_SPACE));
                    } else {
                        first = false;
                    }
                    fwriter.append(stackTraceElement.getClassName());
                    fwriter.append(".");
                    fwriter.append(stackTraceElement.getMethodName());
                    fwriter.append("()");
                    int lineNumber = stackTraceElement.getLineNumber();
                    if (lineNumber > 0) {
                        fwriter.append(": ");
                        fwriter.append(Integer.toString(stackTraceElement.getLineNumber()));
                    }
                    fwriter.append("\n");
                }
                fwriter.append("\n");
            }
        }
    } catch (IOException e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters.properties"));
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        SortedProperties parameters = new SortedProperties();
        parameters.putAll(effectiveParameters);
        parameters.remove("db.password");
        parameters.store(fos, "parameters.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameter information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters-changed.properties"));
        Properties defaultParameters = new Properties();
        InputStream in = SnapshotUtil.class.getResourceAsStream("/symmetric-default.properties");
        defaultParameters.load(in);
        IOUtils.closeQuietly(in);
        in = SnapshotUtil.class.getResourceAsStream("/symmetric-console-default.properties");
        if (in != null) {
            defaultParameters.load(in);
            IOUtils.closeQuietly(in);
        }
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        Properties changedParameters = new SortedProperties();
        Map<String, ParameterMetaData> parameters = ParameterConstants.getParameterMetaData();
        for (String key : parameters.keySet()) {
            String defaultValue = defaultParameters.getProperty((String) key);
            String currentValue = effectiveParameters.getProperty((String) key);
            if (defaultValue == null && currentValue != null
                    || (defaultValue != null && !defaultValue.equals(currentValue))) {
                changedParameters.put(key, currentValue == null ? "" : currentValue);
            }
        }
        changedParameters.remove("db.password");
        changedParameters.store(fos, "parameters-changed.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameters-changed information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    writeRuntimeStats(engine, tmpDir);
    writeJobsStats(engine, tmpDir);

    if ("true".equals(System.getProperty(SystemConstants.SYSPROP_STANDALONE_WEB))) {
        writeDirectoryListing(engine, tmpDir);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "system.properties"));
        SortedProperties props = new SortedProperties();
        props.putAll(System.getProperties());
        props.store(fos, "system.properties");
    } catch (IOException e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    try {
        File jarFile = new File(getSnapshotDirectory(engine), tmpDir.getName() + ".zip");
        JarBuilder builder = new JarBuilder(tmpDir, jarFile, new File[] { tmpDir }, Version.version());
        builder.build();
        FileUtils.deleteDirectory(tmpDir);
        return jarFile;
    } catch (IOException e) {
        throw new IoException("Failed to package snapshot files into archive", e);
    }
}
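
The snapshot code collects tables from two sources into one TreeSet, so duplicates collapse, and tables.size() then supplies the exact length for the toArray call. The pre-sized toArray idiom on its own, a sketch with hypothetical table names:

import java.util.TreeSet;

public class ToArrayDemo {
    public static void main(String[] args) {
        TreeSet<String> tables = new TreeSet<String>();
        tables.add("node");
        tables.add("trigger");
        tables.add("node"); // seen via a second source, stored once

        // pre-sizing with size() lets toArray fill the given array directly
        String[] arr = tables.toArray(new String[tables.size()]);
        System.out.println(arr.length); // 2
    }
}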

From source file:org.commonjava.maven.galley.filearc.internal.ZipListing.java

@Override
public ListingResult call() {
    final File src = getArchiveFile(resource.getLocationUri());
    if (!src.canRead() || src.isDirectory()) {
        return null;
    }

    final boolean isJar = isJar(resource.getLocationUri());

    final TreeSet<String> filenames = new TreeSet<String>();

    ZipFile zf = null;
    try {
        if (isJar) {
            zf = new JarFile(src);
        } else {
            zf = new ZipFile(src);
        }

        final String path = resource.getPath();
        final int pathLen = path.length();
        for (final ZipEntry entry : Collections.list(zf.entries())) {
            String name = entry.getName();
            if (name.startsWith(path)) {
                name = name.substring(pathLen);

                if (name.startsWith("/") && name.length() > 1) {
                    name = name.substring(1);

                    if (name.indexOf("/") < 0) {
                        filenames.add(name);
                    }
                }
            }
        }

    } catch (final IOException e) {
        error = new TransferException("Failed to get listing for: %s to: %s. Reason: %s", e, resource,
                e.getMessage());
    } finally {
        if (zf != null) {
            try {
                zf.close();
            } catch (final IOException e) {
            }
        }
    }

    if (!filenames.isEmpty()) {
        OutputStream stream = null;
        try {
            stream = target.openOutputStream(TransferOperation.DOWNLOAD);
            stream.write(join(filenames, "\n").getBytes("UTF-8"));

            return new ListingResult(resource, filenames.toArray(new String[filenames.size()]));
        } catch (final IOException e) {
            error = new TransferException("Failed to write listing to: %s. Reason: %s", e, target,
                    e.getMessage());
        } finally {
            closeQuietly(stream);
        }
    }

    return null;
}
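
Note the guard before writing the listing: !filenames.isEmpty() is equivalent to checking size() against zero but reads more clearly; both are O(1) on a TreeSet. A tiny sketch of the guard together with a size()-based summary:

import java.util.TreeSet;

public class EmptyCheckDemo {
    public static void main(String[] args) {
        TreeSet<String> filenames = new TreeSet<String>();
        filenames.add("a.jar");

        if (!filenames.isEmpty()) {
            System.out.println(filenames.size() + " entries"); // 1 entries
        } else {
            System.out.println("no listing");
        }
    }
}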

From source file:org.overlord.dtgov.taskapi.TaskApi.java

/**
 * Gets a list of all tasks for the authenticated user.  Filters the list based on the
 * criteria included in the {@link FindTasksRequest}.
 * @param findTasksRequest
 * @param httpRequest
 * @throws Exception
 */
@POST
@Path("find")
@Produces(MediaType.APPLICATION_XML)
@Consumes(MediaType.APPLICATION_XML)
public FindTasksResponse findTasks(final FindTasksRequest findTasksRequest,
        @Context HttpServletRequest httpRequest) throws Exception {
    String currentUser = assertCurrentUser(httpRequest);

    FindTasksResponse response = new FindTasksResponse();

    // Get all tasks - the ones assigned as potential owner *and* the ones assigned as owner.  If
    // there is overlap we'll deal with that during the sort.
    String language = "en-UK"; //$NON-NLS-1$
    //        if (httpRequest.getLocale() != null) {
    //            language = httpRequest.getLocale().toString();
    //        }
    List<TaskSummary> list = taskService.getTasksAssignedAsPotentialOwner(currentUser, language);
    list.addAll(taskService.getTasksOwned(currentUser, language));

    final String orderBy = findTasksRequest.getOrderBy() == null ? "priority" : findTasksRequest.getOrderBy(); //$NON-NLS-1$
    final boolean ascending = findTasksRequest.isOrderAscending();
    TreeSet<TaskSummary> sortedFiltered = new TreeSet<TaskSummary>(
            new TaskSummaryComparator(orderBy, ascending));

    for (TaskSummary task : list) {
        if (accepts(task, findTasksRequest)) {
            sortedFiltered.add(task);
        }
    }

    int startIdx = findTasksRequest.getStartIndex();
    int endIdx = findTasksRequest.getEndIndex();
    int idx = 0;
    for (TaskSummary task : sortedFiltered) {
        if (idx >= startIdx && idx <= endIdx) {
            TaskSummaryType taskSummary = new TaskSummaryType();
            taskSummary.setId(String.valueOf(task.getId()));
            taskSummary.setName(task.getName());
            User actualOwner = task.getActualOwner();
            if (actualOwner != null) {
                taskSummary.setOwner(actualOwner.getId());
            }
            taskSummary.setPriority(task.getPriority());
            taskSummary.setStatus(StatusType.fromValue(task.getStatus().toString()));
            response.getTaskSummary().add(taskSummary);
        }
        idx++;
    }
    response.setTotalResults(sortedFiltered.size());
    return response;
}
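
The response reports the total match count via sortedFiltered.size() while only the [startIdx, endIdx] window is serialized, a standard paging pattern. Condensed into a sketch with synthetic task names:

import java.util.TreeSet;

public class PagingDemo {
    public static void main(String[] args) {
        TreeSet<String> sorted = new TreeSet<String>();
        for (int i = 0; i < 10; i++) {
            sorted.add(String.format("task-%02d", i));
        }

        int startIdx = 2, endIdx = 4, idx = 0;
        for (String task : sorted) {
            if (idx >= startIdx && idx <= endIdx) {
                System.out.println(task); // task-02, task-03, task-04
            }
            idx++;
        }
        // the total reflects everything that matched, not just the page
        System.out.println("total=" + sorted.size()); // total=10
    }
}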

From source file:net.semanticmetadata.lire.solr.LireRequestHandler.java

/**
 * Actual search implementation based on (i) hash based retrieval and (ii) feature based re-ranking.
 *
 * @param rsp
 * @param searcher
 * @param hashFieldName the hash field name
 * @param maximumHits
 * @param terms
 * @param queryFeature
 * @throws IOException
 * @throws IllegalAccessException
 * @throws InstantiationException
 */
private void doSearch(SolrQueryRequest req, SolrQueryResponse rsp, SolrIndexSearcher searcher,
        String hashFieldName, int maximumHits, List<Term> terms, Query query, LireFeature queryFeature)
        throws IOException, IllegalAccessException, InstantiationException {
    // temp feature instance
    LireFeature tmpFeature = queryFeature.getClass().newInstance();
    // Taking the time of search for statistical purposes.
    time = System.currentTimeMillis();

    Filter filter = null;
    // if the request contains a filter:
    if (req.getParams().get("fq") != null) {
        // only filters with [<field>:<value> ]+ are supported
        StringTokenizer st = new StringTokenizer(req.getParams().get("fq"), " ");
        LinkedList<Term> filterTerms = new LinkedList<Term>();
        while (st.hasMoreElements()) {
            String[] tmpToken = st.nextToken().split(":");
            if (tmpToken.length > 1) {
                filterTerms.add(new Term(tmpToken[0], tmpToken[1]));
            }
        }
        if (filterTerms.size() > 0)
            filter = new TermsFilter(filterTerms);
    }

    TopDocs docs; // with query only.
    if (filter == null) {
        docs = searcher.search(query, numberOfCandidateResults);
    } else {
        docs = searcher.search(query, filter, numberOfCandidateResults);
    }
    //        TopDocs docs = searcher.search(query, new TermsFilter(terms), numberOfCandidateResults);   // with TermsFilter and boosting by simple query
    //        TopDocs docs = searcher.search(new ConstantScoreQuery(new TermsFilter(terms)), numberOfCandidateResults); // just with TermsFilter
    time = System.currentTimeMillis() - time;
    rsp.add("RawDocsCount", docs.scoreDocs.length + "");
    rsp.add("RawDocsSearchTime", time + "");
    // re-rank
    time = System.currentTimeMillis();
    TreeSet<SimpleResult> resultScoreDocs = new TreeSet<SimpleResult>();
    float maxDistance = -1f;
    float tmpScore;

    String featureFieldName = FeatureRegistry.getFeatureFieldName(hashFieldName);
    // iterating and re-ranking the documents.
    BinaryDocValues binaryValues = MultiDocValues.getBinaryValues(searcher.getIndexReader(), featureFieldName); // ***  #
    BytesRef bytesRef;// = new BytesRef();
    for (int i = 0; i < docs.scoreDocs.length; i++) {
        // using DocValues to retrieve the field values ...
        bytesRef = binaryValues.get(docs.scoreDocs[i].doc);
        tmpFeature.setByteArrayRepresentation(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        // Getting the document from the index.
        // This is the slow step based on the field compression of stored fields.
        //            tmpFeature.setByteArrayRepresentation(d.getBinaryValue(name).bytes, d.getBinaryValue(name).offset, d.getBinaryValue(name).length);
        tmpScore = queryFeature.getDistance(tmpFeature);
        if (resultScoreDocs.size() < maximumHits) { // todo: There's potential here for a memory saver, think of a clever data structure that can do the trick without creating a new SimpleResult for each result.
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            maxDistance = resultScoreDocs.last().getDistance();
        } else if (tmpScore < maxDistance) {
            //                if it is nearer to the sample than at least one of the current set:
            //                remove the last one ...
            resultScoreDocs.remove(resultScoreDocs.last());
            //                add the new one ...
            resultScoreDocs.add(
                    new SimpleResult(tmpScore, searcher.doc(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
            //                and set our new distance border ...
            maxDistance = resultScoreDocs.last().getDistance();
        }
    }
    //        System.out.println("** Creating response.");
    time = System.currentTimeMillis() - time;
    rsp.add("ReRankSearchTime", time + "");
    LinkedList list = new LinkedList();
    for (Iterator<SimpleResult> it = resultScoreDocs.iterator(); it.hasNext();) {
        SimpleResult result = it.next();
        HashMap m = new HashMap(2);
        m.put("d", result.getDistance());
        // add fields as requested:
        if (req.getParams().get("fl") == null) {
            m.put("id", result.getDocument().get("id"));
            if (result.getDocument().get("title") != null)
                m.put("title", result.getDocument().get("title"));
        } else {
            String fieldsRequested = req.getParams().get("fl");
            if (fieldsRequested.contains("score")) {
                m.put("score", result.getDistance());
            }
            if (fieldsRequested.contains("*")) {
                // all fields
                for (IndexableField field : result.getDocument().getFields()) {
                    String tmpField = field.name();
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            } else {
                StringTokenizer st;
                if (fieldsRequested.contains(","))
                    st = new StringTokenizer(fieldsRequested, ",");
                else
                    st = new StringTokenizer(fieldsRequested, " ");
                while (st.hasMoreElements()) {
                    String tmpField = st.nextToken();
                    if (result.getDocument().getFields(tmpField).length > 1) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getValues(tmpField));
                    } else if (result.getDocument().getFields(tmpField).length > 0) {
                        m.put(result.getDocument().getFields(tmpField)[0].name(),
                                result.getDocument().getFields(tmpField)[0].stringValue());
                    }
                }
            }
        }
        //            m.put(field, result.getDocument().get(field));
        //            m.put(field.replace("_ha", "_hi"), result.getDocument().getBinaryValue(field));
        list.add(m);
    }
    rsp.add("docs", list);
    // rsp.add("Test-name", "Test-val");
}
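
The re-ranking loop maintains a bounded top-k set: candidates are added while resultScoreDocs.size() is below maximumHits, after which only a closer match may evict last(), the current worst entry. The eviction policy on its own, a sketch with plain doubles standing in for SimpleResult:

import java.util.TreeSet;

public class TopKDemo {
    public static void main(String[] args) {
        int maximumHits = 3;
        TreeSet<Double> best = new TreeSet<Double>(); // smaller distance = better
        double[] distances = { 0.9, 0.2, 0.7, 0.4, 0.8, 0.1 };

        for (double d : distances) {
            if (best.size() < maximumHits) {
                best.add(d);              // still filling up to the limit
            } else if (d < best.last()) {
                best.remove(best.last()); // evict the current worst ...
                best.add(d);              // ... and keep the closer match
            }
        }
        System.out.println(best); // [0.1, 0.2, 0.4]
    }
}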