Example usage for java.util LinkedHashSet add

List of usage examples for java.util LinkedHashSet add

Introduction

In this page you can find the example usage for java.util LinkedHashSet add.

Prototype

boolean add(E e);

Source Link

Document

Adds the specified element to this set if it is not already present (optional operation).

Usage

From source file:org.apache.ws.scout.registry.BusinessQueryManagerImpl.java

/**
 * Finds all Service objects that match all of the criteria specified by
 * the parameters of this call.  This is a logical AND operation between
 * all non-null parameters.
 *
 * TODO - support findQualifiers, classifications and specifications
 *
 * @param orgKey key of the owning organization; may be null (not required for UDDI2)
 * @param findQualifiers collection of find-qualifier strings
 * @param namePatterns collection of name pattern strings
 * @param classifications collection of Classification objects
 * @param specificationa collection of specification objects (name kept as-is
 *        for interface compatibility)
 * @return BulkResponse holding the matching JAXR Service objects
 * @throws JAXRException if the underlying registry query fails
 */
public BulkResponse findServices(Key orgKey, Collection findQualifiers, Collection namePatterns,
        Collection classifications, Collection specificationa) throws JAXRException {
    BulkResponseImpl blkRes = new BulkResponseImpl();

    IRegistry iRegistry = (IRegistry) registryService.getRegistry();
    FindQualifiers juddiFindQualifiers = mapFindQualifiers(findQualifiers);
    Name[] juddiNames = mapNamePatterns(namePatterns);

    try {
        // Hit the registry.  The organization key is not required for UDDI2.
        String id = (orgKey != null) ? orgKey.getId() : null;

        ServiceList serviceList = iRegistry.findService(id, juddiNames,
                ScoutJaxrUddiHelper.getCategoryBagFromClassifications(classifications), null,
                juddiFindQualifiers, registryService.getMaxRows());

        // Convert from jUDDI ServiceInfo objects to JAXR Services,
        // preserving registry order.
        if (serviceList != null) {
            ServiceInfos serviceInfos = serviceList.getServiceInfos();
            LinkedHashSet<Service> col = new LinkedHashSet<Service>();

            if (serviceInfos != null && serviceInfos.getServiceInfo() != null) {
                for (ServiceInfo si : serviceInfos.getServiceInfo()) {
                    Service srv = (Service) getRegistryObject(si.getServiceKey(), LifeCycleManager.SERVICE);
                    col.add(srv);
                }
            }
            blkRes.setCollection(col);
        }
    } catch (RegistryException e) {
        // Chain the cause so callers keep the original stack trace.
        throw new JAXRException(e.getLocalizedMessage(), e);
    }

    return blkRes;
}

From source file:org.apache.ws.scout.registry.BusinessQueryManagerV3Impl.java

/**
 * Finds all ServiceBinding objects of the given service that match the
 * supplied criteria.
 *
 * @param serviceKey key of the service whose bindings are searched (required)
 * @param findQualifiers collection of find-qualifier strings
 * @param classifications collection of Classification objects
 * @param specifications collection of specification objects
 * @return BulkResponse holding the matching ServiceBinding objects
 * @throws JAXRException if the underlying registry query fails
 */
public BulkResponse findServiceBindings(Key serviceKey, Collection findQualifiers, Collection classifications,
        Collection specifications) throws JAXRException {
    BulkResponseImpl blkRes = new BulkResponseImpl();

    IRegistryV3 iRegistry = (IRegistryV3) registryService.getRegistry();
    FindQualifiers juddiFindQualifiers = mapFindQualifiers(findQualifiers);

    try {
        BindingDetail bindingDetail = iRegistry.findBinding(serviceKey.getId(),
                ScoutJaxrUddiV3Helper.getCategoryBagFromClassifications(classifications),
                ScoutJaxrUddiV3Helper.getTModelBagFromSpecifications(specifications), juddiFindQualifiers,
                registryService.getMaxRows());

        // Convert from jUDDI BindingTemplate objects to JAXR ServiceBindings.
        if (bindingDetail != null) {
            LinkedHashSet<ServiceBinding> col = new LinkedHashSet<ServiceBinding>();

            // The owning Service is the same for every binding (it is keyed by
            // serviceKey, which is loop-invariant), so fetch it at most once.
            Service s = null;

            for (BindingTemplate bindingTemplate : bindingDetail.getBindingTemplate()) {
                ServiceBinding sb = ScoutUddiV3JaxrHelper.getServiceBinding(bindingTemplate,
                        registryService.getBusinessLifeCycleManager());
                col.add(sb);
                // Fill the Service object by making (one) call to the registry.
                if (s == null) {
                    s = (Service) getRegistryObject(serviceKey.getId(), LifeCycleManager.SERVICE);
                }
                ((ServiceBindingImpl) sb).setService(s);
            }

            blkRes.setCollection(col);
        }
    } catch (RegistryV3Exception e) {
        // Chain the cause so callers keep the original stack trace.
        throw new JAXRException(e.getLocalizedMessage(), e);
    }

    return blkRes;
}

From source file:com.ehsy.solr.util.SimplePostTool.java

/**
 * Takes a list of start URL strings for crawling, adds each one to the
 * level-0 backlog, and then starts recursive crawling.
 * @param args the raw input args from main()
 * @param startIndexInArgs offset for where to start
 * @param out outputStream to write results to
 * @return the number of web pages posted
 */
public int postWebPages(String[] args, int startIndexInArgs, OutputStream out) {
    reset();
    LinkedHashSet<URL> startUrls = new LinkedHashSet<>();
    int idx = startIndexInArgs;
    while (idx < args.length) {
        String candidate = args[idx++];
        try {
            startUrls.add(new URL(normalizeUrlEnding(candidate)));
        } catch (MalformedURLException e) {
            warn("Skipping malformed input URL: " + candidate);
        }
    }
    // Seed level 0 of the backlog and crawl recursively from there.
    backlog.add(startUrls);
    return webCrawl(0, out);
}

From source file:org.apache.ws.scout.registry.BusinessQueryManagerImpl.java

/**
 * Finds all ServiceBinding objects of the given service that match the
 * supplied criteria.
 *
 * @param serviceKey key of the service whose bindings are searched (required)
 * @param findQualifiers collection of find-qualifier strings
 * @param classifications collection of Classification objects
 * @param specifications collection of specification objects
 * @return BulkResponse holding the matching ServiceBinding objects
 * @throws JAXRException if the underlying registry query fails
 */
public BulkResponse findServiceBindings(Key serviceKey, Collection findQualifiers, Collection classifications,
        Collection specifications) throws JAXRException {
    BulkResponseImpl blkRes = new BulkResponseImpl();

    IRegistry iRegistry = (IRegistry) registryService.getRegistry();
    FindQualifiers juddiFindQualifiers = mapFindQualifiers(findQualifiers);

    try {
        BindingDetail bindingDetail = iRegistry.findBinding(serviceKey.getId(),
                ScoutJaxrUddiHelper.getCategoryBagFromClassifications(classifications),
                ScoutJaxrUddiHelper.getTModelBagFromSpecifications(specifications), juddiFindQualifiers,
                registryService.getMaxRows());

        // Convert from jUDDI BindingTemplate objects to JAXR ServiceBindings.
        if (bindingDetail != null) {
            LinkedHashSet<ServiceBinding> col = new LinkedHashSet<ServiceBinding>();

            // The owning Service is the same for every binding (it is keyed by
            // serviceKey, which is loop-invariant), so fetch it at most once.
            Service s = null;

            for (BindingTemplate bindingTemplate : bindingDetail.getBindingTemplate()) {
                ServiceBinding sb = ScoutUddiJaxrHelper.getServiceBinding(bindingTemplate,
                        registryService.getBusinessLifeCycleManager());
                col.add(sb);
                // Fill the Service object by making (one) call to the registry.
                if (s == null) {
                    s = (Service) getRegistryObject(serviceKey.getId(), LifeCycleManager.SERVICE);
                }
                ((ServiceBindingImpl) sb).setService(s);
            }

            blkRes.setCollection(col);
        }
    } catch (RegistryException e) {
        // Chain the cause so callers keep the original stack trace.
        throw new JAXRException(e.getLocalizedMessage(), e);
    }

    return blkRes;
}

From source file:org.osaf.cosmo.atom.provider.ItemCollectionAdapter.java

/**
 * Applies an Atom entry's content to an existing note item and persists
 * the result, with special handling for recurring events: if the event
 * stamp disappears during the update, all modifications are removed; if
 * the master start date shifts, every modification's recurrenceId is
 * shifted by the same delta.
 *
 * @param processor content processor used to apply the entry's content
 * @param entry the Atom entry carrying the update
 * @param item the note item being updated
 * @return the updated note item (possibly a new instance from the service)
 * @throws ValidationException if the entry content is invalid
 * @throws ProcessorException if the content cannot be processed
 */
private NoteItem processEntryUpdate(ContentProcessor processor, Entry entry, NoteItem item)
        throws ValidationException, ProcessorException {
    EventStamp es = StampUtils.getEventStamp(item);
    // Capture the pre-update start date only for recurring events; a
    // non-null value below signals that recurrence bookkeeping is needed.
    Date oldstart = es != null && es.isRecurring() ? es.getStartDate() : null;

    processor.processContent(entry.getContent(), item);

    // oldStart will have a value if the item has an EventStamp
    // and the EventStamp is recurring
    if (oldstart != null) {
        es = StampUtils.getEventStamp(item);
        // Case 1: EventStamp was removed from recurring event, so we
        // have to remove all modifications (a modification doesn't make
        // sense if there is no recurring event)
        if (es == null) {
            LinkedHashSet<ContentItem> updates = new LinkedHashSet<ContentItem>();
            for (NoteItem mod : item.getModifications()) {
                mod.setIsActive(false); // deactivate so the update removes it
                updates.add(mod);
            }
            updates.add(item);

            // Update item and remove modifications in one atomic service call
            contentService.updateContentItems(item.getParents(), updates);
        }
        // Case 2: Start date may have changed on master event.
        else {
            Date newstart = es.getStartDate();

            // If changed, we have to update all the recurrenceIds
            // for any modifications
            if (newstart != null && !newstart.equals(oldstart)) {
                long delta = newstart.getTime() - oldstart.getTime();
                if (log.isDebugEnabled())
                    log.debug("master event start date changed; " + "adjusting modifications by " + delta
                            + " milliseconds");

                LinkedHashSet<ContentItem> updates = new LinkedHashSet<ContentItem>();
                HashSet<NoteItem> copies = new HashSet<NoteItem>();
                HashSet<NoteItem> removals = new HashSet<NoteItem>();

                // copy each modification and update the copy's uid
                // with the new start date
                Iterator<NoteItem> mi = item.getModifications().iterator();
                while (mi.hasNext()) {
                    NoteItem mod = mi.next();

                    // ignore modifications without event stamp
                    if (StampUtils.getEventExceptionStamp(mod) == null)
                        continue;

                    // The uid encodes the recurrenceId, so a shifted
                    // recurrenceId requires a brand-new item: deactivate
                    // the old modification and create an adjusted copy.
                    mod.setIsActive(false);
                    removals.add(mod);

                    NoteItem copy = (NoteItem) mod.copy();
                    copy.setModifies(item);

                    EventExceptionStamp ees = StampUtils.getEventExceptionStamp(copy);

                    DateTime oldRid = (DateTime) ees.getRecurrenceId();
                    java.util.Date newRidTime = new java.util.Date(oldRid.getTime() + delta);
                    DateTime newRid = (DateTime) Dates.getInstance(newRidTime, Value.DATE_TIME);
                    // Preserve the original recurrenceId's timezone semantics.
                    if (oldRid.isUtc())
                        newRid.setUtc(true);
                    else
                        newRid.setTimeZone(oldRid.getTimeZone());

                    copy.setUid(new ModificationUid(item, newRid).toString());
                    ees.setRecurrenceId(newRid);

                    // If the modification's dtstart is missing, then
                    // we have to adjust dtstart to be equal to the
                    // recurrenceId.
                    if (isDtStartMissing(StampUtils.getBaseEventStamp(mod))) {
                        ees.setStartDate(ees.getRecurrenceId());
                    }

                    copies.add(copy);
                }

                // add removals first
                updates.addAll(removals);
                // then additions
                updates.addAll(copies);
                // then updates
                updates.add(item);

                // Update everything in one atomic service call
                contentService.updateContentItems(item.getParents(), updates);
            } else {
                // otherwise use simple update
                item = (NoteItem) contentService.updateContent((ContentItem) item);
            }
        }
    } else {
        // use simple update
        item = (NoteItem) contentService.updateContent((ContentItem) item);
    }

    return item;
}

From source file:org.apache.ws.scout.registry.BusinessQueryManagerImpl.java

/**
 * Finds ClassificationScheme objects matching the given criteria.  Only
 * the first name pattern is currently honored; classifications and
 * external links are not yet supported.
 *
 * @param findQualifiers collection of find-qualifier strings
 * @param namePatterns collection of name pattern strings (only the first is used)
 * @param classifications currently ignored (TODO)
 * @param externalLinks currently ignored (TODO)
 * @return BulkResponse containing at most one ClassificationScheme
 * @throws JAXRException if the underlying lookup fails
 */
public BulkResponse findClassificationSchemes(Collection findQualifiers, Collection namePatterns,
        Collection classifications, Collection externalLinks) throws JAXRException {
    //TODO: Handle this better
    LinkedHashSet<ClassificationScheme> col = new LinkedHashSet<ClassificationScheme>();

    // Only the first name pattern is consulted; "" is used when none given.
    String name = "";
    Iterator iter = namePatterns.iterator();
    if (iter.hasNext()) {
        name = (String) iter.next();
    }

    ClassificationScheme classificationScheme = findClassificationSchemeByName(findQualifiers, name);
    if (classificationScheme != null) {
        col.add(classificationScheme);
    }
    return new BulkResponseImpl(col);
}

From source file:Simulator.PerformanceCalculation.java

/**
 * Builds a bar chart of the average patient wait time per surgeon.
 * Each trace entry contributes its two wait-time samples to the owning
 * surgeon's bucket; the bucket average is then scaled by 1/600 and
 * rounded for display.
 *
 * @return a ChartPanel wrapping the generated bar chart
 */
public JPanel waitTime() {
    // Surgeon ids in first-seen order (raw types replaced with generics).
    LinkedHashSet<Integer> surgeonIds = new LinkedHashSet<Integer>();
    LinkedHashMap<Integer, ArrayList<Double>> wait1 = new LinkedHashMap<>();

    for (Map.Entry<Integer, TraceObject> entry : l.getLocalTrace().entrySet()) {
        TraceObject traceObject = entry.getValue();

        // Single lookup instead of the previous get-then-get pattern.
        ArrayList<Double> details = wait1.get(traceObject.getSurgeonId());
        if (details == null) {
            details = new ArrayList<Double>();
            wait1.put(traceObject.getSurgeonId(), details);
        }
        details.add(traceObject.getWaitTime1());
        details.add(traceObject.getWaitTime2());

        surgeonIds.add(traceObject.getSurgeonId());
    }
    String[] column = new String[surgeonIds.size()];

    String series1 = "Wait Time";
    for (int i = 0; i < column.length; i++) {
        column[i] = "Surgeon " + (i + 1);
    }

    DefaultCategoryDataset dataset = new DefaultCategoryDataset();

    // Average = sum of samples / number of patients (two samples each,
    // hence size()/2; integer division is intentional and exact here).
    LinkedHashMap<Integer, Double> average = new LinkedHashMap<>();
    for (Map.Entry<Integer, ArrayList<Double>> entry : wait1.entrySet()) {
        double total = 0;
        for (Double sample : entry.getValue()) {
            total += sample;
        }
        average.put(entry.getKey(), total / (entry.getValue().size() / 2));
    }

    // NOTE(review): assumes surgeon ids are exactly 1..n — average.get(i)
    // returns null (NPE on unboxing) otherwise; confirm id assignment.
    for (int i = 1; i <= average.size(); i++) {
        dataset.addValue(Math.round(average.get(i) / 600), series1, column[i - 1]);
    }
    JFreeChart chart = ChartFactory.createBarChart("Wait Time", // chart title
            "Surgeon ID", // domain axis label
            "Days", // range axis label
            dataset, // data
            PlotOrientation.VERTICAL, // orientation
            true, // include legend
            true, // tooltips?
            false // URLs?
    );

    return new ChartPanel(chart);
}

From source file:Simulator.PerformanceCalculation.java

/**
 * Builds a bar chart of the waiting cost per surgeon and records the
 * total cost in {@code totalCostClass}.  Each trace entry contributes
 * its two wait-time samples to the owning surgeon's bucket.
 *
 * @return a ChartPanel wrapping the generated bar chart
 */
public JPanel costAnaylsis() {
    // Surgeon ids in first-seen order (raw types replaced with generics).
    LinkedHashSet<Integer> surgeonIds = new LinkedHashSet<Integer>();
    LinkedHashMap<Integer, ArrayList<Double>> wait1 = new LinkedHashMap<>();

    for (Map.Entry<Integer, TraceObject> entry : l.getLocalTrace().entrySet()) {
        TraceObject traceObject = entry.getValue();

        // Single lookup instead of the previous get-then-get pattern.
        ArrayList<Double> details = wait1.get(traceObject.getSurgeonId());
        if (details == null) {
            details = new ArrayList<Double>();
            wait1.put(traceObject.getSurgeonId(), details);
        }
        details.add(traceObject.getWaitTime1());
        details.add(traceObject.getWaitTime2());

        surgeonIds.add(traceObject.getSurgeonId());
    }
    String[] column = new String[surgeonIds.size()];

    String series1 = "Cost";
    for (int i = 0; i < column.length; i++) {
        column[i] = "Surgeon " + (i + 1);
    }

    DefaultCategoryDataset dataset = new DefaultCategoryDataset();
    int totalCost = 0;
    LinkedHashMap<Integer, Double> average = new LinkedHashMap<>();
    for (Map.Entry<Integer, ArrayList<Double>> entry : wait1.entrySet()) {
        double total = 0;
        for (Double sample : entry.getValue()) {
            total += sample;
        }
        // Implicit narrowing to int preserved from the original code.
        totalCost += total * Configuration.costOfPatientWaiting;
        average.put(entry.getKey(), total / 600);
    }

    // NOTE(review): assumes surgeon ids are exactly 1..n — average.get(i)
    // returns null (NPE on unboxing) otherwise; confirm id assignment.
    for (int i = 1; i <= average.size(); i++) {
        dataset.addValue(Math.round(average.get(i) * Configuration.costOfPatientWaiting), series1,
                column[i - 1]);
    }
    totalCostClass = totalCost;
    JFreeChart chart = ChartFactory.createBarChart("Cost", // chart title
            "Surgeon ID", // domain axis label
            "$", // range axis label
            dataset, // data
            PlotOrientation.VERTICAL, // orientation
            true, // include legend
            true, // tooltips?
            false // URLs?
    );

    return new ChartPanel(chart);
}

From source file:org.overlord.sramp.wagon.SrampWagon.java

/**
 * Generates the maven-metadata.xml file dynamically for a given groupId/artifactId pair.  This will
 * list all of the versions available for that groupId+artifactId, along with the latest release and
 * snapshot versions.
 * @param gavInfo maven group/artifact/version info parsed from the requested repository path
 * @param inputData wagon input holder that receives the generated metadata (or hash) stream
 * @throws ResourceDoesNotExistException if no matching artifacts exist or generation fails
 */
private void doGenerateArtifactDirMavenMetaData(MavenGavInfo gavInfo, InputData inputData)
        throws ResourceDoesNotExistException {
    // See the comment in {@link SrampWagon#fillInputData(InputData)} about why we're doing this
    // context classloader magic.
    ClassLoader oldCtxCL = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(SrampWagon.class.getClassLoader());
    try {
        String artyPath = gavInfo.getFullName();
        if (gavInfo.isHash()) {
            // For hash requests (".sha1"/".md5") look up the metadata entry itself.
            artyPath = artyPath.substring(0, artyPath.lastIndexOf('.'));
        }
        SrampArchiveEntry entry = this.archive.getEntry(artyPath);
        // Generate and cache the metadata entry on first request.
        if (entry == null) {
            QueryResultSet resultSet = client
                    .buildQuery("/s-ramp[@maven.groupId = ? and @maven.artifactId = ?]") //$NON-NLS-1$
                    .parameter(gavInfo.getGroupId()).parameter(gavInfo.getArtifactId())
                    .propertyName("maven.version") //$NON-NLS-1$
                    .count(500).orderBy("createdTimestamp").ascending().query(); //$NON-NLS-1$
            if (resultSet.size() == 0) {
                throw new Exception(Messages.i18n.format("NO_ARTIFACTS_FOUND")); //$NON-NLS-1$
            }

            String groupId = gavInfo.getGroupId();
            String artifactId = gavInfo.getArtifactId();
            String latest = null;
            String release = null;
            String lastUpdated = null;

            // Results are ordered by creation timestamp ascending, so the last
            // distinct version seen is the latest; non-SNAPSHOT => release.
            LinkedHashSet<String> versions = new LinkedHashSet<String>();
            SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmss"); //$NON-NLS-1$
            for (ArtifactSummary artifactSummary : resultSet) {
                String version = artifactSummary.getCustomPropertyValue("maven.version"); //$NON-NLS-1$
                if (versions.add(version)) {
                    latest = version;
                    if (!version.endsWith("-SNAPSHOT")) { //$NON-NLS-1$
                        release = version;
                    }
                }
                lastUpdated = format.format(artifactSummary.getCreatedTimestamp());
            }

            // Build the maven-metadata.xml document by hand.
            StringBuilder mavenMetadata = new StringBuilder();
            mavenMetadata.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"); //$NON-NLS-1$
            mavenMetadata.append("<metadata>\n"); //$NON-NLS-1$
            mavenMetadata.append("  <groupId>").append(groupId).append("</groupId>\n"); //$NON-NLS-1$ //$NON-NLS-2$
            mavenMetadata.append("  <artifactId>").append(artifactId).append("</artifactId>\n"); //$NON-NLS-1$ //$NON-NLS-2$
            mavenMetadata.append("  <versioning>\n"); //$NON-NLS-1$
            mavenMetadata.append("    <latest>").append(latest).append("</latest>\n"); //$NON-NLS-1$ //$NON-NLS-2$
            mavenMetadata.append("    <release>").append(release).append("</release>\n"); //$NON-NLS-1$ //$NON-NLS-2$
            mavenMetadata.append("    <versions>\n"); //$NON-NLS-1$
            for (String version : versions) {
                mavenMetadata.append("      <version>").append(version).append("</version>\n"); //$NON-NLS-1$ //$NON-NLS-2$
            }
            mavenMetadata.append("    </versions>\n"); //$NON-NLS-1$
            mavenMetadata.append("    <lastUpdated>").append(lastUpdated).append("</lastUpdated>\n"); //$NON-NLS-1$ //$NON-NLS-2$
            mavenMetadata.append("  </versioning>\n"); //$NON-NLS-1$
            mavenMetadata.append("</metadata>\n"); //$NON-NLS-1$

            // Store the generated document in the archive so subsequent
            // requests (including hash requests) can read it back.
            BaseArtifactType artifact = ArtifactType.ExtendedDocument("MavenMetaData").newArtifactInstance(); //$NON-NLS-1$
            this.archive.addEntry(artyPath, artifact, IOUtils.toInputStream(mavenMetadata.toString()));

            entry = this.archive.getEntry(artyPath);
        }

        if (!gavInfo.isHash()) {
            inputData.setInputStream(this.archive.getInputStream(entry));
        } else {
            // Hash request: stream the digest of the metadata, not the metadata.
            String hash = generateHash(this.archive.getInputStream(entry), gavInfo.getHashAlgorithm());
            inputData.setInputStream(IOUtils.toInputStream(hash));
        }
    } catch (Exception e) {
        throw new ResourceDoesNotExistException(Messages.i18n.format("FAILED_TO_GENERATE_METADATA"), e); //$NON-NLS-1$
    } finally {
        Thread.currentThread().setContextClassLoader(oldCtxCL);
    }
}

From source file:net.yacy.peers.Protocol.java

/**
 * Processes the result of a remote peer search: converts the peer's URL
 * entries into word-reference containers, filters out blacklisted/invalid
 * entries, caches snippets, and merges the accepted documents into the
 * search event — either into the local index or as metadata nodes.
 *
 * @param event the search event collecting results
 * @param count maximum number of entries to accept from the peer
 * @param time elapsed time attributed to this remote search
 * @param wordhashes concatenation of the searched word hashes
 * @param target the remote peer that produced the result
 * @param blacklist blacklist used to filter returned URLs
 * @param result the raw search result received from the peer
 * @throws SpaceExceededException if a reference container cannot be allocated
 * @throws InterruptedException if the thread is interrupted while storing
 */
private static void remoteSearchProcess(final SearchEvent event, final int count, final long time,
        final String wordhashes, final Seed target, final Blacklist blacklist, final SearchResult result)
        throws SpaceExceededException, InterruptedException {

    // create containers — one empty container per searched word hash
    final int words = wordhashes.length() / Word.commonHashLength;
    assert words > 0 : "wordhashes = " + wordhashes;
    final List<ReferenceContainer<WordReference>> container = new ArrayList<ReferenceContainer<WordReference>>(
            words);
    for (int i = 0; i < words; i++) {
        container.add(ReferenceContainer.emptyContainer(Segment.wordReferenceFactory,
                ASCII.getBytes(
                        wordhashes.substring(i * Word.commonHashLength, (i + 1) * Word.commonHashLength)),
                count)); // throws SpaceExceededException
    }

    // insert results to containers
    int term = count;
    Map<String, LinkedHashSet<String>> snip;
    if (event.addResultsToLocalIndex) {
        snip = null;
    } else {
        snip = new HashMap<String, LinkedHashSet<String>>(); // needed to display nodestack results
    }
    List<URIMetadataNode> storeDocs = new ArrayList<URIMetadataNode>(result.links.size());
    for (final URIMetadataNode urlEntry : result.links) {
        if (term-- <= 0) {
            break; // do not process more that requested (in case that evil peers fill us up with rubbish)
        }
        // get one single search result
        if (urlEntry == null) {
            continue;
        }
        assert (urlEntry.hash().length == 12) : "urlEntry.hash() = " + ASCII.String(urlEntry.hash());
        if (urlEntry.hash().length != 12) {
            continue; // bad url hash
        }
        if (blacklist.isListed(BlacklistType.SEARCH, urlEntry.url())) {
            if (Network.log.isInfo()) {
                Network.log.info("remote search: filtered blacklisted url " + urlEntry.url().toNormalform(true)
                        + " from peer " + target.getName());
            }
            continue; // block with backlist
        }

        // reject urls that are not in a domain this peer accepts for crawling
        final String urlRejectReason = Switchboard.getSwitchboard().crawlStacker
                .urlInAcceptedDomain(urlEntry.url());
        if (urlRejectReason != null) {
            if (Network.log.isInfo()) {
                Network.log.info("remote search: rejected url '" + urlEntry.url().toNormalform(true) + "' ("
                        + urlRejectReason + ") from peer " + target.getName());
            }
            continue; // reject url outside of our domain
        }

        // save the url entry
        final Reference entry = urlEntry.word();
        if (entry == null) {
            if (Network.log.isWarn()) {
                Network.log.warn("remote search: no word attached from peer " + target.getName() + ", version "
                        + target.getVersion());
            }
            continue; // no word attached
        }

        // the search-result-url transports all the attributes of word indexes
        if (!Base64Order.enhancedCoder.equal(entry.urlhash(), urlEntry.hash())) {
            Network.log.info("remote search: url-hash " + ASCII.String(urlEntry.hash())
                    + " does not belong to word-attached-hash " + ASCII.String(entry.urlhash()) + "; url = "
                    + urlEntry.url().toNormalform(true) + " from peer " + target.getName());
            continue; // spammed
        }

        // passed all checks, store url
        storeDocs.add(urlEntry);
        ResultURLs.stack(ASCII.String(urlEntry.url().hash()), urlEntry.url().getHost(),
                event.peers.mySeed().hash.getBytes(), UTF8.getBytes(target.hash), EventOrigin.QUERIES);

        if (urlEntry.snippet() != null && urlEntry.snippet().length() > 0
                && !urlEntry.snippet().equals("null")) {
            // we don't store the snippets along the url entry,
            // because they are search-specific.
            // instead, they are placed in a snipped-search cache.
            // System.out.println("--- RECEIVED SNIPPET '" + urlEntry.snippet() + "'");
            TextSnippet.snippetsCache.put(wordhashes, ASCII.String(urlEntry.hash()), urlEntry.snippet());
            // add snippet for snippethandling for nodestack entries (used if not stored to index)
            if (!event.addResultsToLocalIndex) {
                // TODO: must have a snippet even to get the snippetcache entry back when adding to nodestack
                LinkedHashSet<String> sniptxt = new LinkedHashSet<String>();
                sniptxt.add(urlEntry.snippet());
                snip.put(ASCII.String(urlEntry.hash()), sniptxt);
            }
        }

        // add the url entry to the word indexes
        for (final ReferenceContainer<WordReference> c : container) {
            try {
                c.add(entry);
            } catch (final SpaceExceededException e) {
                ConcurrentLog.logException(e);
                break;
            }
        }
    }

    // store remote result to local result container
    // insert one container into the search result buffer
    // one is enough, only the references are used, not the word
    if (event.addResultsToLocalIndex) {
        /*
         * Current thread might be interrupted by SearchEvent.cleanup()
         */
        if (Thread.interrupted()) {
            throw new InterruptedException("solrQuery interrupted");
        }
        // write metadata on a dedicated thread so it can be stopped cleanly
        WriteMetadataNodeToLocalIndexThread writerToLocalIndex = new WriteMetadataNodeToLocalIndexThread(
                event.query.getSegment(), storeDocs);
        writerToLocalIndex.start();
        try {
            writerToLocalIndex.join();
        } catch (InterruptedException e) {
            /*
             * Current thread interruption might happen while waiting
             * for writeToLocalIndexThread.
             */
            writerToLocalIndex.stopWriting();
            throw new InterruptedException("remoteProcess stopped!");
        }
        event.addRWIs(container.get(0), false, target.getName() + "/" + target.hash, result.totalCount, time);
    } else {
        // feed results as nodes (SolrQuery results) which carry metadata,
        // to prevent a call to getMetaData for RWI results, which would fail (if no metadata in index and no display of these results)
        event.addNodes(storeDocs, null, snip, false, target.getName() + "/" + target.hash, count, true);
    }
    event.addFinalize();
    event.addExpectedRemoteReferences(-count);

    // insert the containers to the index
    for (final ReferenceContainer<WordReference> c : container) {
        try {
            event.query.getSegment().storeRWI(c);
        } catch (final Exception e) {
            ConcurrentLog.logException(e);
        }
    }

    // integrate remote top-words/topics
    if (result.references != null && result.references.length > 0) {
        Network.log.info(
                "remote search: peer " + target.getName() + " sent " + result.references.length + " topics");
        // add references twice, so they can be counted (must have at least 2 entries)
        synchronized (event) {
            event.addTopic(result.references);
            event.addTopic(result.references);
        }
    }
    Network.log.info("remote search: peer " + target.getName() + " sent " + container.get(0).size() + "/"
            + result.totalCount + " references");
}