Example usage for java.lang.InterruptedException InterruptedException(String)

Introduction

On this page you can find example usage for the java.lang.InterruptedException(String) constructor.

Prototype

public InterruptedException(String s) 

Document

Constructs an InterruptedException with the specified detail message.
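
As a minimal, self-contained sketch (the class and method names here are illustrative, not taken from the examples below), the detail message records why a detected interrupt stopped the work:

public class InterruptedExceptionDemo {
    // Hypothetical worker: polls the interrupt flag and converts it into a
    // checked exception carrying a human-readable reason.
    static void processItems(java.util.List<String> items) throws InterruptedException {
        for (String item : items) {
            // Thread.interrupted() also clears the flag, so the detail
            // message becomes the only record of why we stopped.
            if (Thread.interrupted()) {
                throw new InterruptedException("processing cancelled by interrupt");
            }
            System.out.println("processing " + item);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Thread.currentThread().interrupt(); // simulate a pending interrupt
        processItems(java.util.Arrays.asList("a", "b"));
    }
}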

Usage

From source file:com.threerings.getdown.data.Application.java

/**
 * Unpacks the resources that require it (we know that they're valid).
 *
 * @param unpacked a set of resources to skip because they're already unpacked.
 */
public void unpackResources(ProgressObserver obs, Set<Resource> unpacked) throws InterruptedException {
    List<Resource> rsrcs = getActiveResources();

    // remove resources that we don't want to unpack
    for (Iterator<Resource> it = rsrcs.iterator(); it.hasNext();) {
        Resource rsrc = it.next();
        if (!rsrc.shouldUnpack() || unpacked.contains(rsrc)) {
            it.remove();
        }
    }

    // obtain the sizes of the resources to unpack
    long[] sizes = new long[rsrcs.size()];
    for (int ii = 0; ii < sizes.length; ii++) {
        sizes[ii] = rsrcs.get(ii).getLocal().length();
    }

    ProgressAggregator pagg = new ProgressAggregator(obs, sizes);
    for (int ii = 0; ii < sizes.length; ii++) {
        if (Thread.interrupted()) {
            throw new InterruptedException("m.applet_stopped");
        }
        Resource rsrc = rsrcs.get(ii);
        ProgressObserver pobs = pagg.startElement(ii);
        if (!rsrc.unpack()) {
            log.info("Failure unpacking resource", "rsrc", rsrc);
        }
        pobs.progress(100);
    }
}
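
A caller typically runs unpackResources() on a worker thread and cancels it by interrupting that thread, which the Thread.interrupted() check above converts into the exception. A self-contained sketch of that wiring (class and method names are illustrative, not Getdown's):

public class UnpackCancellationDemo {
    // Stand-in for Application.unpackResources(): one interrupt check per
    // unit of work, mirroring the loop above.
    static void doUnpack() throws InterruptedException {
        for (int ii = 0; ii < 1000; ii++) {
            if (Thread.interrupted()) {
                throw new InterruptedException("m.applet_stopped");
            }
            // ... unpack one resource ...
        }
    }

    public static void main(String[] args) throws Exception {
        Thread worker = new Thread(() -> {
            try {
                doUnpack();
            } catch (InterruptedException ie) {
                System.err.println("unpack stopped: " + ie.getMessage());
            }
        }, "unpacker");
        worker.start();
        worker.interrupt(); // e.g. when the applet is stopped
        worker.join();
    }
}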

From source file:com.mellanox.r4h.DFSInputStream.java

private ByteBuffer getFirstToComplete(CompletionService<ByteBuffer> hedgedService,
        ArrayList<Future<ByteBuffer>> futures) throws InterruptedException {
    if (futures.isEmpty()) {
        throw new InterruptedException("let's retry");
    }
    Future<ByteBuffer> future = null;
    try {
        future = hedgedService.take();
        ByteBuffer bb = future.get();
        futures.remove(future);
        return bb;
    } catch (ExecutionException e) {
        // already logged in the Callable
        futures.remove(future);
    } catch (CancellationException ce) {
        // already logged in the Callable
        futures.remove(future);
    }

    throw new InterruptedException("let's retry");
}
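
The method drains a CompletionService fed by hedged read attempts, and throwing InterruptedException("let's retry") tells the caller to resubmit and call again. A self-contained sketch of the take()/get() pattern it builds on (the names here are illustrative, not from r4h):

import java.util.concurrent.*;

public class HedgedTakeDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        CompletionService<String> hedgedService = new ExecutorCompletionService<>(pool);
        hedgedService.submit(() -> { TimeUnit.MILLISECONDS.sleep(200); return "slow replica"; });
        hedgedService.submit(() -> "fast replica");

        // take() blocks until whichever attempt finishes first; get() then
        // returns its result or rethrows its failure as ExecutionException.
        Future<String> first = hedgedService.take();
        try {
            System.out.println("winner: " + first.get());
        } catch (ExecutionException e) {
            // the losing path in the source: drop the future and retry
        }
        pool.shutdownNow();
    }
}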

From source file:org.talend.designer.core.ui.AbstractMultiPageTalendEditor.java

protected void updateRunJobContext() {
    final JobContextManager manager = (JobContextManager) getProcess().getContextManager();
    if (manager.isModified()) {
        final Map<String, String> nameMap = manager.getNameMap();

        // gcui: add a progress dialog.
        Shell shell = null;
        Display display = PlatformUI.getWorkbench().getDisplay();
        if (display != null) {
            shell = display.getActiveShell();
        }
        if (shell == null) {
            display = Display.getCurrent();
            if (display == null) {
                display = Display.getDefault();
            }
            if (display != null) {
                shell = display.getActiveShell();
            }
        }
        ProgressMonitorDialog progressDialog = new ProgressMonitorDialog(shell);
        IRunnableWithProgress runnable = new IRunnableWithProgress() {

            @Override
            public void run(final IProgressMonitor monitor) {
                monitor.beginTask(Messages.getString("AbstractMultiPageTalendEditor_pleaseWait"), //$NON-NLS-1$
                        IProgressMonitor.UNKNOWN);
                Display.getDefault().syncExec(new Runnable() {

                    @Override
                    public void run() {
                        IProxyRepositoryFactory factory = CorePlugin.getDefault().getProxyRepositoryFactory();
                        factory.executeRepositoryWorkUnit(new RepositoryWorkUnit<Object>("..", this) { //$NON-NLS-1$

                            @Override
                            protected void run() throws LoginException, PersistenceException {
                                try {
                                    IProxyRepositoryFactory factory = CorePlugin.getDefault()
                                            .getProxyRepositoryFactory();

                                    Set<String> curContextVars = getCurrentContextVariables(manager);
                                    IProcess2 process2 = getProcess();
                                    String jobId = process2.getProperty().getId();
                                    IEditorReference[] reference = PlatformUI.getWorkbench()
                                            .getActiveWorkbenchWindow().getActivePage().getEditorReferences();
                                    List<IProcess2> processes = CorePlugin.getDefault().getDesignerCoreService()
                                            .getOpenedProcess(reference);

                                    // gcui: if nameMap is empty, do nothing.
                                    if (!nameMap.isEmpty()) {
                                        UpdateRunJobComponentContextHelper.updateItemRunJobComponentReference(
                                                factory, nameMap, jobId, curContextVars);
                                        UpdateRunJobComponentContextHelper
                                                .updateOpenedJobRunJobComponentReference(processes, nameMap,
                                                        jobId, curContextVars);
                                    }
                                    // add for bug 9564
                                    List<IRepositoryViewObject> all = factory
                                            .getAll(ERepositoryObjectType.PROCESS, true);
                                    List<ProcessItem> allProcess = new ArrayList<ProcessItem>();
                                    for (IRepositoryViewObject repositoryObject : all) {
                                        Item item = repositoryObject.getProperty().getItem();
                                        if (item instanceof ProcessItem) {
                                            ProcessItem processItem = (ProcessItem) item;
                                            allProcess.add(processItem);
                                        }
                                    }
                                    UpdateRunJobComponentContextHelper
                                            .updateRefJobRunJobComponentContext(factory, allProcess, process2);

                                } catch (PersistenceException e) {
                                    // e.printStackTrace();
                                    ExceptionHandler.process(e);
                                }
                                manager.setModified(false);
                            }
                        });

                    }

                });
                monitor.done();
                if (monitor.isCanceled()) {
                    try {
                        throw new InterruptedException("Save Fail"); //$NON-NLS-1$
                    } catch (InterruptedException e) {
                        ExceptionHandler.process(e);
                    }
                }
            }
        };
        try {
            progressDialog.run(true, true, runnable);
        } catch (InvocationTargetException e1) {
            ExceptionHandler.process(e1);
        } catch (InterruptedException e1) {
            ExceptionHandler.process(e1);
        }
    }
}
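
Note that the runnable above throws InterruptedException("Save Fail") only to catch and log it on the spot, so the cancellation never reaches progressDialog.run(). The usual JFace convention is to let the exception escape run(), where ProgressMonitorDialog rethrows it to the caller as the cancellation signal; a hedged sketch of that convention (task label and message are illustrative):

IRunnableWithProgress cancellable = new IRunnableWithProgress() {

    @Override
    public void run(IProgressMonitor monitor) throws InterruptedException {
        monitor.beginTask("Saving", IProgressMonitor.UNKNOWN); //$NON-NLS-1$
        // ... do the work, polling monitor.isCanceled() between steps ...
        if (monitor.isCanceled()) {
            // escapes run(); ProgressMonitorDialog.run(true, true, ...)
            // rethrows it to the caller as the cancellation signal
            throw new InterruptedException("Save cancelled"); //$NON-NLS-1$
        }
        monitor.done();
    }
};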

From source file:net.lightbody.bmp.proxy.jetty.http.HttpContext.java

/** Stop the context.
 */
protected void doStop() throws Exception {
    if (_httpServer == null)
        throw new InterruptedException("Destroy called");

    synchronized (this) {
        // Notify the container for the stop
        Thread thread = Thread.currentThread();
        ClassLoader lastContextLoader = thread.getContextClassLoader();
        try {
            if (_loader != null)
                thread.setContextClassLoader(_loader);
            Iterator handlers = _handlers.iterator();
            while (handlers.hasNext()) {
                HttpHandler handler = (HttpHandler) handlers.next();
                if (handler.isStarted()) {
                    try {
                        handler.stop();
                    } catch (Exception e) {
                        log.warn(LogSupport.EXCEPTION, e);
                    }
                }
            }

            if (_requestLog != null)
                _requestLog.stop();
        } finally {
            thread.setContextClassLoader(lastContextLoader);
        }

        // TODO this is a poor test
        if (_loader instanceof ContextLoader) {
            ((ContextLoader) _loader).destroy();
            LogFactory.release(_loader);
        }

        _loader = null;
    }
    _resources.flushCache();
    _resources.stop();
}
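
The shutdown above also demonstrates the context-ClassLoader swap idiom: install the context's own loader before stopping handlers, then restore the previous one in a finally block. A condensed sketch (contextLoader is a stand-in for the _loader field):

Thread thread = Thread.currentThread();
ClassLoader lastContextLoader = thread.getContextClassLoader();
try {
    if (contextLoader != null)
        thread.setContextClassLoader(contextLoader); // classes loaded during stop resolve against the context
    // ... stop handlers ...
} finally {
    thread.setContextClassLoader(lastContextLoader); // always restore the caller's loader
}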

From source file:org.wso2.developerstudio.appfactory.ui.views.AppfactoryApplicationListView.java

public boolean executeMavenCommands(File pomFile, IProgressMonitor monitor) throws InterruptedException {

    monitor.worked(10);

    try {
        String operationText = Messages.AppfactoryApplicationListView_executeMavenCommands_text;
        monitor.subTask(operationText);
        printInfoLog(operationText);

        InvocationResult result = mavenInstall(pomFile, monitor);

        if (result.getExitCode() != 0) {

            printErrorLog(Messages.AppfactoryApplicationListView_executeMavenCommands_errorlog_text);
        }

        monitor.worked(30);

    } catch (MavenInvocationException e) {
        // invocation failure is swallowed here; the cancellation check below still runs
    }

    if (monitor.isCanceled()) {
        throw new InterruptedException(Messages.ImportingCancelled_Error);
    }

    try {

        String operationText = Messages.AppfactoryApplicationListView_executeMavenCommands_text2;
        monitor.subTask(operationText);
        printInfoLog(operationText);

        InvocationResult result = mavenEclipse(pomFile, monitor);

        if (result.getExitCode() != 0) {

            printErrorLog(Messages.AppfactoryApplicationListView_executeMavenCommands_errorlog_text2);
        }
        monitor.worked(20);

    } catch (MavenInvocationException e) {
        monitor.worked(50);
    }

    return true;
}

From source file:io.pyd.synchro.SyncJob.java

protected void listDirRecursive(File directory, Node root, List<Node> accumulator, boolean save,
        List<Node> previousSnapshot) throws InterruptedException, SQLException {

    if (this.interruptRequired) {
        throw new InterruptedException("Interrupt required");
    }
    // Logger.getRootLogger().info("Searching " +
    // directory.getAbsolutePath());

    File[] children = directory.listFiles();
    String[] start = getCoreManager().EXCLUDED_FILES_START;
    String[] end = getCoreManager().EXCLUDED_FILES_END;
    if (children != null) {

        for (int i = 0; i < children.length; i++) {

            boolean ignore = false;
            String path = children[i].getName();
            for (int j = 0; j < start.length; j++) {
                if (path.startsWith(start[j])) {
                    ignore = true;
                    break;
                }
            }
            if (ignore) {
                continue;
            }

            for (int j = 0; j < end.length; j++) {
                if (path.endsWith(end[j])) {
                    ignore = true;
                    break;
                }
            }
            if (ignore) {
                continue;
            }

            Node newNode = new Node(Node.NODE_TYPE_ENTRY, path, root);
            if (save) {
                nodeDao.create(newNode);
            }

            String p = children[i].getAbsolutePath().substring(root.getPath(true).length()).replace("\\", "/");
            newNode.setPath(p);
            newNode.properties = nodeDao.getEmptyForeignCollection("properties");
            newNode.setLastModified(new Date(children[i].lastModified()));
            if (children[i].isDirectory()) {
                listDirRecursive(children[i], root, accumulator, save, previousSnapshot);
            } else {
                newNode.addProperty("bytesize", String.valueOf(children[i].length()));
                String md5 = null;

                if (previousSnapshot != null) {
                    // Logger.getRootLogger().info("Searching node in previous snapshot for "
                    // + p);

                    Node previous = ((EhcacheList<Node>) previousSnapshot).get(newNode);
                    if (previous != null) {
                        if (previous.getPath(true).equals(p)) {
                            if (previous.getLastModified().equals(newNode.getLastModified())
                                    && previous.getPropertyValue("bytesize")
                                            .equals(newNode.getPropertyValue("bytesize"))) {
                                md5 = previous.getPropertyValue("md5");
                                // Logger.getRootLogger().info("-- Getting md5 from previous snapshot");
                            }
                        }
                    }
                }

                if (md5 == null) {
                    // Logger.getRootLogger().info("-- Computing new md5");
                    getCoreManager().notifyUser("Indexation", "Indexing " + p, currentJobNodeID);
                    md5 = computeMD5(children[i]);
                }
                newNode.addProperty("md5", md5);
                newNode.setLeaf();
            }
            if (save) {
                nodeDao.update(newNode);

            }
            if (accumulator != null) {
                accumulator.add(newNode);
            }
            long totalMemory = Runtime.getRuntime().totalMemory();
            long currentMemory = Runtime.getRuntime().freeMemory();
            long percent = (currentMemory * 100 / totalMemory);
            // Logger.getRootLogger().info( percent + "%");
            if (percent <= 5) {
                // System.gc();
            }
            freeSomeCPU();
        }
    }
}
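
Unlike the earlier examples, SyncJob polls its own interruptRequired field rather than the thread's interrupt status, so any thread can request cancellation without touching thread state. A minimal, self-contained sketch of that flag pattern (class and method names are illustrative):

public class FlagCancellationDemo {
    // volatile so a cancel request from another thread is seen promptly
    private volatile boolean interruptRequired = false;

    public void requestInterrupt() {
        interruptRequired = true;
    }

    void scanTree(java.io.File directory) throws InterruptedException {
        if (interruptRequired) {
            throw new InterruptedException("Interrupt required");
        }
        java.io.File[] children = directory.listFiles();
        if (children == null) {
            return; // not a directory, or an I/O error
        }
        for (java.io.File child : children) {
            if (child.isDirectory()) {
                scanTree(child);
            }
        }
    }
}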

From source file:io.pyd.synchro.SyncJob.java

/**
 * Creates the snapshot (node tree structure) from an XML file saved
 * from the HTTP response input stream.
 * 
 * @param rootNode
 * @param accumulator
 * @param save
 * @throws Exception
 */
protected void takeRemoteSnapshot(final Node rootNode, final List<Node> accumulator, final boolean save)
        throws Exception {
    long time = System.currentTimeMillis();
    if (save) {
        emptyNodeChildren(rootNode, false);
    }

    // takeRemoteSnapshot() only creates a collection of remote nodes,
    // with properties stored in a local (not DB-managed) properties
    // collection. It calls the remote recursive LS, producing the full
    // tree content of nodes, which we then need to persist.

    takeRemoteSnapshot(rootNode, rootNode, accumulator, save);

    if (interruptRequired) {
        throw new InterruptedException("Interrupt required");
    }

    Logger.getRootLogger().info("Saving nodes");

    if (save) {
        nodeDao.update(rootNode);

        // now we need to persist the collection of nodes,
        // with the steps reproduced from the original takeRemoteSnapshot():
        // - add parent
        // - create
        // - create properties collection
        // - update
        // NOTE: before the update, we need to copy properties from the local
        // collection to the DB-managed collection!
        nodeDao.callBatchTasks(new Callable<Void>() {
            public Void call() throws Exception {
                for (Node n : accumulator) {
                    // add parent
                    n.setParent(rootNode);
                    nodeDao.create(n);
                    n.properties = nodeDao.getEmptyForeignCollection("properties");
                    n.copyLocalProperties();
                    nodeDao.update(n);
                }
                return null;
            }
        });

    }

    Logger.getRootLogger().info("Nodes saved: " + (System.currentTimeMillis() - time) + " ms");
}

From source file:io.pyd.synchro.SyncJob.java

protected void takeRemoteSnapshot(final Node rootNode, final Node currentFolder, final List<Node> accumulator,
        final boolean save) throws Exception {

    currentFolder.setMaxDepth(3);
    currentFolder.setMaxNodes(1000);
    int d = 3;
    int no = 1000;
    if (currentRepository.getPropertyValue("max_depth") != null) {
        d = Integer.parseInt(currentRepository.getPropertyValue("max_depth"));
        if (d == -1)
            no = -1;
        else if (d == 3)
            no = 1000;
        else
            no = 100;
    }
    currentFolder.setMaxDepth(d);
    currentFolder.setMaxNodes(no);

    Logger.getRootLogger().info("Taking remote content for node: " + currentFolder.getPath());

    final List<Node> partial = new ArrayList<Node>();

    // parse file structure to node tree
    URI recursiveLsDirectoryUri = AjxpAPI.getInstance().getRecursiveLsDirectoryUri(currentFolder);
    parseNodesFromStream(getUriContentStream(recursiveLsDirectoryUri), rootNode, partial, save);

    if (interruptRequired) {
        throw new InterruptedException("Interrupt required");
    }

    Logger.getRootLogger().info("Loaded part: " + partial.size());

    for (Node n : partial) {

        if (interruptRequired) {
            throw new InterruptedException("Interrupt required");
        }
        if (!n.isLeaf() && n.isHasChildren()) {
            if (!"".equals(n.getPath()) && !"/".equals(n.getPath())) {
                takeRemoteSnapshot(rootNode, n, accumulator, save);
            }
        }
    }

    if (accumulator != null) {
        accumulator.addAll(partial);
    }

}

From source file:io.pyd.synchro.SyncJob.java

/**
 * Parses the remote node structure directly from the XML stream using a
 * modified SAX parser.
 * 
 * @param remoteTreeFile
 * @param parentNode
 * @param list
 * @param save
 * @throws SynchroOperationException
 * @throws InterruptedException
 */
protected void parseNodesFromStream(InputStream is, final Node parentNode, final List<Node> list,
        final boolean save) throws SynchroOperationException, InterruptedException {

    try {
        XMLReader reader = new XMLReader();
        reader.addHandler("tree", new NodeHandler() {

            @Override
            public void process(StructuredNode node) {

                // check whether the user wants to stop
                if (interruptRequired) {
                    return;
                }

                try {
                    org.w3c.dom.Node xmlNode = node.queryXMLNode("/tree");

                    Node entry = new Node(Node.NODE_TYPE_ENTRY, "", parentNode);
                    // init node with properties saved to LOCAL property
                    // collection
                    entry.initFromXmlNode(xmlNode, true);

                    if (list != null) {
                        list.add(entry);
                    }
                } catch (XPathExpressionException e) {
                    // FIXME - how should these errors be handled here?

                }
            }
        });
        reader.parse(is);

        if (interruptRequired) {
            throw new InterruptedException("Interrupt required");
        }
        if (list != null) {
            Logger.getRootLogger().info("Parsed " + list.size() + " nodes from stream");
        } else {
            Logger.getRootLogger().info("No items to parse");
        }
    } catch (ParserConfigurationException e) {
        throw new SynchroOperationException(
                "Error during parsing remote node tree structure: " + e.getMessage(), e);
    } catch (SAXException e) {

        throw new SynchroOperationException(
                "Error during parsing remote node tree structure: " + e.getMessage(), e);
    } catch (IOException e) {
        throw new SynchroOperationException(
                "Error during parsing remote node tree structure: " + e.getMessage(), e);
    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
                // nothing more we can do here
            }
        }
    }
}
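
Because the NodeHandler callback cannot throw checked exceptions, process() merely returns early when interruptRequired is set; the flag is converted into an InterruptedException only after reader.parse() returns. A condensed sketch of that two-step conversion (parse() is a stand-in for the wrapped SAX call):

private volatile boolean interruptRequired; // set from another thread

void parseWithCancellation(java.io.InputStream is) throws InterruptedException {
    // inside the SAX callback: if (interruptRequired) return;  (cannot throw here)
    parse(is); // stand-in for reader.parse(is)
    // back outside the callback, checked exceptions are allowed again
    if (interruptRequired) {
        throw new InterruptedException("Interrupt required");
    }
}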

From source file:net.yacy.peers.Protocol.java

/**
 * Execute solr query against specified target.
 * @param event search event to feed with results
 * @param solrQuery solr query
 * @param offset pagination start index
 * @param count expected maximum results
 * @param target target peer to query. May be null : in that case, local peer is queried.
 * @param partitions
 * @param blacklist url list to exclude from results
 * @param useSolrFacets when true, use Solr computed facets when possible to update the event navigators counters
 * @param incrementNavigators when true, increment event navigators either with facet counts or with individual results
 * @return the size of results list
 * @throws InterruptedException when interrupt status on calling thread is detected while processing
 */
protected static int solrQuery(final SearchEvent event, final SolrQuery solrQuery, final int offset,
        final int count, final Seed target, final int partitions, final Blacklist blacklist,
        final boolean useSolrFacets, final boolean incrementNavigators) throws InterruptedException {

    //try {System.out.println("*** debug-query *** " + URLDecoder.decode(solrQuery.toString(), "UTF-8"));} catch (UnsupportedEncodingException e) {}

    if (event.query.getQueryGoal().getQueryString(false) == null
            || event.query.getQueryGoal().getQueryString(false).length() == 0) {
        return -1; // we cannot query solr only with word hashes, there is no clear text string
    }
    event.addExpectedRemoteReferences(count);
    if (partitions > 0)
        solrQuery.set("partitions", partitions);
    solrQuery.setStart(offset);
    solrQuery.setRows(count);

    boolean localsearch = target == null || target.equals(event.peers.mySeed());
    Map<String, ReversibleScoreMap<String>> facets = new HashMap<String, ReversibleScoreMap<String>>(
            event.query.facetfields.size());
    Map<String, LinkedHashSet<String>> snippets = new HashMap<String, LinkedHashSet<String>>(); // this will be a list of urlhash-snippet entries
    final QueryResponse[] rsp = new QueryResponse[] { null };
    final SolrDocumentList[] docList = new SolrDocumentList[] { null };
    {// encapsulate expensive solr QueryResponse object
        if (localsearch && !Switchboard.getSwitchboard()
                .getConfigBool(SwitchboardConstants.DEBUG_SEARCH_REMOTE_SOLR_TESTLOCAL, false)) {
            // search the local index
            try {
                SolrConnector sc = event.getQuery().getSegment().fulltext().getDefaultConnector();
                if (!sc.isClosed()) {
                    rsp[0] = sc.getResponseByParams(solrQuery);
                    docList[0] = rsp[0].getResults();
                }
            } catch (final Throwable e) {
                Network.log.info("SEARCH failed (solr), localpeer (" + e.getMessage() + ")", e);
                return -1;
            }
        } else {
            String targetBaseURL = null;
            try {
                final boolean myseed = target == event.peers.mySeed();
                if (myseed) {
                    targetBaseURL = "http://localhost:" + target.getPort();
                } else {
                    final Set<String> ips = target.getIPs();
                    if (ips.isEmpty()) {
                        /* This should not happen : seeds db maintains only seeds with at least one IP */
                        Network.log.info("SEARCH failed (solr), remote Peer: " + target.getName()
                                + " has no known IP address");
                        target.setFlagSolrAvailable(false);
                        return -1;
                    }
                    final String ip = ips.iterator().next();

                    targetBaseURL = target.getPublicURL(ip,
                            Switchboard.getSwitchboard().getConfigBool(
                                    SwitchboardConstants.REMOTESEARCH_HTTPS_PREFERRED,
                                    SwitchboardConstants.REMOTESEARCH_HTTPS_PREFERRED_DEFAULT));
                }
                if (!myseed && !target.getFlagSolrAvailable()) { // skip if peer.dna has flag that last try resulted in error
                    Network.log.info("SEARCH skip (solr), remote Solr interface not accessible, peer="
                            + target.getName());
                    return -1;
                }
                final int solrtimeout = Switchboard.getSwitchboard()
                        .getConfigInt(SwitchboardConstants.FEDERATED_SERVICE_SOLR_INDEXING_TIMEOUT, 6000);
                SolrRequestTask remoteRequest = new SolrRequestTask(solrQuery, targetBaseURL, target, myseed,
                        solrtimeout, rsp, docList);
                remoteRequest.start();
                remoteRequest.join(solrtimeout); // just wait until timeout appears
                if (remoteRequest.isAlive()) {
                    /* Try to free the request thread resources properly */
                    remoteRequest.close();
                    if (remoteRequest.isAlive()) {
                        /* Thread still running : try also with interrupt*/
                        remoteRequest.interrupt();
                    }
                    Network.log.info("SEARCH failed (solr), remote Peer: " + target.getName() + "/"
                            + targetBaseURL + " does not answer (time-out)");
                    target.setFlagSolrAvailable(myseed);
                    return -1; // give up, leave remoteRequest abandoned.
                }

                if (rsp[0] == null || docList[0] == null) {
                    Network.log.info("SEARCH failed (solr), remote Peer: " + target.getName() + "/"
                            + targetBaseURL + " returned null");
                    if (!myseed) {
                        if (targetBaseURL.startsWith("https")) {
                            /* First mark https unavailable on this peer before removing anything else */
                            target.setFlagSSLAvailable(false);
                            event.peers.updateConnected(target);
                        } else {
                            target.setFlagSolrAvailable(false);
                        }
                    }
                    return -1;
                }
            } catch (InterruptedException e) {
                /* Current thread might be interrupted by SearchEvent.cleanup() : 
                 * we must not in that case mark the target as not available but rather transmit the exception to the caller (likely RemoteSearch.solrRemoteSearch) */
                throw e;
            } catch (final Throwable e) {
                if (Network.log.isInfo()) {
                    Network.log.info("SEARCH failed (solr), remote Peer: " + target.getName()
                            + (targetBaseURL != null ? "/" + targetBaseURL : "") + " (" + e.getMessage() + ")");
                }
                target.setFlagSolrAvailable(localsearch);
                return -1;
            }
        }

        // evaluate facets
        if (useSolrFacets) {
            for (String field : event.query.facetfields) {
                FacetField facet = rsp[0].getFacetField(field);
                ReversibleScoreMap<String> result = new ClusteredScoreMap<String>(
                        UTF8.insensitiveUTF8Comparator);
                List<Count> values = facet == null ? null : facet.getValues();
                if (values == null) {
                    continue;
                }
                for (Count ff : values) {
                    int c = (int) ff.getCount();
                    if (c == 0) {
                        continue;
                    }
                    if (ff.getName().length() == 0) {
                        continue; // facet entry without text is not useful
                    }
                    result.set(ff.getName(), c);
                }
                if (result.size() > 0) {
                    facets.put(field, result);
                }
            }
        }

        // evaluate snippets
        final Map<String, Map<String, List<String>>> rawsnippets = rsp[0].getHighlighting(); // a map from the urlhash to a map with key=field and value = list of snippets
        if (rawsnippets != null) {
            nextsnippet: for (final Map.Entry<String, Map<String, List<String>>> re : rawsnippets.entrySet()) {
                final Map<String, List<String>> rs = re.getValue();
                for (final String field : solrQuery.getHighlightFields()) {
                    if (rs.containsKey(field)) {
                        final List<String> s = rs.get(field);
                        if (s.size() > 0) {
                            final LinkedHashSet<String> ls = new LinkedHashSet<String>();
                            ls.addAll(s);
                            snippets.put(re.getKey(), ls);
                            continue nextsnippet;
                        }
                    }
                }
                // no snippet found :( --we don't assign a value here by default; that can be done as an evaluation outside this method
            }
        }
        rsp[0] = null;
    }

    // evaluate result
    if (docList[0] == null || docList[0].size() == 0) {
        Network.log.info("SEARCH (solr), returned 0 out of 0 documents from "
                + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())) + " query = "
                + solrQuery.toString());
        return 0;
    }

    List<URIMetadataNode> resultContainer = new ArrayList<URIMetadataNode>();
    Network.log.info("SEARCH (solr), returned " + docList[0].size() + " out of " + docList[0].getNumFound()
            + " documents and " + facets.size() + " facets " + facets.keySet().toString() + " from "
            + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())));
    int term = count;
    Collection<SolrInputDocument> docs;
    if (event.addResultsToLocalIndex) { // only needed to store remote results
        docs = new ArrayList<SolrInputDocument>(docList[0].size());
    } else {
        docs = null;
    }
    for (final SolrDocument tmpdoc : docList[0]) {
        //System.out.println("***DEBUG*** " + ((String) doc.getFieldValue("sku")));
        if (term-- <= 0) {
            break; // do not process more than requested (in case evil peers fill us up with rubbish)
        }
        // get one single search result
        if (tmpdoc == null) {
            continue;
        }
        URIMetadataNode urlEntry;
        try {
            urlEntry = new URIMetadataNode(tmpdoc);
        } catch (MalformedURLException ex) {
            continue;
        }

        if (blacklist.isListed(BlacklistType.SEARCH, urlEntry.url())) {
            if (Network.log.isInfo()) {
                if (localsearch) {
                    Network.log.info("local search (solr): filtered blacklisted url "
                            + urlEntry.url().toNormalform(true));
                } else {
                    Network.log.info("remote search (solr): filtered blacklisted url "
                            + urlEntry.url().toNormalform(true) + " from "
                            + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())));
                }
            }
            continue; // block with blacklist
        }

        final String urlRejectReason = Switchboard.getSwitchboard().crawlStacker
                .urlInAcceptedDomain(urlEntry.url());
        if (urlRejectReason != null) {
            if (Network.log.isInfo()) {
                if (localsearch) {
                    Network.log.info("local search (solr): rejected url '" + urlEntry.url().toNormalform(true)
                            + "' (" + urlRejectReason + ")");
                } else {
                    Network.log.info("remote search (solr): rejected url '" + urlEntry.url().toNormalform(true)
                            + "' (" + urlRejectReason + ") from peer " + target.getName());
                }
            }
            continue; // reject url outside of our domain
        }

        // passed all checks, store url
        if (!localsearch) {

            // put the remote documents to the local index. We must convert the solr document to a solr input document:
            if (event.addResultsToLocalIndex) {
                /* Check document size, only if a limit is set on remote documents size allowed to be stored to local index */
                if (checkDocumentSize(tmpdoc, event.getRemoteDocStoredMaxSize() * 1024)) {
                    final SolrInputDocument sid = event.query.getSegment().fulltext().getDefaultConfiguration()
                            .toSolrInputDocument(tmpdoc);

                    // the input document stays untouched because it contains top-level cloned objects
                    docs.add(sid);
                    // will be stored to index, and is a full solr document, can be added to firstseen
                    event.query.getSegment().setFirstSeenTime(urlEntry.hash(),
                            Math.min(urlEntry.moddate().getTime(), System.currentTimeMillis()));
                } else {
                    Network.log.info("Document size greater than " + event.getRemoteDocStoredMaxSize()
                            + " kbytes, excludes it from being stored to local index. Url : "
                            + urlEntry.urlstring());
                }
            }

            // after this conversion we can remove the largest and not used field text_t and synonyms_sxt from the document
            // because that goes into a search cache and would take a lot of memory in the search cache
            //doc.removeFields(CollectionSchema.text_t.getSolrFieldName());
            tmpdoc.removeFields(CollectionSchema.synonyms_sxt.getSolrFieldName());

            ResultURLs.stack(ASCII.String(urlEntry.url().hash()), urlEntry.url().getHost(),
                    event.peers.mySeed().hash.getBytes(), UTF8.getBytes(target.hash), EventOrigin.QUERIES);
        }

        // add the url entry to the checked results
        resultContainer.add(urlEntry);
    }
    final int numFound = (int) docList[0].getNumFound();
    docList[0].clear();
    docList[0] = null;
    if (localsearch) {
        event.addNodes(resultContainer, facets, snippets, true, "localpeer", numFound, incrementNavigators);
        event.addFinalize();
        event.addExpectedRemoteReferences(-count);
        Network.log.info("local search (solr): localpeer sent " + resultContainer.size() + "/" + numFound
                + " references");
    } else {
        if (event.addResultsToLocalIndex) {
            /*
             * Current thread might be interrupted by SearchEvent.cleanup()
             */
            if (Thread.interrupted()) {
                throw new InterruptedException("solrQuery interrupted");
            }
            WriteToLocalIndexThread writeToLocalIndexThread = new WriteToLocalIndexThread(
                    event.query.getSegment(), docs); // will clear docs on return
            writeToLocalIndexThread.start();
        }
        event.addNodes(resultContainer, facets, snippets, false, target.getName() + "/" + target.hash, numFound,
                incrementNavigators);
        event.addFinalize();
        event.addExpectedRemoteReferences(-count);
        Network.log.info("remote search (solr): peer " + target.getName() + " sent " + (resultContainer.size())
                + "/" + numFound + " references");
    }
    return resultContainer.size();
}
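
A final detail shared by the Getdown and YaCy examples: they poll Thread.interrupted(), which clears the interrupt flag, rather than Thread.currentThread().isInterrupted(), which leaves it set. Clearing is appropriate here because the pending interrupt is consumed by the exception being thrown:

// isInterrupted() only observes the flag; interrupted() consumes it.
if (Thread.interrupted()) { // the flag is cleared from this point on
    throw new InterruptedException("solrQuery interrupted");
}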