Example usage for java.util LinkedHashMap isEmpty

List of usage examples for java.util LinkedHashMap isEmpty

Introduction

On this page you can find an example usage for java.util LinkedHashMap isEmpty.

Prototype

boolean isEmpty();

Source Link

Document

Returns true if this map contains no key-value mappings.

Usage

From source file:org.pircbotx.PircBotX.java

/**
 * Start the bot by connecting to the server. If
 * {@link Configuration#isAutoReconnect()} is true this will continuously
 * reconnect to the server until {@link #stopBotReconnect() } is called or
 * an exception is thrown from connecting
 *
 * @throws IOException if it was not possible to connect to the server.
 * @throws IrcException propagated from {@link #connect()} — presumably protocol-level
 *         failures during the handshake; TODO confirm exact conditions
 */
public void startBot() throws IOException, IrcException {
    //Begin magic
    reconnectStopped = false;
    do {
        //Try to connect to the server, grabbing any exceptions.
        //LinkedHashMap preserves the order in which addresses failed, so the
        //failure event reports attempts in attempt order.
        LinkedHashMap<InetSocketAddress, Exception> connectExceptions = Maps.newLinkedHashMap();
        try {
            connectAttemptTotal++;
            connectAttempts++;
            //connect() returns per-address failures; merge them into our map
            connectExceptions.putAll(connect());
        } catch (Exception e) {
            //Initial connect exceptions are returned in the map, this is a more serious error
            log.error("Exception encountered during connect", e);
            connectExceptions.put(new InetSocketAddress(serverHostname, serverPort), e);

            if (!configuration.isAutoReconnect())
                throw new RuntimeException("Exception encountered during connect", e);
        } finally {
            //Dispatch a failure event whether the failures were returned or thrown
            if (!connectExceptions.isEmpty())
                Utils.dispatchEvent(this,
                        new ConnectAttemptFailedEvent(this,
                                configuration.getAutoReconnectAttempts() - connectAttempts,
                                ImmutableMap.copyOf(connectExceptions)));

            //Cleanup if not already called
            synchronized (stateLock) {
                if (state != State.DISCONNECTED)
                    shutdown();
            }
        }

        //No longer connected to the server
        if (!configuration.isAutoReconnect())
            return;
        if (reconnectStopped) {
            log.debug("stopBotReconnect() called, exiting reconnect loop");
            return;
        }
        if (connectAttempts == configuration.getAutoReconnectAttempts()) {
            throw new IOException("Failed to connect to IRC server(s) after " + connectAttempts + " attempts");
        }

        //Optionally pause between attempts, useful if network is temporarily down
        if (configuration.getAutoReconnectDelay() > 0)
            try {
                log.debug("Pausing for {} milliseconds before connecting again",
                        configuration.getAutoReconnectDelay());
                Thread.sleep(configuration.getAutoReconnectDelay());
            } catch (InterruptedException e) {
                throw new RuntimeException("Interrupted while pausing before the next connect attempt", e);
            }
    } while (connectAttempts < configuration.getAutoReconnectAttempts());
}

From source file:org.trnltk.tokenizer.TextTokenizerDefaultTrainingTest.java

/**
 * Verifies that every default training entry, when tokenized by a strict tokenizer
 * built from the default tokenization graph, reproduces its recorded tokenization,
 * and that no tokenization failures were recorded along the way.
 */
@Test
public void shouldValidateDefaultRuleEntries() throws IOException {
    final TokenizationGraph graph = TextTokenizerTrainer.buildDefaultTokenizationGraph(true);

    final TextTokenizer strictTokenizer = TextTokenizer.newBuilder()
            .blockSize(2)
            .recordStats()
            .strict()
            .graph(graph)
            .build();

    // Stateless transform from a token to its surface form; hoisted out of the loop.
    final Function<Token, String> toSurface = new Function<Token, String>() {
        @Override
        public String apply(org.trnltk.tokenizer.Token input) {
            return input.getSurface();
        }
    };

    final TokenizerTrainingData trainingData = TokenizerTrainingData.createDefaultTrainingData();
    for (TokenizerTrainingEntry trainingEntry : trainingData.getEntries()) {
        final List<Token> tokens = strictTokenizer.tokenize(trainingEntry.getText());
        final String joinedSurfaces = Joiner.on(" ").join(Iterables.transform(tokens, toSurface));
        assertThat(trainingEntry.getTknz().trim(), equalTo(joinedSurfaces.trim()));
    }

    // No tokenization failures may have been recorded by the strict tokenizer.
    final TextTokenizer.TextTokenizerStats stats = strictTokenizer.getStats();
    final LinkedHashMap<Pair<TextBlockTypeGroup, TextBlockTypeGroup>, Set<MissingTokenizationRuleException>> failures = stats
            .buildSortedFailMap();

    assertThat(failures.isEmpty(), equalTo(true));
}

From source file:pt.lsts.neptus.util.logdownload.LogsDownloaderWorkerActions.java

/**
 * Builds the Swing action behind the "download list" button: validates the GUI
 * configuration, then on a background worker fetches the base log-folder list from
 * all configured servers and refreshes the log folder/file lists in the GUI.
 *
 * NOTE(review): heavily coupled to GUI widgets and the shared stopLogListProcessing
 * flag; statement order matters, so only comments were added here.
 */
@SuppressWarnings("serial")
private AbstractAction createDownloadListAction() {
    return new AbstractAction() {
        @Override
        public void actionPerformed(ActionEvent e) {
            if (!gui.validateAndSetUI()) {
                gui.popupErrorConfigurationDialog();
                return;
            }
            AsyncTask task = new AsyncTask() {
                @Override
                public Object run() throws Exception {
                    // Clear a leftover stop request so this run starts fresh.
                    if (stopLogListProcessing)
                        stopLogListProcessing = false;

                    long time = System.currentTimeMillis();
                    showInGuiStarting();

                    // Disable the button while the refresh runs; re-enabled in finish().
                    gui.downloadListButton.setEnabled(false);
                    // logFolderList.setEnabled(false);
                    gui.logFolderList.setValueIsAdjusting(true);
                    // logFilesList.setEnabled(false);

                    // ->Getting txt list of logs from server
                    showInGuiConnectingToServers();

                    // Map base log folder vs servers presence (space separated list of servers keys)
                    LinkedHashMap<String, String> serversLogPresenceList = new LinkedHashMap<>();
                    // Map FTPFile (log base folder) vs remote path
                    LinkedHashMap<FTPFile, String> retList = new LinkedHashMap<>();

                    // Get list from servers
                    getFromServersBaseLogList(retList, serversLogPresenceList);

                    // Nothing on any server: report and stop early.
                    if (retList.isEmpty()) {
                        gui.msgPanel.writeMessageTextln(I18n.text("Done"));
                        return null;
                    }

                    gui.msgPanel
                            .writeMessageTextln(I18n.textf("Log Folders: %numberoffolders", retList.size()));

                    long timeS1 = System.currentTimeMillis();

                    // Added in order not to show the active log (the last one)
                    orderAndFilterOutTheActiveLog(retList);
                    showInGuiNumberOfLogsFromServers(retList);
                    if (retList.size() == 0) // Abort the rest of processing
                        return null;

                    // ->Removing from already existing LogFolders to LOCAL state
                    showInGuiFiltering();
                    setStateLocalIfNotInPresentServer(retList);

                    // Bail out between phases if the user requested a stop.
                    if (stopLogListProcessing)
                        return null;

                    // ->Adding new LogFolders
                    LinkedList<LogFolderInfo> existentLogFoldersFromServer = new LinkedList<LogFolderInfo>();
                    LinkedList<LogFolderInfo> newLogFoldersFromServer = new LinkedList<LogFolderInfo>();
                    addTheNewFoldersAnFillTheReturnedExistentAndNewLists(retList, existentLogFoldersFromServer,
                            newLogFoldersFromServer);

                    if (stopLogListProcessing)
                        return null;

                    // ->Getting Log files list from server
                    showInGuiProcessingLogList();
                    LinkedList<LogFolderInfo> tmpLogFolderList = getFromServersCompleteLogList(
                            serversLogPresenceList);

                    showInGuiUpdatingLogsInfo();

                    // Testing for log files from each log folder
                    testingForLogFilesFromEachLogFolderAndFillInfo(tmpLogFolderList);

                    if (stopLogListProcessing)
                        return null;

                    // Updating new and existent log folders
                    testNewReportedLogFoldersForLocalCorrespondent(newLogFoldersFromServer);
                    updateLogFoldersState(existentLogFoldersFromServer);

                    // Updating Files for selected folders
                    updateFilesListGUIForFolderSelectedNonBlocking();

                    NeptusLog.pub().warn("....process list from all servers "
                            + (System.currentTimeMillis() - timeS1) + "ms");

                    showInGuiUpdatingGui();

                    NeptusLog.pub()
                            .warn("....all downloadListAction " + (System.currentTimeMillis() - time) + "ms");
                    showInGuiDone();
                    return true;
                }

                @Override
                public void finish() {
                    // Always restore GUI state, whether run() completed, aborted, or threw.
                    stopLogListProcessing = false;

                    gui.logFolderList.setValueIsAdjusting(false);
                    gui.logFolderList.invalidate();
                    gui.logFolderList.revalidate();
                    gui.logFolderList.repaint();
                    gui.logFolderList.setEnabled(true);
                    // logFilesList.invalidate();
                    // logFilesList.revalidate();
                    // logFilesList.repaint();
                    gui.listHandlingProgressBar.setValue(0);
                    gui.listHandlingProgressBar.setIndeterminate(false);
                    gui.listHandlingProgressBar.setString("");
                    gui.logFilesList.setEnabled(true);
                    gui.downloadListButton.setEnabled(true);
                    try {
                        // Surface any exception thrown by run() (logged, not rethrown).
                        this.getResultOrThrow();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            };

            AsyncWorker.getWorkerThread().postTask(task);
        }
    };
}

From source file:pt.lsts.neptus.util.logdownload.LogsDownloaderWorkerActions.java

/**
 * For the given server with serverKey ID, takes its {@link #getBaseLogListFrom(String)}
 * reply as toProcessLogList and fills serversLogPresenceList for each base log,
 * adding serverKey to the space-separated list of servers where that log is present.
 *
 * If finalLogList is not null, also adds the missing entries to it.
 *
 * @param serverKey the ID of the server whose listing is being processed
 * @param toProcessLogList map of FTPFile (log base folder) vs remote path for this server (may be null)
 * @param finalLogList accumulated map across all servers (may be null)
 * @param serversLogPresenceList map of base log path vs space-separated server keys; updated in place
 */
private void fillServerPresenceList(String serverKey, LinkedHashMap<FTPFile, String> toProcessLogList,
        LinkedHashMap<FTPFile, String> finalLogList, LinkedHashMap<String, String> serversLogPresenceList) {

    // Guard clause instead of wrapping the whole body in an if.
    if (toProcessLogList == null || toProcessLogList.isEmpty())
        return;

    if (finalLogList == null || finalLogList.isEmpty()) {
        // First server processed: every base log seen so far belongs only to this server.
        for (String partialUri : toProcessLogList.values()) {
            serversLogPresenceList.put(partialUri, serverKey);
        }
        if (finalLogList != null)
            finalLogList.putAll(toProcessLogList);
    } else {
        // Merge with logs already reported by other servers. Iterate over entries
        // (fully qualified to avoid requiring a java.util.Map import) instead of
        // keySet()+get(), which did a redundant lookup per key; also dropped the
        // original's redundant continue-then-else.
        for (java.util.Map.Entry<FTPFile, String> entry : toProcessLogList.entrySet()) {
            String val = entry.getValue();
            if (finalLogList.containsValue(val)) {
                // Log already known: append this server to its presence list.
                serversLogPresenceList.put(val, serversLogPresenceList.get(val) + " " + serverKey);
            } else {
                finalLogList.put(entry.getKey(), val);
                serversLogPresenceList.put(val, serverKey);
            }
        }
    }
}

From source file:ubic.gemma.core.visualization.ExperimentalDesignVisualizationServiceImpl.java

/**
 * Sorts the data of each vector to match the experimental-design layout of its
 * experiment (or subset), and reorders the associated bio-assay dimensions.
 *
 * @param dedVs vectors to reorganize in place; may be null (returns an empty map)
 * @return map of experiment (or subset) id to the layout used for its vectors
 */
@Override
public Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> sortVectorDataByDesign(
        Collection<DoubleVectorValueObject> dedVs) {

    // cachedLayouts.clear(); // uncomment FOR DEBUGGING.

    if (dedVs == null) {
        return new HashMap<>(0);
    }

    Map<Long, LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>>> returnedLayouts = new HashMap<>(
            dedVs.size());

    StopWatch timer = new StopWatch();
    timer.start();

    /*
     * This is shared across experiments that might show up in the dedVs; this should be okay...saves computation.
     * This is the only slow part.
     */
    this.prepare(dedVs);

    /*
     * This loop is not a performance issue.
     */
    Map<DoubleVectorValueObject, List<BioAssayValueObject>> newOrderingsForBioAssayDimensions = new HashMap<>();
    for (DoubleVectorValueObject vec : dedVs) {

        if (vec.isReorganized()) {
            continue;
        }

        assert !vec.getBioAssays().isEmpty();

        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> layout = null;

        if (cachedLayouts.containsKey(vec.getExpressionExperiment().getId())) {
            layout = cachedLayouts.get(vec.getExpressionExperiment().getId());
        } else if (vec.getExpressionExperiment() instanceof ExpressionExperimentSubsetValueObject) {
            // BUG FIX: this was previously written as
            // vec.getExpressionExperiment().getClass().isInstance(ExpressionExperimentSubsetValueObject.class),
            // which passes a Class object where an instance is expected and is therefore
            // always false — so the subset branch never fired and subset layouts were
            // never looked up. The cast below confirms the intended check.
            // subset: use the layout cached under the source experiment.
            layout = cachedLayouts.get(((ExpressionExperimentSubsetValueObject) vec.getExpressionExperiment())
                    .getSourceExperiment());
        }

        if (layout == null || layout.isEmpty()) {
            log.error("Did not find cached layout for " + vec.getId());
            continue;
        }

        List<BioAssayValueObject> newOrdering = new ArrayList<>(layout.keySet());

        newOrdering.retainAll(vec.getBioAssays());

        /*
         * This can happen if the vectors are out of whack with the bioassays - e.g. two platforms were used but
         * merging is not done. See bug 3775. Skipping the ordering is not the right thing to do.
         */
        if (newOrdering.isEmpty()) {

            boolean allNaN = this.allNaN(vec);

            if (allNaN) {
                // reordering will have no effect.
                continue;
            }

            /*
             * Add to the layout.
             */
            layout = this.extendLayout(vec, vec.getExpressionExperiment().getId());
            newOrdering = new ArrayList<>(layout.keySet());
            newOrdering.retainAll(vec.getBioAssays());
            assert !newOrdering.isEmpty();
        }

        newOrderingsForBioAssayDimensions.put(vec, newOrdering);

        // Map each bio-assay to its target index in the sorted order.
        Map<BioAssayValueObject, Integer> ordering = this.getOrdering(newOrdering);

        Long eeId;
        eeId = vec.getExpressionExperiment().getId(); // might be subset id.

        if (!returnedLayouts.containsKey(eeId)) {
            if (vec.isSliced()) {
                // Sliced vector: return only the layout entries actually present.
                LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> trimmedLayout = new LinkedHashMap<>();

                for (BioAssayValueObject baVo : newOrdering) {
                    trimmedLayout.put(baVo, layout.get(baVo));
                }

                returnedLayouts.put(eeId, trimmedLayout);

            } else {
                returnedLayouts.put(eeId, layout);
            }
        }

        /*
         * Might be a faster way.
         */
        // Permute the data in place: dol is a snapshot of the old order.
        double[] data = vec.getData();
        double[] dol = ArrayUtils.clone(data);

        // assert ordering.size() == data.length : "got " + ordering.size() + " expected " + data.length;

        List<BioAssayValueObject> oldOrdering = vec.getBioAssayDimension().getBioAssays();
        int j = 0;
        if (log.isTraceEnabled())
            log.trace("Old order: " + StringUtils.join(ArrayUtils.toObject(data), ","));
        for (BioAssayValueObject ba : oldOrdering) {

            if (ordering.get(ba) == null) {
                // bio-assay not in the new ordering; its slot should hold NaN.
                assert Double.isNaN(dol[j]);
                j++;
                continue;
            }

            assert ordering.containsKey(ba);
            assert ordering.get(ba) != null;

            Integer targetIndex = ordering.get(ba);

            data[targetIndex] = dol[j++];

        }
        if (log.isTraceEnabled())
            log.trace("New order: " + StringUtils.join(ArrayUtils.toObject(data), ","));

        vec.setReorganized(true);

    }

    // Second pass: reorder each bio-assay dimension once, after all data was permuted.
    for (DoubleVectorValueObject vec : dedVs) {
        if (vec.getBioAssayDimension().isReordered())
            continue;
        List<BioAssayValueObject> newOrdering = newOrderingsForBioAssayDimensions.get(vec);
        if (newOrdering == null)
            continue; // data was empty, etc.
        vec.getBioAssayDimension().reorder(newOrdering);
    }

    if (timer.getTime() > 1500) {
        log.info("Sort vectors by design: " + timer.getTime() + "ms");
    }

    return returnedLayouts;

}

From source file:ubic.gemma.persistence.service.expression.designElement.CompositeSequenceServiceImpl.java

/**
 * Checks to see if each named CompositeSequence exists in any of the array designs.
 * Matches are accumulated in a LinkedHashMap keyed by sequence name, so insertion
 * order is preserved and duplicates across array designs are kept only once.
 * (The previous javadoc incorrectly said "HashSet".)
 *
 * @param compositeSequenceNames names to look up (each is trimmed first)
 * @param arrayDesigns array designs to search, in order
 * @return the matched composite sequences in first-seen order, or null if none were
 *         found (null kept for backward compatibility with callers that check it)
 */
@Override
public Collection<CompositeSequence> findByNamesInArrayDesigns(Collection<String> compositeSequenceNames,
        Collection<ArrayDesign> arrayDesigns) {
    LinkedHashMap<String, CompositeSequence> compositeSequencesMap = new LinkedHashMap<>();

    for (ArrayDesign arrayDesign : arrayDesigns) {
        // The parameter is typed, so the old raw-Object iteration + cast is unnecessary.
        for (String rawName : compositeSequenceNames) {
            String name = StringUtils.trim(rawName);
            AbstractService.log.debug("entered: " + name);
            CompositeSequence cs = this.findByName(arrayDesign, name);
            if (cs == null) {
                AbstractService.log.warn("Composite sequence " + name + " does not exist.  Discarding ... ");
            } else if (!compositeSequencesMap.containsKey(cs.getName())) {
                // BUG FIX: previously a duplicate (already-mapped) sequence fell into the
                // warning branch and was misleadingly reported as non-existent; duplicates
                // are now silently skipped since they were already recorded.
                compositeSequencesMap.put(cs.getName(), cs);
            }
        }
    }

    if (compositeSequencesMap.isEmpty())
        return null;

    return compositeSequencesMap.values();
}

From source file:ubic.gemma.web.controller.expression.experiment.DEDVController.java

/**
 * Prepare vvo for display on front end. Uses factors and factor values from layouts.
 *
 * @param vvo Note: This will be modified! It will be updated with the factorNames and factorValuesToNames
 * @param eeLayouts per-bioassay map of factor to value; the Double holds a measurement
 *        for continuous factors or a FactorValue id for categorical ones (see below)
 */
private void prepareFactorsForFrontEndDisplay(VisualizationValueObject vvo,
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts) {

    if (eeLayouts == null || eeLayouts.isEmpty()) {
        log.warn("No layouts, bail");
        vvo.setFactorNames(null);
        vvo.setFactorValuesToNames(null);
        return;
    }

    LinkedHashSet<ExperimentalFactor> factorNames = getFactorNames(eeLayouts);

    // colours for conditions/factor values bar chart FIXME make continuous maps different.
    Map<ExperimentalFactor, Queue<String>> factorColoursMap = createFactorNameToColoursMap(factorNames);
    String missingValueColour = "#DCDCDC";

    Random random = new Random();

    // summary of values & colours keyed by unique factor name, used for the legend
    LinkedHashMap<String, LinkedHashMap<String, String>> factorToValueNames = new LinkedHashMap<>();
    // list of maps with entries: key = factorName, value=array of factor values
    // 1 entry per sample
    List<LinkedHashMap<String, String[]>> factorValueMaps = new ArrayList<>();

    Collection<String> factorsMissingValues = new HashSet<>();

    Collection<BioMaterialValueObject> seenSamples = new HashSet<>(); // if same sample was run more than once on
    // diff platforms.
    Map<Long, FactorValue> fvs = new HashMap<>(); // avoid loading repeatedly.
    Collection<ExperimentalFactor> seenFactors = new HashSet<>();

    for (BioAssayValueObject ba : eeLayouts.keySet()) {

        // only one entry per biological sample, even if assayed on several platforms
        if (seenSamples.contains(ba.getSample())) {
            continue;
        }
        seenSamples.add(ba.getSample());

        // double should be the factorValue id, defined in
        // ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(ExpressionExperiment,
        // BioAssayDimension)
        LinkedHashMap<ExperimentalFactor, Double> factorMap = eeLayouts.get(ba);
        LinkedHashMap<String, String[]> factorNamesToValueColourPairs = new LinkedHashMap<>(factorNames.size());

        // this is defensive, should only come into play when there's something messed up with the data.
        // for every factor, add a missing-value entry (guards against missing data messing up the layout)
        for (ExperimentalFactor factor : factorNames) {
            String[] facValAndColour = new String[] { "No value", missingValueColour };

            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);
        }

        // for each experimental factor, store the name and value
        for (Entry<ExperimentalFactor, Double> pair : factorMap.entrySet()) {
            ExperimentalFactor factor = pair.getKey();
            Double valueOrId = pair.getValue();

            /*
             * the double is only a double because it is meant to hold measurements when the factor is continuous if
             * the factor is categorical, the double value is set to the value's id see
             * ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
             * ExpressionExperiment, BioAssayDimension)
             */
            if (valueOrId == null || factor.getType() == null
                    || (factor.getType().equals(FactorType.CATEGORICAL)
                            && factor.getFactorValues().isEmpty())) {
                factorsMissingValues.add(getUniqueFactorName(factor));
                continue;
            }

            // cache this factor's values by id the first time we see a categorical factor
            if (!seenFactors.contains(factor) && factor.getType().equals(FactorType.CATEGORICAL)) {
                for (FactorValue fv : factor.getFactorValues()) {
                    fvs.put(fv.getId(), fv);
                }
            }

            String facValsStr = getFacValsStr(fvs, factor, valueOrId);

            if (!factorToValueNames.containsKey(getUniqueFactorName(factor))) {
                factorToValueNames.put(getUniqueFactorName(factor), new LinkedHashMap<String, String>());
            }
            // assign colour if unassigned or fetch it if already assigned
            String colourString = "";
            if (!factorToValueNames.get(getUniqueFactorName(factor)).containsKey(facValsStr)) {
                if (factorColoursMap.containsKey(factor)) {
                    colourString = factorColoursMap.get(factor).poll();
                }
                if (colourString == null || Objects.equals(colourString, "")) { // ran out of predefined colours
                    colourString = getRandomColour(random);
                }
                factorToValueNames.get(getUniqueFactorName(factor)).put(facValsStr, colourString);
            } else {
                colourString = factorToValueNames.get(getUniqueFactorName(factor)).get(facValsStr);
            }
            String[] facValAndColour = new String[] { facValsStr, colourString };

            // overwrite the defensive "No value" placeholder for this factor
            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);

        }
        factorValueMaps.add(factorNamesToValueColourPairs);
    }

    // add missing value entries here so they show up at the end of the legend's value lists
    if (!factorsMissingValues.isEmpty()) {
        for (String factorName : factorsMissingValues) {
            if (!factorToValueNames.containsKey(factorName)) {
                factorToValueNames.put(factorName, new LinkedHashMap<String, String>());
            }
            factorToValueNames.get(factorName).put("No value", missingValueColour);
        }
    }
    vvo.setFactorNames(factorToValueNames); // this is summary of values & colours by factor, used for legend
    vvo.setFactorValuesToNames(factorValueMaps); // this is list of maps for each sample
}