Example usage for java.util.LinkedHashMap.put

List of usage examples for java.util.LinkedHashMap.put

Introduction

On this page you can find example usages of java.util.LinkedHashMap.put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
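
Before the project examples below, a minimal sketch of the call itself: put returns the value previously associated with the key (or null if there was none), and a LinkedHashMap keeps keys in insertion order, with re-puts of an existing key keeping its original position.

import java.util.LinkedHashMap;

public class PutDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        map.put("a", 1);                    // returns null: no previous mapping
        Integer previous = map.put("a", 2); // returns 1; "a" keeps its original position
        map.put("b", 3);
        System.out.println(previous);       // 1
        System.out.println(map);            // {a=2, b=3}: insertion order preserved
    }
}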

Usage

From source file:com.github.horrorho.liquiddonkey.settings.commandline.CommandLineOptions.java

static LinkedHashMap<Property, Option> propertyToOption(String itemTypes) {
    LinkedHashMap<Property, Option> options = new LinkedHashMap<>();

    options.put(FILE_OUTPUT_DIRECTORY, new Option("o", "output", true, "Output folder."));

    options.put(FILE_COMBINED,
            new Option("c", "combined", false, "Do not separate each snapshot into its own folder."));

    options.put(SELECTION_UDID,
            Option.builder("u").longOpt("udid")
                    .desc("Download the backup/s with the specified UDID/s. "
                            + "Will match partial UDIDs. Leave empty to download all.")
                    .argName("hex").hasArgs().optionalArg(true).build());

    options.put(SELECTION_SNAPSHOT, Option.builder("s").longOpt("snapshot")
            .desc("Only download data in the snapshot/s specified.\n"
                    + "Negative numbers indicate relative positions from newest backup "
                    + "with -1 being the newest, -2 second newest, etc.")
            .argName("int").hasArgs().build());

    options.put(FILTER_ITEM_TYPES,
            Option.builder(null).longOpt("item-types")
                    .desc("Only download the specified item type/s:\n" + itemTypes).argName("item_type")
                    .hasArgs().build());

    options.put(FILTER_DOMAIN,
            Option.builder("d").longOpt("domain")
                    .desc("Limit files to those within the specified application domain/s.").argName("str")
                    .hasArgs().build());

    options.put(FILTER_RELATIVE_PATH, Option.builder("r").longOpt("relative-path")
            .desc("Limit files to those with the specified relative path/s").argName("str").hasArgs().build());

    options.put(FILTER_EXTENSION, Option.builder("e").longOpt("extension")
            .desc("Limit files to those with the specified extension/s.").argName("str").hasArgs().build());

    options.put(FILTER_DATE_MIN,
            Option.builder().longOpt("min-date")
                    .desc("Minimum last-modified timestamp, ISO format date. E.g. 2000-12-31.").argName("date")
                    .hasArgs().build());

    options.put(FILTER_DATE_MAX,
            Option.builder().longOpt("max-date")
                    .desc("Maximum last-modified timestamp, ISO format date. E.g. 2000-12-31.").argName("date")
                    .hasArgs().build());

    options.put(FILTER_SIZE_MIN, Option.builder().longOpt("min-size").desc("Minimum size in kilobytes.")
            .argName("Kb").hasArgs().build());

    options.put(FILTER_SIZE_MAX, Option.builder().longOpt("max-size").desc("Maximum size in kilobytes.")
            .argName("Kb").hasArgs().build());

    options.put(ENGINE_FORCE_OVERWRITE,
            new Option("f", "force", false, "Download files regardless of whether a local version exists."));

    options.put(ENGINE_PERSISTENT, new Option("p", "persistent", false,
            "More persistent in the handling of network errors, for unstable connections."));

    options.put(ENGINE_AGGRESSIVE, new Option("a", "aggressive", false, "Aggressive retrieval tactics."));

    options.put(ENGINE_THREAD_COUNT, Option.builder("t").longOpt("threads")
            .desc("The maximum number of concurrent threads.").argName("int").hasArgs().build());

    options.put(HTTP_RELAX_SSL,
            new Option(null, "relax-ssl", false, "Relaxed SSL verification, for SSL validation errors."));

    options.put(DEBUG_REPORT, new Option("w", "report", false, "Write out rudimentary reports."));

    options.put(DEBUG_PRINT_STACK_TRACE,
            new Option("x", "stack-trace", false, "Print stack trace on errors, useful for debugging."));

    options.put(ENGINE_DUMP_TOKEN, new Option(null, "token", false, "Output authentication token and exit."));

    options.put(COMMAND_LINE_HELP, new Option(null, "help", false, "Display this help and exit."));

    options.put(COMMAND_LINE_VERSION,
            new Option(null, "version", false, "Output version information and exit."));

    //        options.put(FILE_FLAT,
    //                new Option("i", "--itunes-style", false, "Download files to iTunes style format."));
    return options;
}
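
A hedged usage sketch, not from the original project: the Option values above are Apache Commons CLI types, so the returned map can be flattened into an Options instance and parsed. The "photos videos" help text stands in for the project's real item-types string, and Property is the project's own enum.

import org.apache.commons.cli.*;

// Assumes propertyToOption(...) is accessible and commons-cli is on the classpath.
static void parseArgs(String[] args) throws ParseException {
    Options cliOptions = new Options();
    propertyToOption("photos videos").values().forEach(cliOptions::addOption);

    CommandLine cmd = new DefaultParser().parse(cliOptions, args);
    if (cmd.hasOption("help")) {
        new HelpFormatter().printHelp("liquiddonkey", cliOptions);
    }
}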

From source file:Main.java

/**
 * Sorts a map using the supplied comparator logic.
 *
 * @return a new {@link LinkedHashMap} containing the entries of the supplied map in sorted order.
 * @author Maxim Veksler
 */
public static <K, V> LinkedHashMap<K, V> sortMap(final Map<K, V> map,
        final Comparator<Map.Entry<K, V>> comparator) {
    // Convert the map into a list of key,value pairs.
    List<Map.Entry<K, V>> mapEntries = new LinkedList<Map.Entry<K, V>>(map.entrySet());

    // Sort the converted list according to supplied comparator.
    Collections.sort(mapEntries, comparator);

    // Build a new ordered map, containing the same entries as the old map.
    LinkedHashMap<K, V> result = new LinkedHashMap<K, V>(map.size() + (map.size() / 20));
    for (Map.Entry<K, V> entry : mapEntries) {
        // We iterate on the mapEntries list which is sorted by the comparator putting new entries into
        // the targeted result which is a sorted map.
        result.put(entry.getKey(), entry.getValue());
    }

    return result;
}
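
A usage sketch, assuming sortMap is in scope: rebuilding a map ordered by value, which is the usual reason to copy entries into a LinkedHashMap.

import java.util.LinkedHashMap;
import java.util.Map;

public static void main(String[] args) {
    Map<String, Integer> scores = new LinkedHashMap<>();
    scores.put("carol", 3);
    scores.put("alice", 1);
    scores.put("bob", 2);

    // Order entries by ascending value; the returned map iterates in that order.
    LinkedHashMap<String, Integer> byValue = sortMap(scores, Map.Entry.comparingByValue());
    System.out.println(byValue); // {alice=1, bob=2, carol=3}
}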

From source file:de.iew.framework.security.access.RequestMapbuilder.java

/**
 * Builds a Spring Security compatible request map ({@link java.util.LinkedHashMap} data structure).
 *
 * @param requestMapEntries the request map entries
 * @return the request map
 */
public static LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> buildRequestConfigAttributeMap(
        Collection<RequestMapEntry> requestMapEntries) {
    // Calculate the initial capacity for our LinkedHashMap
    float requestmapEntrySize = (float) requestMapEntries.size();
    int mapInitialCapacity = (int) Math.ceil(requestmapEntrySize / REQUEST_MAP_LOAD_FACTOR);

    if (log.isDebugEnabled()) {
        log.debug("Initialisiere die LinkedHashMap mit einer Kapazitt von " + mapInitialCapacity
                + " Eintrgen.");
    }

    LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> requestMap = new LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>>(
            mapInitialCapacity);

    for (RequestMapEntry requestMapEntry : requestMapEntries) {
        requestMap.put(requestMapEntry.getRequestMatcher(), requestMapEntry.getConfigAttributes());
    }

    return requestMap;
}
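
A hedged sketch of the kind of map this builder produces, populated directly with standard Spring Security types since RequestMapEntry is project-specific. Insertion order is the point of using a LinkedHashMap here: matchers are consulted in map order, so more specific patterns must be put first.

import java.util.Collection;
import java.util.LinkedHashMap;
import org.springframework.security.access.ConfigAttribute;
import org.springframework.security.access.SecurityConfig;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
import org.springframework.security.web.util.matcher.RequestMatcher;

static LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> sampleRequestMap() {
    LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> requestMap = new LinkedHashMap<>();
    // More specific patterns first: the map's iteration order is the matching order.
    requestMap.put(new AntPathRequestMatcher("/admin/**"), SecurityConfig.createList("ROLE_ADMIN"));
    requestMap.put(new AntPathRequestMatcher("/**"), SecurityConfig.createList("ROLE_USER"));
    return requestMap;
}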

From source file:Main.java

/**
 * Builds a map from a flat list of key-value pairs.
 *
 * @param args alternating keys and values; the number of arguments must be even
 * @return the resulting map; if the argument count is odd, the last argument is ignored
 */
public static Map<String, Object> mapFrom(Object... args) {
    if (args.length % 2 != 0) {
        Log.w("VKUtil", "Params must be paired. Last one is ignored");
    }
    LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>(args.length / 2);
    for (int i = 0; i + 1 < args.length; i += 2) {
        if (!(args[i] instanceof String))
            Log.e("VK SDK", "Error while using mapFrom",
                    new InvalidParameterSpecException("Key must be string"));
        result.put((String) args[i], args[i + 1]);
    }
    return result;
}
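
A usage sketch, assuming mapFrom is in scope (the helper is Android code, so Log is android.util.Log):

// Even number of arguments: every key is followed by its value.
Map<String, Object> params = mapFrom("count", 10, "offset", 0); // {count=10, offset=0}

// Odd number of arguments: a warning is logged and the unpaired last argument is ignored.
Map<String, Object> truncated = mapFrom("count", 10, "offset"); // {count=10}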

From source file:de.tudarmstadt.ukp.dkpro.c4corpus.hadoop.statistics.vocabulary.TopNWordsCorrelation.java

public static LinkedHashMap<String, Integer> loadCorpusToRankedVocabulary(InputStream corpus)
        throws IOException {
    LinkedHashMap<String, Integer> result = new LinkedHashMap<>();

    LineIterator lineIterator = IOUtils.lineIterator(corpus, "utf-8");
    int counter = 0;
    while (lineIterator.hasNext()) {
        String line = lineIterator.next();

        String word = line.split("\\s+")[0];

        result.put(word, counter);
        counter++;
    }

    return result;
}
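
A usage sketch, assuming the method is in scope: the corpus is expected to list one word per line (optionally followed by whitespace-separated fields such as counts), ordered from most to least frequent, so each word maps to its zero-based rank.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;

public static void main(String[] args) throws IOException {
    String corpus = "the 100\nof 50\nand 25\n";
    LinkedHashMap<String, Integer> ranks = loadCorpusToRankedVocabulary(
            new ByteArrayInputStream(corpus.getBytes(StandardCharsets.UTF_8)));
    System.out.println(ranks); // {the=0, of=1, and=2}
}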

From source file:com.atinternet.tracker.LifeCycle.java

/**
 * Get the object which contains lifecycle metrics
 *
 * @return Closure
 */
static Closure getMetrics(final SharedPreferences preferences) {
    return new Closure() {
        @Override
        public String execute() {
            try {
                LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();

                // fs
                map.put("fs", preferences.getBoolean(FIRST_SESSION, false) ? 1 : 0);

                // fsau
                map.put("fsau", preferences.getBoolean(FIRST_SESSION_AFTER_UPDATE, false) ? 1 : 0);

                if (!TextUtils.isEmpty(preferences.getString(FIRST_SESSION_DATE_AFTER_UPDATE, ""))) {
                    map.put("scsu", preferences.getInt(SESSION_COUNT_SINCE_UPDATE, 0));
                    map.put("fsdau",
                            Integer.parseInt(preferences.getString(FIRST_SESSION_DATE_AFTER_UPDATE, "")));
                    map.put("dsu", preferences.getInt(DAYS_SINCE_UPDATE, 0));
                }

                map.put("sc", preferences.getInt(SESSION_COUNT, 0));
                map.put("fsd", Integer.parseInt(preferences.getString(FIRST_SESSION_DATE, "")));
                map.put("dsls", preferences.getInt(DAYS_SINCE_LAST_SESSION, 0));
                map.put("dsfs", preferences.getInt(DAYS_SINCE_FIRST_SESSION, 0));
                map.put("sessionId", sessionId);

                return new JSONObject().put("lifecycle", new JSONObject(map)).toString();
            } catch (JSONException e) {
                e.printStackTrace();
            }
            return "";
        }
    };
}

From source file:com.netflix.dyno.contrib.consul.ConsulHelper.java

public static Map<String, String> getMetadata(List<String> tags) {
    LinkedHashMap<String, String> metadata = new LinkedHashMap<>();
    if (tags != null) {
        for (String tag : tags) {
            String[] parts = StringUtils.split(tag, "=");
            switch (parts.length) {
            case 0:
                break;
            case 1:
                metadata.put(parts[0], parts[0]);
                break;
            case 2:
                metadata.put(parts[0], parts[1]);
                break;
            default:
                String[] end = Arrays.copyOfRange(parts, 1, parts.length);
                metadata.put(parts[0], StringUtils.join(end, "="));
                break;
            }

        }
    }

    return metadata;
}
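
A usage sketch, assuming getMetadata is in scope, covering the three tag shapes the switch statement handles:

import java.util.Arrays;
import java.util.Map;

public static void main(String[] args) {
    Map<String, String> meta = getMetadata(Arrays.asList(
            "datacenter=us-east-1", // key=value: stored as-is
            "secure",               // bare tag: the key doubles as the value
            "jvm=-Xmx=2g"));        // extra separators: remainder rejoined with '='
    System.out.println(meta);       // {datacenter=us-east-1, secure=secure, jvm=-Xmx=2g}
}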

From source file:com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyParameterSensitivity.java

/**
 * Constructor from a simple sensitivity and a currency.
 * @param single The simple parameter sensitivity.
 * @param ccy The currency.
 * @return The multiple currency sensitivity.
 */
public static MultipleCurrencyParameterSensitivity of(final SimpleParameterSensitivity single,
        final Currency ccy) {
    final LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D> sensi = new LinkedHashMap<>();
    for (final String name : single.getAllNames()) {
        sensi.put(new ObjectsPair<>(name, ccy), single.getSensitivity(name));
    }
    return MultipleCurrencyParameterSensitivity.of(sensi);
}

From source file:com.streamsets.pipeline.lib.jdbc.JdbcMetastoreUtil.java

public static LinkedHashMap<String, JdbcTypeInfo> getDiff(LinkedHashMap<String, JdbcTypeInfo> oldState,
        LinkedHashMap<String, JdbcTypeInfo> newState) throws JdbcStageCheckedException {
    LinkedHashMap<String, JdbcTypeInfo> columnDiff = new LinkedHashMap<>();
    for (Map.Entry<String, JdbcTypeInfo> entry : newState.entrySet()) {
        String columnName = entry.getKey();
        JdbcTypeInfo columnTypeInfo = entry.getValue();
        if (!oldState.containsKey(columnName)) {
            columnDiff.put(columnName, columnTypeInfo);
        } else if (!oldState.get(columnName).equals(columnTypeInfo)) {
            throw new JdbcStageCheckedException(JdbcErrors.JDBC_303, columnName,
                    oldState.get(columnName).toString(), columnTypeInfo.toString());
        }
    }
    return columnDiff;
}

From source file:com.mongodb.hadoop.pig.BSONStorage.java

public static Object getTypeForBSON(Object o, ResourceSchema.ResourceFieldSchema field) throws IOException {
    byte dataType = field != null ? field.getType() : DataType.UNKNOWN;
    ResourceSchema s = null;
    if (field == null) {
        if (o instanceof Map) {
            dataType = DataType.MAP;
        } else if (o instanceof List) {
            dataType = DataType.BAG;
        } else {
            dataType = DataType.UNKNOWN;
        }
    } else {
        s = field.getSchema();
        if (dataType == DataType.UNKNOWN) {
            if (o instanceof Map)
                dataType = DataType.MAP;
            if (o instanceof List)
                dataType = DataType.BAG;
        }
    }

    if (dataType == DataType.BYTEARRAY && o instanceof Map) {
        dataType = DataType.MAP;
    }

    switch (dataType) {
    case DataType.NULL:
        return null;
    case DataType.INTEGER:
    case DataType.LONG:
    case DataType.FLOAT:
    case DataType.DOUBLE:
        return o;
    case DataType.BYTEARRAY:
        return o.toString();
    case DataType.CHARARRAY:
        return (String) o;

    // Given a TUPLE, create a Map so BSONEncoder will eat it
    case DataType.TUPLE:
        if (s == null) {
            throw new IOException("Schemas must be fully specified to use "
                    + "this storage function.  No schema found for field " + field.getName());
        }
        ResourceSchema.ResourceFieldSchema[] fs = s.getFields();
        LinkedHashMap m = new java.util.LinkedHashMap();
        for (int j = 0; j < fs.length; j++) {
            m.put(fs[j].getName(), getTypeForBSON(((Tuple) o).get(j), fs[j]));
        }
        return m;

    // Given a BAG, create an Array so BSONEncoder will eat it.
    case DataType.BAG:
        if (s == null) {
            throw new IOException("Schemas must be fully specified to use "
                    + "this storage function.  No schema found for field " + field.getName());
        }
        fs = s.getFields();
        if (fs.length != 1 || fs[0].getType() != DataType.TUPLE) {
            throw new IOException("Found a bag without a tuple " + "inside!");
        }
        // Drill down the next level to the tuple's schema.
        s = fs[0].getSchema();
        if (s == null) {
            throw new IOException("Schemas must be fully specified to use "
                    + "this storage function.  No schema found for field " + field.getName());
        }
        fs = s.getFields();

        ArrayList a = new ArrayList<Map>();
        for (Tuple t : (DataBag) o) {
            LinkedHashMap ma = new java.util.LinkedHashMap();
            for (int j = 0; j < fs.length; j++) {
                ma.put(fs[j].getName(), ((Tuple) t).get(j));
            }
            a.add(ma);
        }

        return a;
    case DataType.MAP:
        Map map = (Map) o;
        Map<String, Object> out = new HashMap<String, Object>(map.size());
        for (Object key : map.keySet()) {
            out.put(key.toString(), getTypeForBSON(map.get(key), null));
        }
        return out;
    default:
        return o;
    }
}