Example usage for java.util EnumMap containsKey

List of usage examples for java.util EnumMap containsKey

Introduction

On this page you can find example usages of java.util.EnumMap#containsKey.

Prototype

public boolean containsKey(Object key) 

Source Link

Document

Returns true if this map contains a mapping for the specified key.

Usage

From source file:Tutorial.java

/**
 * Demonstrates {@link EnumMap#containsKey(Object)}: fills a map keyed by the
 * Tutorial enum, prints it, then probes one key that is present and one that
 * is absent.
 */
public static void main(String[] args) {
    EnumMap<Tutorial, String> tutorials = new EnumMap<Tutorial, String>(Tutorial.class);

    tutorials.put(Tutorial.CSS, "1");
    tutorials.put(Tutorial.Python, "2");
    tutorials.put(Tutorial.PHP, "3");
    tutorials.put(Tutorial.Java, "4");

    System.out.println(tutorials);

    boolean hasMapping = tutorials.containsKey(Tutorial.CSS);
    System.out.println("Tutorial.CSS has a mapping:" + hasMapping);

    // Javascript was never inserted, so this prints false.
    hasMapping = tutorials.containsKey(Tutorial.Javascript);
    System.out.println("Tutorial.Javascript has a mapping:" + hasMapping);
}

From source file:it.units.malelab.sse.MyGeneticAlgorithm.java

/**
 * Computes the per-type arithmetic mean over a list of stat maps.
 * A result type appears in the returned map only if at least one map in
 * {@code statsList} holds a value for it; types absent everywhere are omitted.
 *
 * @param statsList the stat maps to average (may be empty)
 * @return a map from each observed result type to its mean value
 */
private EnumMap<Evaluator.ResultType, Double> mean(List<EnumMap<Evaluator.ResultType, Double>> statsList) {
    EnumMap<Evaluator.ResultType, Double> meanStats = new EnumMap<>(Evaluator.ResultType.class);
    for (Evaluator.ResultType type : Evaluator.ResultType.values()) {
        double sum = 0;
        double count = 0;
        for (EnumMap<Evaluator.ResultType, Double> sample : statsList) {
            if (sample.containsKey(type)) {
                sum += sample.get(type);
                count += 1;
            }
        }
        // Skip types with no observations so we never divide by zero.
        if (count > 0) {
            meanStats.put(type, sum / count);
        }
    }
    return meanStats;
}

From source file:it.units.malelab.sse.MyGeneticAlgorithm.java

/**
 * Runs the genetic algorithm until {@code condition} is satisfied, printing a
 * per-generation report (best / top-10 mean / population mean for each tracked
 * metric) to standard output.
 *
 * @param initial the starting population
 * @param condition the stopping condition checked before each generation
 * @return the final population when the stopping condition holds
 */
@Override
public Population evolve(Population initial, StoppingCondition condition) {
    Population current = initial;
    generationsEvolved = 0;
    while (!condition.isSatisfied(current)) {
        current = nextGeneration(current);
        generationsEvolved++;
        // Collect stats from chromosomes that have been evaluated — only
        // those expose an OVERLAPNESS entry.
        List<EnumMap<Evaluator.ResultType, Double>> statsList = new ArrayList<>(current.getPopulationSize());
        Iterator<Chromosome> iterator = current.iterator();
        while (iterator.hasNext()) {
            OperationsChromosome chromosome = (OperationsChromosome) iterator.next();
            EnumMap<Evaluator.ResultType, Double> stats = chromosome.getStats();
            if (stats.containsKey(Evaluator.ResultType.OVERLAPNESS)) {
                statsList.add(stats);
            }
        }
        // BUG FIX: the original unconditionally called statsList.get(0) and
        // statsList.subList(0, 10), which throws IndexOutOfBoundsException
        // when fewer than 10 (or zero) chromosomes carry stats. Skip the
        // report when empty and clamp the top-10 window otherwise.
        if (statsList.isEmpty()) {
            continue;
        }
        // Sort ascending by overlapness so index 0 is the best individual.
        Collections.sort(statsList, new Comparator<EnumMap<Evaluator.ResultType, Double>>() {
            @Override
            public int compare(EnumMap<Evaluator.ResultType, Double> stats1,
                    EnumMap<Evaluator.ResultType, Double> stats2) {
                return Double.compare(stats1.get(Evaluator.ResultType.OVERLAPNESS),
                        stats2.get(Evaluator.ResultType.OVERLAPNESS));
            }
        });
        EnumMap<Evaluator.ResultType, Double> bestStats = statsList.get(0);
        EnumMap<Evaluator.ResultType, Double> top10Stats = mean(
                statsList.subList(0, Math.min(10, statsList.size())));
        EnumMap<Evaluator.ResultType, Double> allStats = mean(statsList);
        System.out.printf("ovp=%5.3f/%5.3f/%5.3f   ", bestStats.get(Evaluator.ResultType.OVERLAPNESS),
                top10Stats.get(Evaluator.ResultType.OVERLAPNESS),
                allStats.get(Evaluator.ResultType.OVERLAPNESS));
        System.out.printf("ops=%4.0f/%4.0f/%4.0f   ", bestStats.get(Evaluator.ResultType.AVG_OPS),
                top10Stats.get(Evaluator.ResultType.AVG_OPS), allStats.get(Evaluator.ResultType.AVG_OPS));
        System.out.printf("mfp=%4.0f/%4.0f/%4.0f   ", bestStats.get(Evaluator.ResultType.AVG_FOOTPRINT),
                top10Stats.get(Evaluator.ResultType.AVG_FOOTPRINT),
                allStats.get(Evaluator.ResultType.AVG_FOOTPRINT));
        System.out.printf("err=%5.3f/%5.3f/%5.3f   ", bestStats.get(Evaluator.ResultType.ERROR_RATIO),
                top10Stats.get(Evaluator.ResultType.ERROR_RATIO),
                allStats.get(Evaluator.ResultType.ERROR_RATIO));
        System.out.printf("size=%3.0f/%3.0f/%3.0f   ", bestStats.get(Evaluator.ResultType.SIZE),
                top10Stats.get(Evaluator.ResultType.SIZE), allStats.get(Evaluator.ResultType.SIZE));
        System.out.printf("evals=%8d\n", evaluator.getEvaluatedCount());
        //System.out.println(evaluator.getErrorCodes());
    }
    return current;
}

From source file:com.glowinteractive.reforger.Item.java

/**
 * Enumerates every single-stat reforge candidate allowed by {@code mappings}:
 * for each (decrease, increase) pair where the item has the source stat,
 * lacks the target stat, and the mapping permits the conversion, 40% of the
 * source stat (rounded down) is moved to the target stat.
 *
 * @param mappings for each source stat, the set of stats it may be converted into
 * @return the set of candidate stat deltas
 */
public HashSet<StatKVMap> candidates(EnumMap<Stat, EnumSet<Stat>> mappings) {
    parse();

    final HashSet<StatKVMap> result = new HashSet<StatKVMap>(Stat.TYPE_COUNT);

    for (Stat decrease : Stat.values()) {
        // Nothing to take from this stat, or no conversions defined for it.
        if (_mutableStats.value(decrease) == 0 || !mappings.containsKey(decrease)) {
            continue;
        }
        // The delta depends only on the source stat, so compute it once.
        int delta = Math.round((float) Math.floor(0.4 * _mutableStats.value(decrease)));
        for (Stat increase : mappings.get(decrease)) {
            // Only add to stats the item does not already carry.
            if (_mutableStats.value(increase) == 0) {
                result.add(new StatKVMap(decrease, increase, delta));
            }
        }
    }

    return result;
}

From source file:it.unimi.di.big.mg4j.index.cluster.IndexCluster.java

/** Returns a new index cluster.
 *
 * <p>This method uses the <samp>LOCALINDEX</samp> property to locate the local indices,
 * loads them (passing on <code>randomAccess</code>) and
 * builds a new index cluster using the appropriate implementing subclass.
 * 
 * <p>Note that <code>documentSizes</code> is just passed to the local indices. This can be useful
 * in {@linkplain DocumentalCluster documental clusters}, as it allows local scoring, but it is useless in
 * {@linkplain LexicalCluster lexical clusters}, as scoring is necessarily centralised. In the
 * latter case, the property {@link it.unimi.di.big.mg4j.index.Index.UriKeys#SIZES} can be used to specify a global sizes file (which
 * usually comes from an original global index).
 * 
 * @param basename the basename.
 * @param randomAccess whether the index should be accessible randomly.
 * @param documentSizes if true, document sizes will be loaded (note that sometimes document sizes
 * might be loaded anyway because the compression method for positions requires it).
 * @param queryProperties a map containing associations between {@link it.unimi.di.big.mg4j.index.Index.UriKeys} and values, or <code>null</code>.
 * @return a {@link LexicalCluster}, {@link DocumentalConcatenatedCluster} or
 * {@link DocumentalMergedCluster}, depending on the index class named in the property file.
 */
@SuppressWarnings("unchecked")
static public Index getInstance(final CharSequence basename, final boolean randomAccess,
        final boolean documentSizes, final EnumMap<UriKeys, String> queryProperties)
        throws ConfigurationException, IOException, ClassNotFoundException, SecurityException,
        URISyntaxException, InstantiationException, IllegalAccessException, InvocationTargetException,
        NoSuchMethodException {
    final Properties properties = new Properties(basename + DiskBasedIndex.PROPERTIES_EXTENSION);
    // A cluster property file must carry either a serialised strategy or a strategy class.
    ClusteringStrategy strategy = null;
    Class<? extends ClusteringStrategy> strategyClass = null;
    if (properties.containsKey(PropertyKeys.STRATEGY))
        strategy = (ClusteringStrategy) BinIO.loadObject(properties.getString(PropertyKeys.STRATEGY));
    else if (properties.containsKey(PropertyKeys.STRATEGYCLASS))
        try {
            strategyClass = (Class<? extends ClusteringStrategy>) MG4JClassParser.getParser()
                    .parse(properties.getString(PropertyKeys.STRATEGYCLASS));
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }
    else
        throw new IllegalArgumentException(
                "Cluster properties must contain either a strategy or a strategy class property");
    final Class<? extends IndexCluster> indexClass = (Class<? extends IndexCluster>) Class
            .forName(properties.getString(Index.PropertyKeys.INDEXCLASS, "(missing index class)"));

    // Load every local index listed in the property file.
    String[] localBasename = properties.getStringArray(PropertyKeys.LOCALINDEX);
    Index[] localIndex = new Index[localBasename.length];
    for (int i = 0; i < localIndex.length; i++)
        localIndex[i] = Index.getInstance(localBasename[i], randomAccess, documentSizes);

    final int numberOfDocuments = properties.getInt(Index.PropertyKeys.DOCUMENTS);
    // An explicit global sizes file (useful for lexical clusters) overrides local sizes.
    final IntBigList sizes = queryProperties != null && queryProperties.containsKey(Index.UriKeys.SIZES)
            ? DiskBasedIndex.readSizes(queryProperties.get(Index.UriKeys.SIZES), numberOfDocuments)
            : null;

    if (sizes != null && documentSizes)
        LOGGER.warn(
                "You are loading both local sizes and a global size file specified by the \"size\" properties, which is usually nonsensical");

    // The cluster has counts/positions only if *every* local index does;
    // all local indices must also agree on the payload specification.
    boolean hasCounts = true;
    boolean hasPositions = true;
    Payload payload = null;

    for (int i = 0; i < localIndex.length; i++) {
        hasCounts = hasCounts && localIndex[i].hasCounts;
        hasPositions = hasPositions && localIndex[i].hasPositions;

        if (i == 0)
            payload = localIndex[i].payload;
        if ((payload == null) != (localIndex[i].payload == null)
                || payload != null && !payload.compatibleWith(localIndex[i].payload))
            throw new IllegalStateException("The payload specification of index " + localIndex[0]
                    + " is not compatible with that of index " + localIndex[i]);
    }

    // We stem the names of Bloom filters from the index basename.
    BloomFilter<Void>[] termFilter = null;
    if (properties.getBoolean(DocumentalCluster.PropertyKeys.BLOOM)) {
        LOGGER.debug("Loading Bloom filters...");
        termFilter = new BloomFilter[localIndex.length];
        for (int i = 0; i < localIndex.length; i++)
            termFilter[i] = (BloomFilter<Void>) BinIO.loadObject(basename + "-" + i + BLOOM_EXTENSION);
        LOGGER.debug("Completed.");
    }

    // Let us rebuild the strategy in case it's a chained strategy
    if (strategyClass != null) {
        strategy = strategyClass.getConstructor(Index[].class, BloomFilter[].class).newInstance(localIndex,
                termFilter);
    } else {
        if (strategy instanceof ChainedLexicalClusteringStrategy)
            strategy = new ChainedLexicalClusteringStrategy(localIndex, termFilter);
        else if (strategy.numberOfLocalIndices() != localBasename.length)
            // BUG FIX: the original message interpolated localIndex.length (which is
            // always equal to localBasename.length) instead of the strategy's count,
            // so the reported numbers could never actually differ.
            throw new IllegalArgumentException("The number of local indices of the strategy ("
                    + strategy.numberOfLocalIndices()
                    + ") and the number of local indices specified by the property file ("
                    + localBasename.length + ") differ");
    }

    if (LexicalCluster.class.isAssignableFrom(indexClass))
        return new LexicalCluster(localIndex, (LexicalClusteringStrategy) strategy, termFilter,
                numberOfDocuments, properties.getInt(Index.PropertyKeys.TERMS),
                properties.getLong(Index.PropertyKeys.POSTINGS),
                properties.getLong(Index.PropertyKeys.OCCURRENCES),
                properties.getInt(Index.PropertyKeys.MAXCOUNT), payload, hasCounts, hasPositions,
                Index.getTermProcessor(properties), properties.getString(Index.PropertyKeys.FIELD), sizes,
                properties);
    else if (DocumentalCluster.class.isAssignableFrom(indexClass)) {
        if (DocumentalConcatenatedCluster.class.isAssignableFrom(indexClass))
            return new DocumentalConcatenatedCluster(localIndex, (DocumentalClusteringStrategy) strategy,
                    properties.getBoolean(IndexCluster.PropertyKeys.FLAT), termFilter, numberOfDocuments,
                    properties.getInt(Index.PropertyKeys.TERMS),
                    properties.getLong(Index.PropertyKeys.POSTINGS),
                    properties.getLong(Index.PropertyKeys.OCCURRENCES),
                    properties.getInt(Index.PropertyKeys.MAXCOUNT), payload, hasCounts, hasPositions,
                    Index.getTermProcessor(properties), properties.getString(Index.PropertyKeys.FIELD), sizes,
                    properties);
        return new DocumentalMergedCluster(localIndex, (DocumentalClusteringStrategy) strategy,
                properties.getBoolean(IndexCluster.PropertyKeys.FLAT), termFilter, numberOfDocuments,
                properties.getInt(Index.PropertyKeys.TERMS), properties.getLong(Index.PropertyKeys.POSTINGS),
                properties.getLong(Index.PropertyKeys.OCCURRENCES),
                properties.getInt(Index.PropertyKeys.MAXCOUNT), payload, hasCounts, hasPositions,
                Index.getTermProcessor(properties), properties.getString(Index.PropertyKeys.FIELD), sizes,
                properties);
    } else
        throw new IllegalArgumentException("Unknown IndexCluster implementation: " + indexClass.getName());

}

From source file:it.unimi.di.big.mg4j.index.DiskBasedIndex.java

/** Returns a new disk-based index, loading exactly the specified parts and using preloaded {@link Properties}.
 *
 * @param ioFactory the factory that will be used to perform I/O.
 * @param basename the basename of the index.
 * @param properties the properties obtained from the given basename.
 * @param termMap the term map for this index, or <code>null</code> for no term map.
 * @param prefixMap the prefix map for this index, or <code>null</code> for no prefix map.
 * @param randomAccess whether the index should be accessible randomly (e.g., if it will
 * be possible to call {@link IndexReader#documents(long)} on the index readers returned by the index).
 * @param documentSizes if true, document sizes will be loaded (note that sometimes document sizes
 * might be loaded anyway because the compression method for positions requires it).
 * @param queryProperties a map containing associations between {@link Index.UriKeys} and values, or <code>null</code>.
 * @return an index instance whose concrete class (quasi-succinct, in-memory,
 * memory-mapped or file-based, possibly high-performance) depends on the
 * property file and on <code>queryProperties</code>.
 */
@SuppressWarnings("resource")
public static Index getInstance(final IOFactory ioFactory, final CharSequence basename, Properties properties,
        final StringMap<? extends CharSequence> termMap, final PrefixMap<? extends CharSequence> prefixMap,
        final boolean randomAccess, final boolean documentSizes, final EnumMap<UriKeys, String> queryProperties)
        throws ClassNotFoundException, IOException, InstantiationException, IllegalAccessException {

    // This could be null if old indices contain SkipIndex
    Class<?> indexClass = null;
    try {
        // Compatibility with previous versions: class names moved from ".dsi." to ".di." packages.
        indexClass = Class.forName(properties.getString(Index.PropertyKeys.INDEXCLASS, "(missing index class)")
                .replace(".dsi.", ".di."));
    } catch (Exception ignore) {
        // Intentionally ignored: an unknown or legacy class simply leaves indexClass null.
    }

    // Basic index statistics from the property file (-1 marks "not recorded").
    final long numberOfDocuments = properties.getLong(Index.PropertyKeys.DOCUMENTS);
    final long numberOfTerms = properties.getLong(Index.PropertyKeys.TERMS);
    final long numberOfPostings = properties.getLong(Index.PropertyKeys.POSTINGS);
    final long numberOfOccurrences = properties.getLong(Index.PropertyKeys.OCCURRENCES, -1);
    final int maxCount = properties.getInt(Index.PropertyKeys.MAXCOUNT, -1);
    // Field name defaults to the basename's file name when not recorded.
    final String field = properties.getString(Index.PropertyKeys.FIELD,
            new File(basename.toString()).getName());

    // Sanity-check that the supplied maps cover exactly the index dictionary.
    if (termMap != null && termMap.size64() != numberOfTerms)
        throw new IllegalArgumentException("The size of the term map (" + termMap.size64()
                + ") is not equal to the number of terms (" + numberOfTerms + ")");
    if (prefixMap != null && prefixMap.size64() != numberOfTerms)
        throw new IllegalArgumentException("The size of the prefix map (" + prefixMap.size64()
                + ") is not equal to the number of terms (" + numberOfTerms + ")");

    // Payload-carrying indices instantiate their payload class reflectively.
    final Payload payload = (Payload) (properties.containsKey(Index.PropertyKeys.PAYLOADCLASS)
            ? Class.forName(properties.getString(Index.PropertyKeys.PAYLOADCLASS)).newInstance()
            : null);

    final int skipQuantum = properties.getInt(BitStreamIndex.PropertyKeys.SKIPQUANTUM, -1);

    final int bufferSize = properties.getInt(BitStreamIndex.PropertyKeys.BUFFERSIZE,
            BitStreamIndex.DEFAULT_BUFFER_SIZE);
    // queryProperties may override the offset step used for semi-external offsets.
    final int offsetStep = queryProperties != null && queryProperties.get(UriKeys.OFFSETSTEP) != null
            ? Integer.parseInt(queryProperties.get(UriKeys.OFFSETSTEP))
            : DEFAULT_OFFSET_STEP;

    final boolean highPerformance = indexClass != null && FileHPIndex.class.isAssignableFrom(indexClass);
    final boolean inMemory = queryProperties != null && queryProperties.containsKey(UriKeys.INMEMORY);
    final TermProcessor termProcessor = Index.getTermProcessor(properties);

    // Load document sizes if forced to do so, or if the pointer/position compression methods make it necessary.
    IntBigList sizes = null;

    // Succinct sizes are read straight from the file system, hence the factory restriction.
    if (queryProperties != null && queryProperties.containsKey(UriKeys.SUCCINCTSIZES)
            && ioFactory != IOFactory.FILESYSTEM_FACTORY)
        throw new IllegalArgumentException(
                "Succinct sizes are deprecated and available only using the file system I/O factory.");

    // Quasi-succinct indices take a completely separate construction path.
    if (QuasiSuccinctIndex.class == indexClass) {
        if (ioFactory != IOFactory.FILESYSTEM_FACTORY && !inMemory)
            throw new IllegalArgumentException(
                    "Memory-mapped quasi-succinct indices require the file system I/O factory.");
        final Map<Component, Coding> flags = CompressionFlags.valueOf(
                properties.getStringArray(Index.PropertyKeys.CODING),
                CompressionFlags.DEFAULT_QUASI_SUCCINCT_INDEX);
        final File pointersFile = new File(basename + POINTERS_EXTENSIONS);
        if (!pointersFile.exists())
            throw new FileNotFoundException("Cannot find pointers file " + pointersFile.getName());

        if (documentSizes) {
            sizes = queryProperties != null && queryProperties.containsKey(UriKeys.SUCCINCTSIZES)
                    ? readSizesSuccinct(basename + DiskBasedIndex.SIZES_EXTENSION, numberOfDocuments)
                    : readSizes(ioFactory, basename + DiskBasedIndex.SIZES_EXTENSION, numberOfDocuments);
            if (sizes.size64() != numberOfDocuments)
                throw new IllegalStateException("The length of the size list (" + sizes.size64()
                        + ") is not equal to the number of documents (" + numberOfDocuments + ")");
        }

        final ByteOrder byteOrder = byteOrder(properties.getString(PropertyKeys.BYTEORDER));
        final boolean hasCounts = flags.containsKey(Component.COUNTS);
        final boolean hasPositions = flags.containsKey(Component.POSITIONS);
        // Pointers/counts/positions are either fully loaded in memory or memory-mapped;
        // counts and positions components are optional and passed as null when absent.
        return new QuasiSuccinctIndex(
                inMemory ? loadLongBigList(ioFactory, basename + POINTERS_EXTENSIONS, byteOrder)
                        : ByteBufferLongBigList.map(
                                new FileInputStream(basename + POINTERS_EXTENSIONS).getChannel(), byteOrder,
                                MapMode.READ_ONLY),
                hasCounts
                        ? (inMemory ? loadLongBigList(ioFactory, basename + COUNTS_EXTENSION, byteOrder)
                                : ByteBufferLongBigList.map(
                                        new FileInputStream(basename + COUNTS_EXTENSION).getChannel(),
                                        byteOrder, MapMode.READ_ONLY))
                        : null,
                hasPositions ? (inMemory ? loadLongBigList(ioFactory, basename + POSITIONS_EXTENSION, byteOrder)
                        : ByteBufferLongBigList.map(
                                new FileInputStream(basename + POSITIONS_EXTENSION).getChannel(), byteOrder,
                                MapMode.READ_ONLY))
                        : null,
                numberOfDocuments, numberOfTerms, numberOfPostings, numberOfOccurrences, maxCount, payload,
                Fast.mostSignificantBit(skipQuantum), hasCounts, hasPositions,
                Index.getTermProcessor(properties), field, properties, termMap, prefixMap, sizes,
                DiskBasedIndex.offsets(ioFactory, basename + POINTERS_EXTENSIONS + OFFSETS_POSTFIX,
                        numberOfTerms, offsetStep),
                hasCounts
                        ? DiskBasedIndex.offsets(ioFactory, basename + COUNTS_EXTENSION + OFFSETS_POSTFIX,
                                numberOfTerms, offsetStep)
                        : null,
                hasPositions
                        ? DiskBasedIndex.offsets(ioFactory, basename + POSITIONS_EXTENSION + OFFSETS_POSTFIX,
                                numberOfTerms, offsetStep)
                        : null);

    }

    // Bit-stream index path: decode the per-component compression flags.
    final Map<Component, Coding> flags = CompressionFlags
            .valueOf(properties.getStringArray(Index.PropertyKeys.CODING), null);

    final Coding frequencyCoding = flags.get(Component.FREQUENCIES);
    final Coding pointerCoding = flags.get(Component.POINTERS);
    final Coding countCoding = flags.get(Component.COUNTS);
    final Coding positionCoding = flags.get(Component.POSITIONS);
    if (countCoding == null && positionCoding != null)
        throw new IllegalArgumentException(
                "Index " + basename + " has positions but no counts (this can't happen)");

    // Golomb/interpolative position coding needs document sizes even if the caller did not ask for them.
    if (payload == null
            && (documentSizes || positionCoding == Coding.GOLOMB || positionCoding == Coding.INTERPOLATIVE)) {
        // NOTE(review): unlike the quasi-succinct branch above, this readSizes call does
        // not pass ioFactory — presumably it defaults to the file system; confirm.
        sizes = queryProperties != null && queryProperties.containsKey(UriKeys.SUCCINCTSIZES)
                ? readSizesSuccinct(basename + DiskBasedIndex.SIZES_EXTENSION, numberOfDocuments)
                : readSizes(basename + DiskBasedIndex.SIZES_EXTENSION, numberOfDocuments);
        if (sizes.size64() != numberOfDocuments)
            throw new IllegalStateException("The length of the size list (" + sizes.size64()
                    + ") is not equal to the number of documents (" + numberOfDocuments + ")");
    }

    final int height = properties.getInt(BitStreamIndex.PropertyKeys.SKIPHEIGHT, -1);
    // Load offsets if forced to do so. Depending on a property, we use the core-memory or the semi-external version.
    final LongBigList offsets = payload == null && randomAccess
            ? offsets(ioFactory, basename + OFFSETS_EXTENSION, numberOfTerms, offsetStep)
            : null;

    final String indexFile = basename + INDEX_EXTENSION;
    if (!ioFactory.exists(indexFile))
        throw new FileNotFoundException("Cannot find index file " + indexFile);

    if (inMemory) {
        /*if ( SqrtSkipIndex.class.isAssignableFrom( indexClass ) )
           return new SqrtSkipInMemoryIndex( BinIO.loadBytes( indexFile.toString() ), 
          numberOfDocuments, numberOfTerms, numberOfPostings, numberOfOccurrences, maxCount, 
          frequencyCoding, pointerCoding, countCoding, positionCoding,
          termProcessor,
          field, properties, termMap, prefixMap, sizes, offsets );*/
        // Whole index (and, for HP indices, the separate positions file) loaded into byte arrays.
        return highPerformance
                ? new InMemoryHPIndex(IOFactories.loadBytes(ioFactory, indexFile),
                        IOFactories.loadBytes(ioFactory, basename + POSITIONS_EXTENSION), numberOfDocuments,
                        numberOfTerms, numberOfPostings, numberOfOccurrences, maxCount, payload,
                        frequencyCoding, pointerCoding, countCoding, positionCoding, skipQuantum, height,
                        termProcessor, field, properties, termMap, prefixMap, sizes, offsets)
                : new InMemoryIndex(IOFactories.loadBytes(ioFactory, indexFile.toString()), numberOfDocuments,
                        numberOfTerms, numberOfPostings, numberOfOccurrences, maxCount, payload,
                        frequencyCoding, pointerCoding, countCoding, positionCoding, skipQuantum, height,
                        termProcessor, field, properties, termMap, prefixMap, sizes, offsets);
    } else if (queryProperties != null && queryProperties.containsKey(UriKeys.MAPPED)) {
        if (ioFactory != IOFactory.FILESYSTEM_FACTORY)
            throw new IllegalArgumentException("Mapped indices require the file system I/O factory.");
        // Memory-map the index (and positions, for HP indices) instead of loading it.
        final File positionsFile = new File(basename + POSITIONS_EXTENSION);
        final ByteBufferInputStream index = ByteBufferInputStream
                .map(new FileInputStream(indexFile).getChannel(), MapMode.READ_ONLY);
        return highPerformance
                ? new MemoryMappedHPIndex(index,
                        ByteBufferInputStream.map(new FileInputStream(positionsFile).getChannel(),
                                MapMode.READ_ONLY),
                        numberOfDocuments, numberOfTerms, numberOfPostings, numberOfOccurrences, maxCount,
                        payload, frequencyCoding, pointerCoding, countCoding, positionCoding, skipQuantum,
                        height, termProcessor, field, properties, termMap, prefixMap, sizes, offsets)
                : new MemoryMappedIndex(index, numberOfDocuments, numberOfTerms, numberOfPostings,
                        numberOfOccurrences, maxCount, payload, frequencyCoding, pointerCoding, countCoding,
                        positionCoding, skipQuantum, height, termProcessor, field, properties, termMap,
                        prefixMap, sizes, offsets);

    }
    /*if ( SqrtSkipIndex.class.isAssignableFrom( indexClass ) )
       return new SqrtSkipFileIndex( basename.toString(), 
    numberOfDocuments, numberOfTerms, numberOfPostings, numberOfOccurrences, maxCount, 
    frequencyCoding, pointerCoding, countCoding, positionCoding,
    termProcessor,
    field, properties, termMap, prefixMap, sizes, offsets, indexFile );*/

    // Default: file-based index, read on demand with the configured buffer size.
    return highPerformance
            ? new FileHPIndex(basename.toString(), numberOfDocuments, numberOfTerms, numberOfPostings,
                    numberOfOccurrences, maxCount, payload, frequencyCoding, pointerCoding, countCoding,
                    positionCoding, skipQuantum, height, bufferSize, termProcessor, field, properties, termMap,
                    prefixMap, sizes, offsets)
            : new FileIndex(ioFactory, basename.toString(), numberOfDocuments, numberOfTerms, numberOfPostings,
                    numberOfOccurrences, maxCount, payload, frequencyCoding, pointerCoding, countCoding,
                    positionCoding, skipQuantum, height, bufferSize, termProcessor, field, properties, termMap,
                    prefixMap, sizes, offsets);

}

From source file:org.apache.metron.utils.PcapInspector.java

/**
 * Dumps the contents of a pcap sequence file to standard output: for each
 * packet, prints a comma-joined line of "TS: <timestamp>" followed by every
 * field PcapHelper extracted from the packet.
 *
 * @param argv Hadoop generic options followed by inspector options
 *             (input path, optional record limit)
 * @throws IOException if the sequence file cannot be read
 */
public static void main(String... argv) throws IOException {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
    CommandLine cli = InspectorOptions.parse(new PosixParser(), otherArgs);
    Path inputPath = new Path(InspectorOptions.INPUT.get(cli));
    // -1 means "no limit": dump every record in the file.
    int n = -1;
    if (InspectorOptions.NUM.has(cli)) {
        n = Integer.parseInt(InspectorOptions.NUM.get(cli));
    }
    // BUG FIX: the original never closed the reader (resource leak) and opened
    // the file with a fresh Configuration, discarding the one populated by
    // GenericOptionsParser. Use try-with-resources and the parsed conf.
    try (SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(inputPath))) {
        LongWritable key = new LongWritable();
        BytesWritable value = new BytesWritable();

        for (int i = 0; (n < 0 || i < n) && reader.next(key, value); ++i) {
            // Keys are unsigned nanosecond timestamps; convert to milliseconds for formatting.
            long millis = Long.divideUnsigned(key.get(), 1000000);
            String ts = DATE_FORMAT.format(new Date(millis));
            for (PacketInfo pi : PcapHelper.toPacketInfo(value.copyBytes())) {
                EnumMap<Constants.Fields, Object> result = PcapHelper.packetToFields(pi);
                // Plain list construction instead of the original's double-brace
                // initialization (anonymous subclass capturing the enclosing scope).
                List<String> fieldResults = new ArrayList<>();
                fieldResults.add("TS: " + ts);
                for (Constants.Fields field : Constants.Fields.values()) {
                    if (result.containsKey(field)) {
                        fieldResults.add(field.getName() + ": " + result.get(field));
                    }
                }
                System.out.println(Joiner.on(",").join(fieldResults));
            }
        }
    }
}

From source file:org.jgrades.config.service.UserPreferencesUpdater.java

/**
 * Validates and applies role-detail updates submitted for a user.
 *
 * @param updatedRoles the roles (and details) submitted in the update
 * @param persistRoles the roles currently persisted for the user
 * @throws UserPreferencesException if the submitted role set differs from the
 *         persisted one while the persisted roles do not include ADMINISTRATOR
 * @throws IllegalAccessException propagated from {@code mapNewValues}
 */
private void updateRoleData(EnumMap<JgRole, RoleDetails> updatedRoles,
        EnumMap<JgRole, RoleDetails> persistRoles) throws IllegalAccessException {
    // Only an administrator may change the *set* of roles; everyone else must
    // submit exactly the same role keys that are already persisted.
    if (!persistRoles.containsKey(JgRole.ADMINISTRATOR)
            && !CollectionUtils.isEqualCollection(updatedRoles.keySet(), persistRoles.keySet())) {
        throw new UserPreferencesException("Roles cannot be modified by user itself");
    }
    Set<Map.Entry<JgRole, RoleDetails>> entries = updatedRoles.entrySet();
    for (Map.Entry<JgRole, RoleDetails> entry : entries) {
        // NOTE(review): updatedRoles.get(entry.getKey()) is the very object
        // entry.getValue() already refers to, so mapNewValues receives the
        // updated details twice; the second argument was possibly meant to be
        // persistRoles.get(entry.getKey()) — confirm against mapNewValues' contract.
        mapNewValues(entry.getValue().getClass(), updatedRoles.get(entry.getKey()), entry.getValue());
    }

}

From source file:org.sonar.java.checks.verifier.CheckVerifier.java

/**
 * Rewrites a relative END_LINE attribute of the form "+N" into an absolute
 * line number based on {@code expectedLine}. Maps without the attribute are
 * left untouched; a value not starting with '+' fails the test run.
 */
private static void updateEndLine(int expectedLine, EnumMap<IssueAttribute, String> attr) {
    if (!attr.containsKey(IssueAttribute.END_LINE)) {
        return;
    }
    String relative = attr.get(IssueAttribute.END_LINE);
    if (relative.charAt(0) != '+') {
        Fail.fail("endLine attribute should be relative to the line and must be +N with N integer");
        return;
    }
    // Integer.parseInt accepts a leading '+', so "+3" parses as 3.
    int offset = Integer.parseInt(relative);
    attr.put(IssueAttribute.END_LINE, Integer.toString(expectedLine + offset));
}

From source file:sg.ncl.MainController.java

/**
 * Handles the "apply for a new team" form submitted by an existing, logged-in user.
 *
 * <p>Validates the form, forwards the team application to the backend service as
 * JSON, and maps known backend error states to user-facing flash messages.
 *
 * @param teamPageApplyTeamForm the validated application form
 * @param bindingResult validation outcome for the form
 * @param session HTTP session; the logged-in user's id is read from attribute "id"
 * @param redirectAttributes carrier for the flash error message on failure
 * @return the view name or redirect target depending on the outcome
 * @throws WebServiceRuntimeException if the backend call or response parsing fails
 */
@RequestMapping(value = "/teams/apply_team", method = RequestMethod.POST)
public String checkApplyTeamInfo(@Valid TeamPageApplyTeamForm teamPageApplyTeamForm,
        BindingResult bindingResult, HttpSession session, final RedirectAttributes redirectAttributes)
        throws WebServiceRuntimeException {

    final String LOG_PREFIX = "Existing user apply for new team: {}";

    // Reject the submission outright on any bean-validation error.
    if (bindingResult.hasErrors()) {
        log.warn(LOG_PREFIX, "Application form error " + teamPageApplyTeamForm.toString());
        return "team_page_apply_team";
    }
    // log data to ensure data has been parsed
    log.debug(LOG_PREFIX, properties.getRegisterRequestToApplyTeam(session.getAttribute("id").toString()));
    log.info(LOG_PREFIX, teamPageApplyTeamForm.toString());

    // Build the JSON payload: {"team": {name, description, website, ...}}.
    JSONObject mainObject = new JSONObject();
    JSONObject teamFields = new JSONObject();
    mainObject.put("team", teamFields);
    teamFields.put("name", teamPageApplyTeamForm.getTeamName());
    teamFields.put(DESCRIPTION, teamPageApplyTeamForm.getTeamDescription());
    teamFields.put(WEBSITE, teamPageApplyTeamForm.getTeamWebsite());
    teamFields.put(ORGANISATION_TYPE, teamPageApplyTeamForm.getTeamOrganizationType());
    teamFields.put(VISIBILITY, teamPageApplyTeamForm.getIsPublic());
    teamFields.put(IS_CLASS, teamPageApplyTeamForm.getIsClass());

    String nclUserId = session.getAttribute("id").toString();

    HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString());
    ResponseEntity response;

    try {
        response = restTemplate.exchange(properties.getRegisterRequestToApplyTeam(nclUserId), HttpMethod.POST,
                request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            // prepare the exception mapping: known backend error states -> user-facing messages
            EnumMap<ExceptionState, String> exceptionMessageMap = new EnumMap<>(ExceptionState.class);
            exceptionMessageMap.put(USER_ID_NULL_OR_EMPTY_EXCEPTION, "User id is null or empty ");
            exceptionMessageMap.put(TEAM_NAME_NULL_OR_EMPTY_EXCEPTION, "Team name is null or empty ");
            exceptionMessageMap.put(USER_NOT_FOUND_EXCEPTION, "User not found");
            exceptionMessageMap.put(TEAM_NAME_ALREADY_EXISTS_EXCEPTION, "Team name already exists");
            exceptionMessageMap.put(INVALID_TEAM_NAME_EXCEPTION, "Team name contains invalid characters");
            exceptionMessageMap.put(TEAM_MEMBER_ALREADY_EXISTS_EXCEPTION, "Team member already exists");
            exceptionMessageMap.put(ADAPTER_CONNECTION_EXCEPTION, "Connection to adapter failed");
            exceptionMessageMap.put(ADAPTER_INTERNAL_ERROR_EXCEPTION, "Internal server error on adapter");
            exceptionMessageMap.put(DETERLAB_OPERATION_FAILED_EXCEPTION, "Operation failed on DeterLab");

            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());

            // Known error states surface the backend message; anything else gets the generic overload text.
            final String errorMessage = exceptionMessageMap.containsKey(exceptionState) ? error.getMessage()
                    : ERR_SERVER_OVERLOAD;

            log.warn(LOG_PREFIX, responseBody);
            redirectAttributes.addFlashAttribute(MESSAGE, errorMessage);
            return "redirect:/teams/apply_team";

        } else {
            // no errors, everything ok
            log.info(LOG_PREFIX, "Application for team " + teamPageApplyTeamForm.getTeamName() + " submitted");
            return "redirect:/teams/team_application_submitted";
        }

    } catch (ResourceAccessException | IOException e) {
        log.error(LOG_PREFIX, e);
        throw new WebServiceRuntimeException(e.getMessage());
    }
}