Example usage for the java.util.LinkedHashSet constructor

Introduction

On this page you can find usage examples for the java.util.LinkedHashSet constructor.

Prototype

public LinkedHashSet(Collection<? extends E> c) 

Document

Constructs a new linked hash set with the same elements as the specified collection.
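
The following minimal sketch (the class name CopyConstructorDemo is invented for illustration) shows what this constructor does: copying a List into a LinkedHashSet drops duplicates while preserving the order in which elements were first encountered.

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class CopyConstructorDemo {
    public static void main(String[] args) {
        List<String> source = Arrays.asList("b", "a", "b", "c", "a");

        // LinkedHashSet(Collection) keeps the first occurrence of each
        // element and remembers insertion order.
        LinkedHashSet<String> set = new LinkedHashSet<>(source);

        System.out.println(set); // prints [b, a, c]
    }
}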

Usage

From source file:Main.java

public static void main(String[] args) {

    LinkedHashSet<String> linkedHashSet = new LinkedHashSet<>(10);

    linkedHashSet.add("1");
    linkedHashSet.add("2");
    linkedHashSet.add("java2s.com");

    Object[] objArray = linkedHashSet.toArray();
    System.out.println(Arrays.toString(objArray));
}
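
Because LinkedHashSet preserves insertion order, this prints [1, 2, java2s.com]. Note that this snippet uses the initial-capacity overload LinkedHashSet(int); the Collection overload from the prototype above appears in later examples.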

From source file:com.github.ansell.shp.SHPDump.java

public static void main(String... args) throws Exception {
    final OptionParser parser = new OptionParser();

    final OptionSpec<Void> help = parser.accepts("help").forHelp();
    final OptionSpec<File> input = parser.accepts("input").withRequiredArg().ofType(File.class).required()
            .describedAs("The input SHP file");
    final OptionSpec<File> output = parser.accepts("output").withRequiredArg().ofType(File.class).required()
            .describedAs("The output directory to use for debugging files");
    final OptionSpec<String> outputPrefix = parser.accepts("prefix").withRequiredArg().ofType(String.class)
            .defaultsTo("shp-debug").describedAs("The output prefix to use for debugging files");
    final OptionSpec<File> outputMappingTemplate = parser.accepts("output-mapping").withRequiredArg()
            .ofType(File.class).describedAs("The output mapping template file if it needs to be generated.");
    final OptionSpec<Integer> resolution = parser.accepts("resolution").withRequiredArg().ofType(Integer.class)
            .defaultsTo(2048).describedAs("The output image file resolution");
    final OptionSpec<String> format = parser.accepts("format").withRequiredArg().ofType(String.class)
            .defaultsTo("png").describedAs("The output image format");
    final OptionSpec<String> removeIfEmpty = parser.accepts("remove-if-empty").withRequiredArg()
            .ofType(String.class).describedAs(
                    "The name of an attribute to remove if its value is empty before outputting the resulting shapefile. Use multiple times to specify multiple fields to check");

    OptionSet options = null;

    try {
        options = parser.parse(args);
    } catch (final OptionException e) {
        System.out.println(e.getMessage());
        parser.printHelpOn(System.out);
        throw e;
    }

    if (options.has(help)) {
        parser.printHelpOn(System.out);
        return;
    }

    final Path inputPath = input.value(options).toPath();
    if (!Files.exists(inputPath)) {
        throw new FileNotFoundException("Could not find input SHP file: " + inputPath.toString());
    }

    final Path outputPath = output.value(options).toPath();
    if (!Files.exists(outputPath)) {
        throw new FileNotFoundException("Output directory does not exist: " + outputPath.toString());
    }

    final Path outputMappingPath = options.has(outputMappingTemplate)
            ? outputMappingTemplate.value(options).toPath()
            : null;
    if (options.has(outputMappingTemplate) && Files.exists(outputMappingPath)) {
        throw new FileNotFoundException(
                "Output mapping template file already exists: " + outputMappingPath.toString());
    }

    final Set<String> filterFields = ConcurrentHashMap.newKeySet();
    if (options.has(removeIfEmpty)) {
        for (String nextFilterField : removeIfEmpty.values(options)) {
            System.out.println("Will filter field if empty value found: " + nextFilterField);
            filterFields.add(nextFilterField);
        }
    }

    if (!filterFields.isEmpty()) {
        System.out.println("Full set of filter fields: " + filterFields);
    }

    final String prefix = outputPrefix.value(options);

    FileDataStore store = FileDataStoreFinder.getDataStore(inputPath.toFile());

    if (store == null) {
        throw new RuntimeException("Could not read the given input as an ESRI Shapefile: "
                + inputPath.toAbsolutePath().toString());
    }

    for (String typeName : new LinkedHashSet<>(Arrays.asList(store.getTypeNames()))) {
        System.out.println("");
        System.out.println("Type: " + typeName);
        SimpleFeatureSource featureSource = store.getFeatureSource(typeName);
        SimpleFeatureType schema = featureSource.getSchema();

        Name outputSchemaName = new NameImpl(schema.getName().getNamespaceURI(),
                schema.getName().getLocalPart().replace(" ", "").replace("%20", ""));
        System.out.println("Replacing name on schema: " + schema.getName() + " with " + outputSchemaName);
        SimpleFeatureType outputSchema = SHPUtils.changeSchemaName(schema, outputSchemaName);

        List<String> attributeList = new ArrayList<>();
        for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
            System.out.println("Attribute: " + attribute.getName().toString());
            attributeList.add(attribute.getName().toString());
        }
        CsvSchema csvSchema = CSVUtil.buildSchema(attributeList);

        SimpleFeatureCollection collection = featureSource.getFeatures();
        int featureCount = 0;
        Path nextCSVFile = outputPath.resolve(prefix + ".csv");
        Path nextSummaryCSVFile = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-Summary.csv");
        List<SimpleFeature> outputFeatureList = new CopyOnWriteArrayList<>();

        try (SimpleFeatureIterator iterator = collection.features();
                Writer bufferedWriter = Files.newBufferedWriter(nextCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                SequenceWriter csv = CSVUtil.newCSVWriter(bufferedWriter, csvSchema);) {
            List<String> nextLine = new ArrayList<>();
            while (iterator.hasNext()) {
                SimpleFeature feature = iterator.next();
                featureCount++;
                if (featureCount <= 2) {
                    System.out.println("");
                    System.out.println(feature.getIdentifier());
                } else if (featureCount % 100 == 0) {
                    System.out.print(".");
                }
                boolean filterThisFeature = false;
                for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
                    String featureString = Optional.ofNullable(feature.getAttribute(attribute.getName()))
                            .orElse("").toString();
                    nextLine.add(featureString);
                    if (filterFields.contains(attribute.getName().toString())
                            && featureString.trim().isEmpty()) {
                        filterThisFeature = true;
                    }
                    if (featureString.length() > 100) {
                        featureString = featureString.substring(0, 100) + "...";
                    }
                    if (featureCount <= 2) {
                        System.out.print(attribute.getName() + "=");
                        System.out.println(featureString);
                    }
                }
                if (!filterThisFeature) {
                    outputFeatureList.add(SHPUtils.changeSchemaName(feature, outputSchema));
                    csv.write(nextLine);
                }
                nextLine.clear();
            }
        }
        try (Reader csvReader = Files.newBufferedReader(nextCSVFile, StandardCharsets.UTF_8);
                Writer summaryOutput = Files.newBufferedWriter(nextSummaryCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                final Writer mappingWriter = options.has(outputMappingTemplate)
                        ? Files.newBufferedWriter(outputMappingPath)
                        : NullWriter.NULL_WRITER) {
            CSVSummariser.runSummarise(csvReader, summaryOutput, mappingWriter,
                    CSVSummariser.DEFAULT_SAMPLE_COUNT, false);
        }
        if (featureCount > 100) {
            System.out.println("");
        }
        System.out.println("");
        System.out.println("Feature count: " + featureCount);

        SimpleFeatureCollection outputCollection = new ListFeatureCollection(outputSchema, outputFeatureList);
        Path outputShapefilePath = outputPath.resolve(prefix + "-" + outputSchema.getTypeName() + "-dump");
        if (!Files.exists(outputShapefilePath)) {
            Files.createDirectory(outputShapefilePath);
        }
        SHPUtils.writeShapefile(outputCollection, outputShapefilePath);

        // Create ZIP file from the contents to keep the subfiles together
        Path outputShapefileZipPath = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-dump.zip");
        try (final OutputStream out = Files.newOutputStream(outputShapefileZipPath,
                StandardOpenOption.CREATE_NEW);
                final ZipOutputStream zip = new ZipOutputStream(out, StandardCharsets.UTF_8);) {
            Files.list(outputShapefilePath).forEachOrdered(Unchecked.consumer(e -> {
                zip.putNextEntry(new ZipEntry(e.getFileName().toString()));
                Files.copy(e, zip);
                zip.closeEntry();
            }));
        }

        try (final OutputStream outputStream = Files.newOutputStream(
                outputPath.resolve(prefix + "." + format.value(options)), StandardOpenOption.CREATE_NEW);) {
            MapContent map = new MapContent();
            map.setTitle(prefix + "-" + outputSchema.getTypeName());
            Style style = SLD.createSimpleStyle(featureSource.getSchema());
            Layer layer = new FeatureLayer(new CollectionFeatureSource(outputCollection), style);
            map.addLayer(layer);
            SHPUtils.renderImage(map, outputStream, resolution.value(options), format.value(options));
        }
    }
}
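
The constructor of interest here is on the line iterating type names: wrapping Arrays.asList(store.getTypeNames()) in new LinkedHashSet<>(...) removes any duplicate type names while preserving the order in which the data store reports them.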

From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java

public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000;
    final int nDist = 10000;
    //prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    //prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    //iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            //build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            //build configuration and evolver
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            //prepare things
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            //build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            //build and fill map
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            //compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            //compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1;
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances)
                    / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            //compute locality
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    if (filePrintStream != null) {
        filePrintStream.close();
    }
}
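
Here LinkedHashSet appears as new LinkedHashSet<>(n), using the initial-capacity overload: because the set rejects duplicates, each while loop keeps drawing random genotypes until n distinct ones have been collected, in a reproducible order.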

From source file:Main.java

public static <K> List<K> removeDuplicateFromList(List<K> lst) {
    return new ArrayList<K>(new LinkedHashSet<K>(lst));
}
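
For example, removeDuplicateFromList(Arrays.asList("a", "b", "a", "c")) returns a list that prints as [a, b, c]: the LinkedHashSet drops the repeated "a" while keeping first-occurrence order, and the ArrayList copy restores the List interface.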

From source file:Main.java

public static <V> Set<V> newLinkedHashSet(int expectedSize) {
    return new LinkedHashSet<V>(expectedSizeToCapacity(expectedSize));
}
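
The helper expectedSizeToCapacity is defined elsewhere in the source class and is not shown here. A plausible implementation (an assumption modeled on the capacity formula used by Guava's collection factories, not the actual source) would be:

static int expectedSizeToCapacity(int expectedSize) {
    // Assumed sizing logic: choose a capacity so that expectedSize
    // elements stay under the default load factor of 0.75 and the
    // backing table never needs to rehash.
    if (expectedSize < 3) {
        return expectedSize + 1;
    }
    return (int) (expectedSize / 0.75f) + 1;
}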

From source file:Main.java

public static <T> LinkedHashSet<T> asLinkedHashSetRemoveNulls(T[] elements) {
    LinkedHashSet<T> result = new LinkedHashSet<>(elements.length);
    for (T element : elements) {
        if (element != null) {
            result.add(element);
        }
    }

    return result;
}
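
For example, passing new String[] { "a", null, "b", "a", null } returns a set that prints as [a, b]: null entries are skipped and the duplicate "a" is absorbed.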

From source file:Main.java

public static <T> List<T> removeDuplicate(List<T> list) {
    return new ArrayList<>(new LinkedHashSet<>(list));
}

From source file:Main.java

public static <T> List<T> uniq(List<T> list) {
    return new ArrayList<>(new LinkedHashSet<>(list));
}

From source file:Main.java

public static <E> Set<E> asSet(E... elements) {
    if (elements == null || elements.length == 0) {
        return Collections.emptySet();
    }
    LinkedHashSet<E> set = new LinkedHashSet<E>(elements.length * 4 / 3 + 1);
    Collections.addAll(set, elements);
    return set;
}
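
The sizing expression elements.length * 4 / 3 + 1 serves the same purpose as the helper sketched above: with the default load factor of 0.75, a capacity of roughly n / 0.75 + 1 lets all n elements be added without triggering a resize.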

From source file:Main.java

public static <E> LinkedHashSet<E> getLinkedHashSet(int initialCapacity) {
    return new LinkedHashSet<E>(initialCapacity);
}