Example usage for java.util TreeMap keySet

List of usage examples for java.util TreeMap keySet

Introduction

This page shows example usage of java.util.TreeMap.keySet().

Prototype

public Set<K> keySet() 

Document

Returns a Set view of the keys contained in this map.
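
Before the project examples below, here is a minimal, self-contained sketch (the class name and sample data are invented for illustration). It shows that the returned set iterates in the map's ascending key order and is a live view backed by the map, so removing a key from the set also removes the corresponding entry from the map.

import java.util.TreeMap;

public class TreeMapKeySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> ages = new TreeMap<String, Integer>();
        ages.put("carol", 41);
        ages.put("alice", 29);
        ages.put("bob", 35);

        // keySet() returns the keys in ascending order: alice, bob, carol
        for (String name : ages.keySet()) {
            System.out.println(name + " -> " + ages.get(name));
        }

        // the key set is a view backed by the map:
        // removing a key here also removes the mapping from the TreeMap
        ages.keySet().remove("bob");
        System.out.println(ages); // prints {alice=29, carol=41}
    }
}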

Usage

From source file:net.spfbl.core.Reverse.java

public static ArrayList<String> getMXSet(String host) throws NamingException {
    TreeMap<Integer, TreeSet<String>> mxMap = new TreeMap<Integer, TreeSet<String>>();
    Attributes attributes = Server.getAttributesDNS(host, new String[] { "MX" });
    if (attributes == null || attributes.size() == 0) {
        attributes = Server.getAttributesDNS(host, new String[] { "CNAME" });
        Attribute attribute = attributes.get("CNAME");
        if (attribute != null) {
            String cname = (String) attribute.get(0);
            return getMXSet(cname);
        }
    } else {
        Attribute attribute = attributes.get("MX");
        if (attribute != null) {
            for (int index = 0; index < attribute.size(); index++) {
                try {
                    String mx = (String) attribute.get(index);
                    int space = mx.indexOf(' ');
                    String value = mx.substring(0, space);
                    int priority = Integer.parseInt(value);
                    mx = mx.substring(space + 1);
                    int last = mx.length() - 1;
                    TreeSet<String> mxSet = mxMap.get(priority);
                    if (mxSet == null) {
                        mxSet = new TreeSet<String>();
                        mxMap.put(priority, mxSet);
                    }
                    if (Subnet.isValidIP(mx.substring(0, last))) {
                        mxSet.add(Subnet.normalizeIP(mx.substring(0, last)));
                    } else if (Domain.isHostname(mx)) {
                        mxSet.add(Domain.normalizeHostname(mx, true));
                    }
                } catch (NumberFormatException ex) {
                    // ignore malformed MX records and continue with the next entry
                }
            }
        }
    }
    ArrayList<String> mxList = new ArrayList<String>();
    if (mxMap.isEmpty()) {
        // https://tools.ietf.org/html/rfc5321#section-5
        mxList.add(Domain.normalizeHostname(host, true));
    } else {
        for (int priority : mxMap.keySet()) {
            TreeSet<String> mxSet = mxMap.get(priority);
            for (String mx : mxSet) {
                if (!mxList.contains(mx)) {
                    mxList.add(mx);
                }
            }
        }
    }
    return mxList;
}

From source file:com.opengamma.examples.bloomberg.loader.DemoEquityOptionCollarPortfolioLoader.java

private void addNodes(final ManageablePortfolioNode rootNode, final String underlying,
        final boolean includeUnderlying, final Period[] expiries) {
    final ExternalId ticker = ExternalSchemes.bloombergTickerSecurityId(underlying);
    ManageableSecurity underlyingSecurity = null;
    if (includeUnderlying) {
        underlyingSecurity = getOrLoadEquity(ticker);
    }

    final ExternalIdBundle bundle = underlyingSecurity == null ? ExternalIdBundle.of(ticker)
            : underlyingSecurity.getExternalIdBundle();
    final HistoricalTimeSeriesInfoDocument timeSeriesInfo = getOrLoadTimeSeries(ticker, bundle);
    final double estimatedCurrentStrike = getOrLoadMostRecentPoint(timeSeriesInfo);
    final Set<ExternalId> optionChain = getOptionChain(ticker);

    //TODO: reuse positions/nodes?
    final String longName = underlyingSecurity == null ? "" : underlyingSecurity.getName();
    final String formattedName = MessageFormatter.format("[{}] {}", underlying, longName).getMessage();
    final ManageablePortfolioNode equityNode = new ManageablePortfolioNode(formattedName);

    final BigDecimal underlyingAmount = VALUE_OF_UNDERLYING.divide(BigDecimal.valueOf(estimatedCurrentStrike),
            BigDecimal.ROUND_HALF_EVEN);

    if (includeUnderlying) {
        addPosition(equityNode, underlyingAmount, ticker);
    }

    final TreeMap<LocalDate, Set<BloombergTickerParserEQOption>> optionsByExpiry = new TreeMap<LocalDate, Set<BloombergTickerParserEQOption>>();
    for (final ExternalId optionTicker : optionChain) {
        s_logger.debug("Got option {}", optionTicker);

        final BloombergTickerParserEQOption optionInfo = BloombergTickerParserEQOption
                .getOptionParser(optionTicker);
        s_logger.debug("Got option info {}", optionInfo);

        final LocalDate key = optionInfo.getExpiry();
        Set<BloombergTickerParserEQOption> set = optionsByExpiry.get(key);
        if (set == null) {
            set = new HashSet<BloombergTickerParserEQOption>();
            optionsByExpiry.put(key, set);
        }
        set.add(optionInfo);
    }
    final Set<ExternalId> tickersToLoad = new HashSet<ExternalId>();

    final BigDecimal expiryCount = BigDecimal.valueOf(expiries.length);
    final BigDecimal defaultAmountAtExpiry = underlyingAmount.divide(expiryCount, BigDecimal.ROUND_DOWN);
    final BigDecimal spareAmountAtExpiry = defaultAmountAtExpiry.add(BigDecimal.ONE);
    int spareCount = underlyingAmount.subtract(defaultAmountAtExpiry.multiply(expiryCount)).intValue();

    for (final Period bucketPeriod : expiries) {
        final ManageablePortfolioNode bucketNode = new ManageablePortfolioNode(
                bucketPeriod.toString().substring(1));

        final LocalDate nowish = LocalDate.now().withDayOfMonth(20); //This avoids us picking different options every time this script is run
        final LocalDate targetExpiry = nowish.plus(bucketPeriod);
        final LocalDate chosenExpiry = optionsByExpiry.floorKey(targetExpiry);
        if (chosenExpiry == null) {
            s_logger.info("No options for {} on {}", targetExpiry, underlying);
            continue;
        }
        s_logger.info("Using time {} for bucket {} ({})",
                new Object[] { chosenExpiry, bucketPeriod, targetExpiry });

        final Set<BloombergTickerParserEQOption> optionsAtExpiry = optionsByExpiry.get(chosenExpiry);
        final TreeMap<Double, Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> optionsByStrike = new TreeMap<>();
        for (final BloombergTickerParserEQOption option : optionsAtExpiry) {
            //        s_logger.info("option {}", option);
            final double key = option.getStrike();
            Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike.get(key);
            if (pair == null) {
                pair = Pair.of(null, null);
            }
            if (option.getOptionType() == OptionType.CALL) {
                pair = Pair.of(option, pair.getSecond());
            } else {
                pair = Pair.of(pair.getFirst(), option);
            }
            optionsByStrike.put(key, pair);
        }

        //cascading collar?
        final BigDecimal amountAtExpiry = spareCount-- > 0 ? spareAmountAtExpiry : defaultAmountAtExpiry;

        s_logger.info(" est strike {}", estimatedCurrentStrike);
        final Double[] strikes = optionsByStrike.keySet().toArray(new Double[0]);

        int strikeIndex = Arrays.binarySearch(strikes, estimatedCurrentStrike);
        if (strikeIndex < 0) {
            strikeIndex = -(1 + strikeIndex);
        }
        s_logger.info("strikes length {} index {} strike of index {}",
                new Object[] { Integer.valueOf(strikes.length), Integer.valueOf(strikeIndex),
                        Double.valueOf(strikes[strikeIndex]) });

        int minIndex = strikeIndex - _numOptions;
        minIndex = Math.max(0, minIndex);
        int maxIndex = strikeIndex + _numOptions;
        maxIndex = Math.min(strikes.length - 1, maxIndex);

        s_logger.info("min {} max {}", Integer.valueOf(minIndex), Integer.valueOf(maxIndex));
        final StringBuffer sb = new StringBuffer("strikes: [");
        for (int j = minIndex; j <= maxIndex; j++) {
            sb.append(" ");
            sb.append(strikes[j]);
        }
        sb.append(" ]");
        s_logger.info(sb.toString());

        //Short Calls
        final ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> calls = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (int j = minIndex; j < strikeIndex; j++) {
            final Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike
                    .get(strikes[j]);
            if (pair == null) {
                throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]);
            }
            calls.add(pair);
        }
        spreadOptions(bucketNode, calls, OptionType.CALL, -1, tickersToLoad, amountAtExpiry, includeUnderlying,
                calls.size());

        // Long Puts
        final ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> puts = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (int j = strikeIndex + 1; j <= maxIndex; j++) {
            final Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike
                    .get(strikes[j]);
            if (pair == null) {
                throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]);
            }
            puts.add(pair);
        }
        spreadOptions(bucketNode, puts, OptionType.PUT, 1, tickersToLoad, amountAtExpiry, includeUnderlying,
                puts.size());

        if (bucketNode.getChildNodes().size() + bucketNode.getPositionIds().size() > 0) {
            equityNode.addChildNode(bucketNode); //Avoid generating empty nodes
        }
    }

    for (final ExternalId optionTicker : tickersToLoad) {
        final ManageableSecurity loaded = getOrLoadSecurity(optionTicker);
        if (loaded == null) {
            throw new OpenGammaRuntimeException("Unexpected option type " + loaded);
        }

        //TODO [LAPANA-29] Should be able to do this for index options too
        if (includeUnderlying) {
            try {
                final HistoricalTimeSeriesInfoDocument loadedTs = getOrLoadTimeSeries(optionTicker,
                        loaded.getExternalIdBundle());
                if (loadedTs == null) {
                    throw new OpenGammaRuntimeException("Failed to get time series for " + loaded);
                }
            } catch (final Exception ex) {
                s_logger.info("Failed to get time series for " + loaded, ex);
            }
        }
    }

    if (equityNode.getPositionIds().size() + equityNode.getChildNodes().size() > 0) {
        rootNode.addChildNode(equityNode);
    }
}

From source file:org.apache.hadoop.hbase.util.RegionSplitter.java

static void rollingSplit(String tableName, SplitAlgorithm splitAlgo, Configuration conf)
        throws IOException, InterruptedException {
    final int minOS = conf.getInt("split.outstanding", 2);

    HTable table = new HTable(conf, tableName);

    // max outstanding splits. default == 50% of servers
    final int MAX_OUTSTANDING = Math.max(table.getConnection().getCurrentNrHRS() / 2, minOS);

    Path hbDir = FSUtils.getRootDir(conf);
    Path tableDir = FSUtils.getTableDir(hbDir, table.getName());
    Path splitFile = new Path(tableDir, "_balancedSplit");
    FileSystem fs = FileSystem.get(conf);

    // get a list of daughter regions to create
    LinkedList<Pair<byte[], byte[]>> tmpRegionSet = getSplits(table, splitAlgo);
    LinkedList<Pair<byte[], byte[]>> outstanding = Lists.newLinkedList();
    int splitCount = 0;
    final int origCount = tmpRegionSet.size();

    // all splits must compact & we have 1 compact thread, so 2 split
    // requests to the same RS can stall the outstanding split queue.
    // To fix, group the regions into an RS pool and round-robin through it
    LOG.debug("Bucketing regions by regionserver...");
    TreeMap<String, LinkedList<Pair<byte[], byte[]>>> daughterRegions = Maps.newTreeMap();
    for (Pair<byte[], byte[]> dr : tmpRegionSet) {
        String rsLocation = table.getRegionLocation(dr.getSecond()).getHostnamePort();
        if (!daughterRegions.containsKey(rsLocation)) {
            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
            daughterRegions.put(rsLocation, entry);
        }
        daughterRegions.get(rsLocation).add(dr);
    }
    LOG.debug("Done with bucketing.  Split time!");
    long startTime = System.currentTimeMillis();

    // open the split file and modify it as splits finish
    FSDataInputStream tmpIn = fs.open(splitFile);
    byte[] rawData = new byte[tmpIn.available()];
    tmpIn.readFully(rawData);
    tmpIn.close();
    FSDataOutputStream splitOut = fs.create(splitFile);
    splitOut.write(rawData);

    try {
        // *** split code ***
        while (!daughterRegions.isEmpty()) {
            LOG.debug(daughterRegions.size() + " RS have regions to split.");

            // Get RegionServer : region count mapping
            final TreeMap<ServerName, Integer> rsSizes = Maps.newTreeMap();
            Map<HRegionInfo, ServerName> regionsInfo = table.getRegionLocations();
            for (ServerName rs : regionsInfo.values()) {
                if (rsSizes.containsKey(rs)) {
                    rsSizes.put(rs, rsSizes.get(rs) + 1);
                } else {
                    rsSizes.put(rs, 1);
                }
            }

            // sort the RS by the number of regions they have
            List<String> serversLeft = Lists.newArrayList(daughterRegions.keySet());
            Collections.sort(serversLeft, new Comparator<String>() {
                public int compare(String o1, String o2) {
                    return rsSizes.get(o1).compareTo(rsSizes.get(o2));
                }
            });

            // round-robin through the RS list. Choose the lightest-loaded servers
            // first to keep the master from load-balancing regions as we split.
            for (String rsLoc : serversLeft) {
                Pair<byte[], byte[]> dr = null;

                // find a region in the RS list that hasn't been moved
                LOG.debug("Finding a region on " + rsLoc);
                LinkedList<Pair<byte[], byte[]>> regionList = daughterRegions.get(rsLoc);
                while (!regionList.isEmpty()) {
                    dr = regionList.pop();

                    // get current region info
                    byte[] split = dr.getSecond();
                    HRegionLocation regionLoc = table.getRegionLocation(split);

                    // if this region moved locations
                    String newRs = regionLoc.getHostnamePort();
                    if (newRs.compareTo(rsLoc) != 0) {
                        LOG.debug("Region with " + splitAlgo.rowToStr(split) + " moved to " + newRs
                                + ". Relocating...");
                        // relocate it, don't use it right now
                        if (!daughterRegions.containsKey(newRs)) {
                            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
                            daughterRegions.put(newRs, entry);
                        }
                        daughterRegions.get(newRs).add(dr);
                        dr = null;
                        continue;
                    }

                    // make sure this region wasn't already split
                    byte[] sk = regionLoc.getRegionInfo().getStartKey();
                    if (sk.length != 0) {
                        if (Bytes.equals(split, sk)) {
                            LOG.debug("Region already split on " + splitAlgo.rowToStr(split)
                                    + ".  Skipping this region...");
                            ++splitCount;
                            dr = null;
                            continue;
                        }
                        byte[] start = dr.getFirst();
                        Preconditions.checkArgument(Bytes.equals(start, sk),
                                splitAlgo.rowToStr(start) + " != " + splitAlgo.rowToStr(sk));
                    }

                    // passed all checks! found a good region
                    break;
                }
                if (regionList.isEmpty()) {
                    daughterRegions.remove(rsLoc);
                }
                if (dr == null)
                    continue;

                // we have a good region, time to split!
                byte[] split = dr.getSecond();
                LOG.debug("Splitting at " + splitAlgo.rowToStr(split));
                HBaseAdmin admin = new HBaseAdmin(table.getConfiguration());
                admin.split(table.getTableName(), split);

                LinkedList<Pair<byte[], byte[]>> finished = Lists.newLinkedList();
                if (conf.getBoolean("split.verify", true)) {
                    // we need to verify and rate-limit our splits
                    outstanding.addLast(dr);
                    // with too many outstanding splits, wait for some to finish
                    while (outstanding.size() >= MAX_OUTSTANDING) {
                        finished = splitScan(outstanding, table, splitAlgo);
                        if (finished.isEmpty()) {
                            Thread.sleep(30 * 1000);
                        } else {
                            outstanding.removeAll(finished);
                        }
                    }
                } else {
                    finished.add(dr);
                }

                // mark each finished region as successfully split.
                for (Pair<byte[], byte[]> region : finished) {
                    splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                            + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    splitCount++;
                    if (splitCount % 10 == 0) {
                        long tDiff = (System.currentTimeMillis() - startTime) / splitCount;
                        LOG.debug("STATUS UPDATE: " + splitCount + " / " + origCount + ". Avg Time / Split = "
                                + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
                    }
                }
            }
        }
        if (conf.getBoolean("split.verify", true)) {
            while (!outstanding.isEmpty()) {
                LinkedList<Pair<byte[], byte[]>> finished = splitScan(outstanding, table, splitAlgo);
                if (finished.isEmpty()) {
                    Thread.sleep(30 * 1000);
                } else {
                    outstanding.removeAll(finished);
                    for (Pair<byte[], byte[]> region : finished) {
                        splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                                + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    }
                }
            }
        }
        LOG.debug("All regions have been successfully split!");
    } finally {
        long tDiff = System.currentTimeMillis() - startTime;
        LOG.debug("TOTAL TIME = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
        LOG.debug("Splits = " + splitCount);
        LOG.debug("Avg Time / Split = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff / splitCount));

        splitOut.close();
        if (table != null) {
            table.close();
        }
    }
    fs.delete(splitFile, false);
}

From source file:au.org.ala.layers.web.TabulationService.java

private double[] speciesTotals(List<Tabulation> tabulations, boolean row) throws IOException {
    //determine x & y field names
    TreeMap<String, String> objects1 = new TreeMap<String, String>();
    TreeMap<String, String> objects2 = new TreeMap<String, String>();

    for (Tabulation t : tabulations) {
        objects1.put(t.getPid1(), t.getName1());
        objects2.put(t.getPid2(), t.getName2());
    }

    int rows = Math.max(objects1.size(), objects2.size());
    int columns = Math.min(objects1.size(), objects2.size());

    double[] grid = new double[row ? rows : columns];

    //populate grid
    if (objects1.size() <= objects2.size()) {
        //row and column sort order and labels
        TreeMap<String, Integer> order1 = new TreeMap<String, Integer>();
        TreeMap<String, Integer> order2 = new TreeMap<String, Integer>();
        int pos = 0;
        for (String s : objects1.keySet()) {
            order1.put(s, pos++);
        }
        pos = 0;
        for (String s : objects2.keySet()) {
            order2.put(s, pos++);
        }

        //grid
        for (Tabulation t : tabulations) {
            //grid[order2.get(t.getPid2()) + 1][order1.get(t.getPid1()) + 1] = String.valueOf(t.getSpecies());
            if (row)
                grid[order2.get(t.getPid2())] = t.getSpeciest2() == null ? 0 : t.getSpeciest2();
            else
                grid[order1.get(t.getPid1())] = t.getSpeciest1() == null ? 0 : t.getSpeciest1();
        }
    } else {
        //row and column sort order and labels
        TreeMap<String, Integer> order1 = new TreeMap<String, Integer>();
        TreeMap<String, Integer> order2 = new TreeMap<String, Integer>();
        int pos = 0;
        for (String s : objects1.keySet()) {
            order1.put(s, pos++);
        }
        pos = 0;
        for (String s : objects2.keySet()) {
            order2.put(s, pos++);
        }

        //grid
        for (Tabulation t : tabulations) {
            //grid[order1.get(t.getPid1()) + 1][order2.get(t.getPid2()) + 1] = String.valueOf(t.getSpecies());
            if (!row)
                grid[order2.get(t.getPid2())] = t.getSpeciest2() == null ? 0 : t.getSpeciest2();
            else
                grid[order1.get(t.getPid1())] = t.getSpeciest1() == null ? 0 : t.getSpeciest1();
        }
    }
    return grid;
}

From source file:au.org.ala.layers.web.TabulationService.java

private String[][] tabulationGridGenerator(List<Tabulation> tabulations, String fid1, String fid2, String wkt,
        String func) throws IOException {
    //determine x & y field names
    TreeMap<String, String> objects1 = new TreeMap<String, String>();
    TreeMap<String, String> objects2 = new TreeMap<String, String>();

    for (Tabulation t : tabulations) {
        objects1.put(t.getPid1(), t.getName1());
        objects2.put(t.getPid2(), t.getName2());
    }

    int rows = Math.max(objects1.size(), objects2.size());
    int columns = Math.min(objects1.size(), objects2.size());

    String[][] grid = new String[rows + 1][columns + 1];

    //populate grid
    if (objects1.size() <= objects2.size()) {

        //row and column sort order and labels
        TreeMap<String, Integer> order1 = new TreeMap<String, Integer>();
        TreeMap<String, Integer> order2 = new TreeMap<String, Integer>();
        int pos = 0;
        for (String s : objects1.keySet()) {
            order1.put(s, pos++);
            grid[0][pos] = objects1.get(s);
        }
        pos = 0;
        for (String s : objects2.keySet()) {
            order2.put(s, pos++);
            grid[pos][0] = objects2.get(s);
        }

        //grid
        for (Tabulation t : tabulations) {
            if (func.equals("area") || func.equals("arearow") || func.equals("areacolumn")
                    || func.equals("areatotal")) {
                grid[order2.get(t.getPid2()) + 1][order1.get(t.getPid1()) + 1] = String.format("%.1f",
                        t.getArea() / 1000000.0); //convert sqm to sqkm
            } else if (func.equals("occurrences") || func.equals("occurrencesrow")
                    || func.equals("occurrencescolumn") || func.equals("occurrencestotal")) {
                grid[order2.get(t.getPid2()) + 1][order1.get(t.getPid1()) + 1] = String
                        .valueOf(t.getOccurrences());
            } else if (func.equals("species") || func.equals("speciesrow") || func.equals("speciescolumn")
                    || func.equals("speciestotal")) {
                grid[order2.get(t.getPid2()) + 1][order1.get(t.getPid1()) + 1] = String.valueOf(t.getSpecies());
            }
        }
    } else {
        //row and column sort order and labels
        TreeMap<String, Integer> order1 = new TreeMap<String, Integer>();
        TreeMap<String, Integer> order2 = new TreeMap<String, Integer>();
        int pos = 0;
        for (String s : objects1.keySet()) {
            order1.put(s, pos++);
            grid[pos][0] = objects1.get(s);
        }
        pos = 0;
        for (String s : objects2.keySet()) {
            order2.put(s, pos++);
            grid[0][pos] = objects2.get(s);
        }

        //grid
        for (Tabulation t : tabulations) {
            if (func.equals("area") || func.equals("arearow") || func.equals("areacolumn")
                    || func.equals("areatotal")) {
                grid[order1.get(t.getPid1()) + 1][order2.get(t.getPid2()) + 1] = String.format("%.1f",
                        t.getArea() / 1000000.0); //convert sqm to sqkm
            } else if (func.equals("occurrences") || func.equals("occurrencesrow")
                    || func.equals("occurrencescolumn") || func.equals("occurrencestotal")) {
                grid[order1.get(t.getPid1()) + 1][order2.get(t.getPid2()) + 1] = String
                        .valueOf(t.getOccurrences());
            } else if (func.equals("species") || func.equals("speciesrow") || func.equals("speciescolumn")
                    || func.equals("speciestotal")) {
                grid[order1.get(t.getPid1()) + 1][order2.get(t.getPid2()) + 1] = String.valueOf(t.getSpecies());
            }
        }
    }
    return grid;
}

From source file:com.opengamma.bloombergexample.loader.DemoEquityOptionCollarPortfolioLoader.java

private void addNodes(ManageablePortfolioNode rootNode, String underlying, boolean includeUnderlying,
        Period[] expiries) {
    ExternalId ticker = ExternalSchemes.bloombergTickerSecurityId(underlying);
    ManageableSecurity underlyingSecurity = null;
    if (includeUnderlying) {
        underlyingSecurity = getOrLoadEquity(ticker);
    }

    ExternalIdBundle bundle = underlyingSecurity == null ? ExternalIdBundle.of(ticker)
            : underlyingSecurity.getExternalIdBundle();
    HistoricalTimeSeriesInfoDocument timeSeriesInfo = getOrLoadTimeSeries(ticker, bundle);
    double estimatedCurrentStrike = getOrLoadMostRecentPoint(timeSeriesInfo);
    Set<ExternalId> optionChain = getOptionChain(ticker);

    //TODO: reuse positions/nodes?
    String longName = underlyingSecurity == null ? "" : underlyingSecurity.getName();
    String formattedName = MessageFormatter.format("[{}] {}", underlying, longName);
    ManageablePortfolioNode equityNode = new ManageablePortfolioNode(formattedName);

    BigDecimal underlyingAmount = VALUE_OF_UNDERLYING.divide(BigDecimal.valueOf(estimatedCurrentStrike),
            BigDecimal.ROUND_HALF_EVEN);

    if (includeUnderlying) {
        addPosition(equityNode, underlyingAmount, ticker);
    }

    TreeMap<LocalDate, Set<BloombergTickerParserEQOption>> optionsByExpiry = new TreeMap<LocalDate, Set<BloombergTickerParserEQOption>>();
    for (ExternalId optionTicker : optionChain) {
        s_logger.debug("Got option {}", optionTicker);

        BloombergTickerParserEQOption optionInfo = BloombergTickerParserEQOption.getOptionParser(optionTicker);
        s_logger.debug("Got option info {}", optionInfo);

        LocalDate key = optionInfo.getExpiry();
        Set<BloombergTickerParserEQOption> set = optionsByExpiry.get(key);
        if (set == null) {
            set = new HashSet<BloombergTickerParserEQOption>();
            optionsByExpiry.put(key, set);
        }
        set.add(optionInfo);
    }
    Set<ExternalId> tickersToLoad = new HashSet<ExternalId>();

    BigDecimal expiryCount = BigDecimal.valueOf(expiries.length);
    BigDecimal defaultAmountAtExpiry = underlyingAmount.divide(expiryCount, BigDecimal.ROUND_DOWN);
    BigDecimal spareAmountAtExpiry = defaultAmountAtExpiry.add(BigDecimal.ONE);
    int spareCount = underlyingAmount.subtract(defaultAmountAtExpiry.multiply(expiryCount)).intValue();

    for (int i = 0; i < expiries.length; i++) {
        Period bucketPeriod = expiries[i];

        ManageablePortfolioNode bucketNode = new ManageablePortfolioNode(bucketPeriod.toString().substring(1));

        LocalDate nowish = LocalDate.now().withDayOfMonth(20); //This avoids us picking different options every time this script is run
        LocalDate targetExpiry = nowish.plus(bucketPeriod);
        LocalDate chosenExpiry = optionsByExpiry.floorKey(targetExpiry);
        if (chosenExpiry == null) {
            s_logger.warn("No options for {} on {}", targetExpiry, underlying);
            continue;
        }
        s_logger.info("Using time {} for bucket {} ({})",
                new Object[] { chosenExpiry, bucketPeriod, targetExpiry });

        Set<BloombergTickerParserEQOption> optionsAtExpiry = optionsByExpiry.get(chosenExpiry);
        TreeMap<Double, Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> optionsByStrike = new TreeMap<Double, Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (BloombergTickerParserEQOption option : optionsAtExpiry) {
            //        s_logger.info("option {}", option);
            double key = option.getStrike();
            Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike.get(key);
            if (pair == null) {
                pair = Pair.of(null, null);
            }
            if (option.getOptionType() == OptionType.CALL) {
                pair = Pair.of(option, pair.getSecond());
            } else {
                pair = Pair.of(pair.getFirst(), option);
            }
            optionsByStrike.put(key, pair);
        }

        //cascading collar?
        BigDecimal amountAtExpiry = spareCount-- > 0 ? spareAmountAtExpiry : defaultAmountAtExpiry;

        s_logger.info(" est strike {}", estimatedCurrentStrike);
        Double[] strikes = optionsByStrike.keySet().toArray(new Double[0]);

        int strikeIndex = Arrays.binarySearch(strikes, estimatedCurrentStrike);
        if (strikeIndex < 0) {
            strikeIndex = -(1 + strikeIndex);
        }
        s_logger.info("strikes length {} index {} strike of index {}",
                new Object[] { Integer.valueOf(strikes.length), Integer.valueOf(strikeIndex),
                        Double.valueOf(strikes[strikeIndex]) });

        int minIndex = strikeIndex - _numOptions;
        minIndex = Math.max(0, minIndex);
        int maxIndex = strikeIndex + _numOptions;
        maxIndex = Math.min(strikes.length - 1, maxIndex);

        s_logger.info("min {} max {}", Integer.valueOf(minIndex), Integer.valueOf(maxIndex));
        StringBuffer sb = new StringBuffer("strikes: [");
        for (int j = minIndex; j <= maxIndex; j++) {
            sb.append(" ");
            sb.append(strikes[j]);
        }
        sb.append(" ]");
        s_logger.info(sb.toString());

        //Short Calls
        ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> calls = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (int j = minIndex; j < strikeIndex; j++) {
            Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike
                    .get(strikes[j]);
            if (pair == null) {
                throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]);
            }
            calls.add(pair);
        }
        spreadOptions(bucketNode, calls, OptionType.CALL, -1, tickersToLoad, amountAtExpiry, includeUnderlying,
                calls.size());

        // Long Puts
        ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> puts = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (int j = strikeIndex + 1; j <= maxIndex; j++) {
            Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike
                    .get(strikes[j]);
            if (pair == null) {
                throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]);
            }
            puts.add(pair);
        }
        spreadOptions(bucketNode, puts, OptionType.PUT, 1, tickersToLoad, amountAtExpiry, includeUnderlying,
                puts.size());

        if (bucketNode.getChildNodes().size() + bucketNode.getPositionIds().size() > 0) {
            equityNode.addChildNode(bucketNode); //Avoid generating empty nodes   
        }
    }

    for (ExternalId optionTicker : tickersToLoad) {
        ManageableSecurity loaded = getOrLoadSecurity(optionTicker);
        if (loaded == null) {
            throw new OpenGammaRuntimeException("Unexpected option type " + loaded);
        }

        //TODO [LAPANA-29] Should be able to do this for index options too
        if (includeUnderlying) {
            try {
                HistoricalTimeSeriesInfoDocument loadedTs = getOrLoadTimeSeries(optionTicker,
                        loaded.getExternalIdBundle());
                if (loadedTs == null) {
                    throw new OpenGammaRuntimeException("Failed to get time series for " + loaded);
                }
            } catch (Exception ex) {
                s_logger.error("Failed to get time series for " + loaded, ex);
            }
        }
    }

    if (equityNode.getPositionIds().size() + equityNode.getChildNodes().size() > 0) {
        rootNode.addChildNode(equityNode);
    }
}

From source file:subsets.GenerateGFKMatrix.java

void query3() {
    ArrayList<String[]> hirarchy = new ArrayList<String[]>();
    String[] mc = { "sc", "MetaCube" };
    hirarchy.add(mc);
    String[] sc = { "sc", "name" };
    hirarchy.add(sc);
    String[] sca = { "sca", "name" };
    hirarchy.add(sca);
    String[] bca = { "bca", "name" };
    hirarchy.add(bca);

    TreeMap<String, JSONObject> SmartCube_SCAttribut = getDepTree(
            "MATCH (sc:SmartCube)<-[:MEMBER_OF]-(sca:SCAttribut) RETURN sc,sca", hirarchy);

    hirarchy.clear();

    //ArrayList<String[]> hirarchy = new ArrayList<String[]>();

    hirarchy.add(sc);
    hirarchy.add(sca);
    hirarchy.add(bca);
    TreeMap<String, JSONObject> SCAttribut_SCAlgo_BCAttribut = getDepTree_old(
            "MATCH (sc:SCAttribut) -[:DERIVED_BY]->(sca:SCAlgorithmen)-[:USES] ->(bca:BCAttribut) RETURN sc,sca,bca",
            hirarchy);

    hirarchy.clear();
    hirarchy.add(sc);
    hirarchy.add(bca);
    TreeMap<String, JSONObject> SCAttribut_BCAttribut = getDepTree(
            "MATCH (sc:SCAttribut) -[:TRANSITION]->(bca:BCAttribut) RETURN sc,bca", hirarchy);

    JSONObject MetaCube = new JSONObject();
    try {
        MetaCube.append("name", "MetaCube");

        for (String key : SmartCube_SCAttribut.keySet()) {
            JSONObject cube = SmartCube_SCAttribut.get(key);
            MetaCube.append("children", cube);
        }

    } catch (JSONException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }

    String content = MetaCube.toString();

    try {

        //String content = json_smartcube.toString();

        File file = new File("C://Users//frisch//Desktop//d3//flare.json");

        // if the file doesn't exist, create it
        if (!file.exists()) {
            file.createNewFile();
        }

        FileWriter fw = new FileWriter(file.getAbsoluteFile());
        BufferedWriter bw = new BufferedWriter(fw);
        bw.write(content);
        bw.close();

        System.out.println("Done");

    } catch (IOException e) {
        e.printStackTrace();
    }

}

From source file:net.spfbl.data.Block.java

public static void dropExpired() {
    long max = System.currentTimeMillis() - Server.DAY_TIME * 40L;
    TreeMap<String, Long> map = SET.getMap();
    for (String token : map.keySet()) {
        Long time = map.get(token);
        if (time == null || time < max) {
            if (SET.dropExact(token)) {
                Server.logDebug("expired BLOCK '" + token + "'.");
            }
        }
    }
}

From source file:eoss.problem.EOSSProblem.java

private void aggregate_performance_score_facts(EOSSArchitecture arch) {
    ArrayList subobj_scores = new ArrayList();
    ArrayList obj_scores = new ArrayList();
    ArrayList panel_scores = new ArrayList();
    double science = 0.0;
    FuzzyValue fuzzy_science = null;
    Explanation explanations = new Explanation();
    TreeMap<String, Double> tm = new TreeMap<String, Double>();
    try {
        ArrayList<Fact> vals = qb.makeQuery("AGGREGATION::VALUE");
        Fact val = vals.get(0);
        science = val.getSlotValue("satisfaction").floatValue(r.getGlobalContext());
        if (Params.req_mode.equalsIgnoreCase("FUZZY-ATTRIBUTES")) {
            fuzzy_science = (FuzzyValue) val.getSlotValue("fuzzy-value").javaObjectValue(r.getGlobalContext());
        }
        panel_scores = jessList2ArrayList(val.getSlotValue("sh-scores").listValue(r.getGlobalContext()));

        ArrayList<Fact> subobj_facts = qb.makeQuery("AGGREGATION::SUBOBJECTIVE");
        for (int n = 0; n < subobj_facts.size(); n++) {
            Fact f = subobj_facts.get(n);
            String subobj = f.getSlotValue("id").stringValue(r.getGlobalContext());
            Double subobj_score = f.getSlotValue("satisfaction").floatValue(r.getGlobalContext());
            Double current_subobj_score = tm.get(subobj);
            if (current_subobj_score != null && subobj_score > current_subobj_score
                    || current_subobj_score == null) {
                tm.put(subobj, subobj_score);
            }
            explanations.put(subobj, qb.makeQuery("AGGREGATION::SUBOBJECTIVE (id " + subobj + ")"));
        }
        for (Iterator<String> name = tm.keySet().iterator(); name.hasNext();) {
            subobj_scores.add(tm.get(name.next()));
        }
        //TO DO: obj_score and subobj_scores.
    } catch (JessException ex) {
        Logger.getLogger(EOSSProblem.class.getName()).log(Level.SEVERE, null, ex);
    }
    if (Params.req_mode.equalsIgnoreCase("FUZZY-ATTRIBUTES")) {
        arch.setFuzzyObjective(0, fuzzy_science);
    }
    if (explanation) {
        arch.setExplanation(0, explanations);
        arch.setCapabilities(qb.makeQuery("REQUIREMENTS::Measurement"));
    }
}

From source file:org.talend.mdm.webapp.browserecords.server.actions.BrowseRecordsAction.java

public static List<ItemBaseModel> getViewsListOrderedByLabels(Map<String, String> unsortedViewsMap) {
    TreeMap<String, String> sortedViewsByLabelsMap = new TreeMap<String, String>(
            new ViewLabelComparator(unsortedViewsMap));
    sortedViewsByLabelsMap.putAll(unsortedViewsMap);

    List<ItemBaseModel> viewsList = new ArrayList<ItemBaseModel>();
    for (String viewName : sortedViewsByLabelsMap.keySet()) {
        String viewLabel = unsortedViewsMap.get(viewName);
        ItemBaseModel bm = new ItemBaseModel();
        bm.set("name", viewLabel); //$NON-NLS-1$
        bm.set("value", viewName); //$NON-NLS-1$
        viewsList.add(bm);
    }
    return viewsList;
}