Example usage for java.util TreeMap put

List of usage examples for java.util TreeMap put

Introduction

On this page you can find example usage for java.util.TreeMap.put.

Prototype

public V put(K key, V value) 

Document

Associates the specified value with the specified key in this map.
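
As a quick orientation before the real-world examples below, here is a minimal standalone sketch (not taken from any of the listed projects) showing the two things the prototype implies: put stores the entry in key order, and it returns the previous value mapped to that key, or null if there was none.

import java.util.TreeMap;

public class PutBasics {
    public static void main(String[] args) {
        TreeMap<String, Integer> scores = new TreeMap<String, Integer>();

        // First insertion: no previous mapping, so put returns null.
        Integer previous = scores.put("alice", 10);
        System.out.println(previous);           // null

        // Re-inserting the same key overwrites the value and returns the old one.
        previous = scores.put("alice", 15);
        System.out.println(previous);           // 10

        // Keys are kept in sorted (natural) order.
        System.out.println(scores.firstKey());  // alice
    }
}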

Usage

From source file:com.sfs.DataFilter.java

/**
 * Converts the tokens of a tokenizer into a map keyed by line number.
 *
 * @param tokenizer the tokenizer
 *
 * @return a TreeMap<Integer, String> of line number to trimmed token
 */
private static TreeMap<Integer, String> tokenizerToMap(final StringTokenizer tokenizer) {

    TreeMap<Integer, String> parsedData = new TreeMap<Integer, String>();

    int lineCounter = 1;
    if (tokenizer != null) {
        while (tokenizer.hasMoreTokens()) {
            String token = tokenizer.nextToken();

            parsedData.put(lineCounter, token.trim());
            lineCounter++;
        }
    }
    return parsedData;
}
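
A possible call site for this helper (hypothetical, shown as a fragment inside the same DataFilter class since the method is private) might look like:

// Hypothetical fragment inside com.sfs.DataFilter:
StringTokenizer tokenizer = new StringTokenizer("alpha\nbeta\ngamma", "\n");
TreeMap<Integer, String> lines = tokenizerToMap(tokenizer);
System.out.println(lines); // {1=alpha, 2=beta, 3=gamma}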

From source file:com.genentech.application.calcProps.SDFCalcProps.java

private static void printProperties(Set<Calculator> calculators, boolean showHidden) {
    // Print properties in alphabetical order
    TreeMap<String, String> sortedCalcs = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
    for (Calculator calc : calculators) {
        // print non-public props as well when showHidden is set
        if (calc.isPublic() || showHidden) {
            sortedCalcs.put(calc.getName(), calc.getHelpText());
        }
    }

    for (String key : sortedCalcs.keySet()) {
        System.err.println(key + ":\t" + sortedCalcs.get(key));
    }
}
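
Because sortedCalcs is built with String.CASE_INSENSITIVE_ORDER, put treats keys that differ only in case as the same key, so a later call replaces the earlier entry. A standalone sketch of that behaviour, using made-up property names rather than the Calculator classes:

import java.util.TreeMap;

public class CaseInsensitivePut {
    public static void main(String[] args) {
        TreeMap<String, String> props = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
        props.put("LogP", "octanol/water partition coefficient");

        // Same key under the case-insensitive comparator: overwrites and returns the old value.
        String displaced = props.put("logp", "updated help text");
        System.out.println(displaced);    // octanol/water partition coefficient
        System.out.println(props.size()); // 1
    }
}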

From source file:Main.java

public static Window getOwnerForChildWindow() {
    Window w = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusedWindow();
    if (w != null) {
        return w;
    }
    w = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
    if (w != null) {
        return w;
    }
    /*
     * Priority level1
     * modal dialog: +200
     * non-modal dialog: +100
     * frame: +0
     *
     * Priority level2
     * no owned windows: +10
     */
    TreeMap<Integer, Window> prioMap = new TreeMap<Integer, Window>();
    for (Window cand : Window.getWindows()) {
        if (cand == null) {
            continue;
        }
        if (!cand.isVisible()) {
            continue;
        }
        if (!cand.isShowing()) {
            continue;
        }
        int prio = 0;
        Window[] children = cand.getOwnedWindows();
        if (children == null || children.length == 0) {
            prio += 10;
        }
        if (cand instanceof Dialog) {
            Dialog dlg = (Dialog) cand;
            if (dlg.isModal()) {
                prio += 200;
            } else {
                prio += 100;
            }
            prioMap.put(prio, cand);
        } else if (cand instanceof Frame) {
            if (!prioMap.containsKey(prio)) {
                prioMap.put(prio, cand);
            }
        }
    }
    if (!prioMap.isEmpty()) {
        return prioMap.get(prioMap.lastKey());
    }
    // last line of defense: the map is empty, so fall back to any visible window
    for (Window cand : Window.getWindows()) {
        if (cand != null && cand.isVisible()) {
            return cand;
        }
    }
    return null;
}
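
The method keys each candidate window by a computed priority and returns the one with the highest key. Two details of put are worth noting here: inserting an equal key silently replaces the earlier candidate (which the Frame branch guards against with containsKey), and lastEntry() is equivalent to get(lastKey()). A standalone sketch with placeholder strings instead of Window objects:

import java.util.TreeMap;

public class PriorityPutSketch {
    public static void main(String[] args) {
        TreeMap<Integer, String> prioMap = new TreeMap<Integer, String>();

        prioMap.put(210, "modal dialog A");
        // Same priority: put replaces the earlier candidate and returns it.
        String replaced = prioMap.put(210, "modal dialog B");
        System.out.println(replaced);                        // modal dialog A

        // The Frame branch above avoids this by checking containsKey first.
        if (!prioMap.containsKey(210)) {
            prioMap.put(210, "frame C");
        }

        // Highest-priority entry; equivalent to get(lastKey()).
        System.out.println(prioMap.lastEntry().getValue());  // modal dialog B
    }
}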

From source file:org.opendatakit.database.data.ColumnDefinition.java

/**
 * Convert the ColumnDefinition map into a JSON schema and augment it with
 * the schema for the administrative columns.
 * <p>
 * The structure of this schema matches the dataTableModel produced by XLSXConverter.
 *
 * @param orderedDefns Used for getting the data model
 * @return An extended data model for the columns
 */
static TreeMap<String, Object> getExtendedDataModel(List<ColumnDefinition> orderedDefns) {
    TreeMap<String, Object> model = getDataModel(orderedDefns);

    TreeMap<String, Object> jsonSchema;
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.ID, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.TRUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.ID);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.ID);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.ID);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.ROW_ETAG, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.ROW_ETAG);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.ROW_ETAG);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.ROW_ETAG);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.SYNC_STATE, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.TRUE);
    // don't force a default value -- the database layer handles sync state initialization itself.
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.SYNC_STATE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.SYNC_STATE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.SYNC_STATE);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.CONFLICT_TYPE, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.integer.name());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.CONFLICT_TYPE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.CONFLICT_TYPE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.CONFLICT_TYPE);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.DEFAULT_ACCESS, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.DEFAULT_ACCESS);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.DEFAULT_ACCESS);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.DEFAULT_ACCESS);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.ROW_OWNER, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.ROW_OWNER);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.ROW_OWNER);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.ROW_OWNER);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.GROUP_READ_ONLY, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.GROUP_READ_ONLY);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.GROUP_READ_ONLY);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.GROUP_READ_ONLY);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.GROUP_MODIFY, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.GROUP_MODIFY);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.GROUP_MODIFY);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.GROUP_MODIFY);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.GROUP_PRIVILEGED, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.GROUP_PRIVILEGED);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.GROUP_PRIVILEGED);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.GROUP_PRIVILEGED);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.FORM_ID, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.FORM_ID);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.FORM_ID);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.FORM_ID);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.LOCALE, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.LOCALE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.LOCALE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.LOCALE);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.SAVEPOINT_TYPE, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.SAVEPOINT_TYPE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.SAVEPOINT_TYPE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.SAVEPOINT_TYPE);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.SAVEPOINT_TIMESTAMP, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.TRUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.SAVEPOINT_TIMESTAMP);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.SAVEPOINT_TIMESTAMP);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.SAVEPOINT_TIMESTAMP);
    //
    jsonSchema = new TreeMap<>();
    model.put(DataTableColumns.SAVEPOINT_CREATOR, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, DataTableColumns.SAVEPOINT_CREATOR);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, DataTableColumns.SAVEPOINT_CREATOR);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, DataTableColumns.SAVEPOINT_CREATOR);

    return model;
}
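
The method repeats the same group of put calls for every administrative column. If one were refactoring, a helper along these lines could express the pattern once; this is a hypothetical sketch, not part of ColumnDefinition, and it assumes the same JSON_SCHEMA_* constants, ElementDataType values and DataTableColumns names used above:

// Hypothetical helper -- not in the original class.
private static TreeMap<String, Object> addMetadataColumn(TreeMap<String, Object> model,
        String elementKey, String type, boolean notNullable) {
    TreeMap<String, Object> jsonSchema = new TreeMap<>();
    model.put(elementKey, jsonSchema);
    jsonSchema.put(JSON_SCHEMA_TYPE, type);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_METADATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_IS_NOT_NULLABLE, notNullable ? Boolean.TRUE : Boolean.FALSE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, elementKey);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, elementKey);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_PATH, elementKey);
    return jsonSchema;
}

// Example use, mirroring the ID block above:
// addMetadataColumn(model, DataTableColumns.ID, ElementDataType.string.name(), true);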

From source file:me.philnate.textmanager.updates.Updater.java

static TreeMap<Version, Class<? extends Update>> createUpdateList(String packageName) {
    final TreeMap<Version, Class<? extends Update>> updates = Maps.newTreeMap();

    final TypeReporter reporter = new TypeReporter() {

        @SuppressWarnings("unchecked")
        @Override
        public Class<? extends Annotation>[] annotations() {
            return new Class[] { UpdateScript.class };
        }

        @SuppressWarnings("unchecked")
        @Override
        public void reportTypeAnnotation(Class<? extends Annotation> annotation, String className) {
            Class<? extends Update> clazz;
            try {
                clazz = (Class<? extends Update>) Updater.class.getClassLoader().loadClass(className);
                updates.put(new Version(clazz.getAnnotation(UpdateScript.class).UpdatesVersion()), clazz);
            } catch (ClassNotFoundException e) {
                LOG.error("Found annotated class, but could not load it " + className, e);
            }
        }

    };
    final AnnotationDetector cf = new AnnotationDetector(reporter);
    try {
        // load updates
        cf.detect(packageName);
    } catch (IOException e) {
        LOG.error("An error occured while collecting Updates", e);
    }
    return updates;
}
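
Note that put here relies on the keys being mutually comparable: Guava's Maps.newTreeMap() creates a TreeMap with natural ordering, so Version is expected to implement Comparable (otherwise a Comparator would have to be supplied). A minimal sketch of the same idea with plain JDK types:

import java.util.TreeMap;

public class NaturalOrderPut {
    public static void main(String[] args) {
        // With no Comparator, TreeMap.put orders keys via their Comparable implementation.
        TreeMap<String, Class<?>> updates = new TreeMap<String, Class<?>>();
        updates.put("1.2.0", Object.class);
        updates.put("1.0.0", String.class);

        // Iteration follows key order, so updates can be applied oldest-first.
        // (A dedicated Version type avoids the lexicographic pitfall of "1.10.0" < "1.2.0".)
        System.out.println(updates.firstKey()); // 1.0.0
    }
}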

From source file:com.deliciousdroid.client.DeliciousApi.java

/**
 * Retrieves a specific list of bookmarks from Delicious.
 *
 * @param hashes A list of bookmark hashes to be retrieved.  
 *    The hashes are MD5 hashes of the URL of the bookmark.
 * 
 * @param account The account being synced.
 * @param context The current application context.
 * @return A list of bookmarks received from the server.
 * @throws IOException If a server error was encountered.
 * @throws AuthenticationException If an authentication error was encountered.
 */
public static ArrayList<Bookmark> getBookmark(ArrayList<String> hashes, Account account, Context context)
        throws IOException, AuthenticationException, TooManyRequestsException {

    ArrayList<Bookmark> bookmarkList = new ArrayList<Bookmark>();
    TreeMap<String, String> params = new TreeMap<String, String>();
    String hashString = "";
    InputStream responseStream = null;
    String url = FETCH_BOOKMARK_URI;

    for (String h : hashes) {
        if (hashes.get(0) != h) {
            hashString += "+";
        }
        hashString += h;
    }
    params.put("meta", "yes");
    params.put("hashes", hashString);

    responseStream = DeliciousApiCall(url, params, account, context);
    SaxBookmarkParser parser = new SaxBookmarkParser(responseStream);

    try {
        bookmarkList = parser.parse();
    } catch (ParseException e) {
        Log.e(TAG, "Server error in fetching bookmark list");
        throw new IOException();
    }

    responseStream.close();
    return bookmarkList;
}
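
Here the TreeMap simply collects request parameters, so they come back in a stable, sorted key order. How DeliciousApiCall serialises them is not shown on this page; the standalone sketch below only illustrates iterating the parameters in that order to build a query string:

import java.util.Map;
import java.util.TreeMap;

public class SortedParamsSketch {
    public static void main(String[] args) {
        TreeMap<String, String> params = new TreeMap<String, String>();
        params.put("meta", "yes");
        params.put("hashes", "abc123+def456");

        // entrySet() iterates in sorted key order: "hashes" before "meta".
        StringBuilder query = new StringBuilder();
        for (Map.Entry<String, String> e : params.entrySet()) {
            if (query.length() > 0) {
                query.append('&');
            }
            query.append(e.getKey()).append('=').append(e.getValue());
        }
        System.out.println(query); // hashes=abc123+def456&meta=yes
    }
}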

From source file:de.vandermeer.skb.interfaces.application.CliOptionList.java

/**
 * Returns a sorted map of CLI options; the mapping is from sort string to CLI option.
 * @param list option list to sort
 * @param numberShort number of arguments with short command
 * @param numberLong number of arguments with long command
 * @return sorted map
 */
static TreeMap<String, ApoBaseC> sortedMap(Set<ApoBaseC> list, int numberShort, int numberLong) {
    TreeMap<String, ApoBaseC> ret = new TreeMap<>();
    if (list == null) {
        return ret;
    }

    for (ApoBaseC opt : list) {
        String key;
        if (numberShort == 0) {
            key = opt.getCliLong();
        } else if (numberLong == 0) {
            key = opt.getCliShort().toString();
        } else {
            if (opt.getCliLong() != null) {
                if (opt.getCliShort() != null) {
                    key = opt.getCliShort().toString() + "," + opt.getCliLong();
                } else {
                    key = opt.getCliLong().charAt(0) + "," + opt.getCliLong();
                }
            } else {
                key = opt.getCliShort().toString();
            }
        }
        ret.put(key.toLowerCase(), opt);
    }
    return ret;
}
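
Because the keys are lowercased, two options whose sort strings differ only in case would collide, and put would silently replace the earlier one. The return value of put can be used to detect such a collision; a standalone sketch, independent of the ApoBaseC type:

import java.util.TreeMap;

public class DetectKeyCollision {
    public static void main(String[] args) {
        TreeMap<String, String> ret = new TreeMap<String, String>();
        ret.put("v,verbose".toLowerCase(), "option A");

        // put returns the displaced value, so a non-null result signals a collision.
        String displaced = ret.put("V,Verbose".toLowerCase(), "option B");
        if (displaced != null) {
            System.err.println("duplicate sort key, replaced: " + displaced);
        }
    }
}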

From source file:backtype.storm.utils.Utils.java

/**
 * Equivalent to the Clojure:
 * (defn integer-divided [sum num-pieces]
 *   (let [base (int (/ sum num-pieces))
 *         num-inc (mod sum num-pieces)
 *         num-bases (- num-pieces num-inc)]
 *     (if (= num-inc 0) {base num-bases} {base num-bases (inc base) num-inc})))
 *
 * @param sum the total to divide
 * @param numPieces the number of pieces to divide it into
 * @return a map from piece size to the number of pieces of that size
 */
public static TreeMap<Integer, Integer> integerDivided(int sum, int numPieces) {
    int base = sum / numPieces;
    int numInc = sum % numPieces;
    int numBases = numPieces - numInc;
    TreeMap<Integer, Integer> ret = new TreeMap<Integer, Integer>();
    ret.put(base, numBases);
    if (numInc != 0) {
        ret.put(base + 1, numInc);
    }
    return ret;
}
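
For example, integerDivided(7, 3) computes base = 2, numInc = 1 and numBases = 2, so the two put calls yield {2=2, 3=1}: two pieces of size 2 and one piece of size 3. A short usage sketch, assuming the Utils class above is on the classpath:

import java.util.TreeMap;

import backtype.storm.utils.Utils;

public class IntegerDividedExample {
    public static void main(String[] args) {
        // 7 split into 3 pieces: base = 2, remainder = 1 -> two pieces of 2, one piece of 3.
        TreeMap<Integer, Integer> pieces = Utils.integerDivided(7, 3);
        System.out.println(pieces); // {2=2, 3=1}
    }
}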

From source file:org.opendatakit.database.data.ColumnDefinition.java

private static void getDataModelHelper(TreeMap<String, Object> jsonSchema, ColumnDefinition c,
        boolean nestedInsideUnitOfRetention) {
    ElementType type = c.getType();
    ElementDataType dataType = type.getDataType();

    // this is a user-defined field
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_DATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, c.getElementName());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, c.getElementKey());

    if (nestedInsideUnitOfRetention) {
        jsonSchema.put(JSON_SCHEMA_NOT_UNIT_OF_RETENTION, Boolean.TRUE);
    }

    if (dataType == ElementDataType.array) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
        ColumnDefinition ch = c.getChildren().get(0);
        TreeMap<String, Object> itemSchema = new TreeMap<>();
        jsonSchema.put(JSON_SCHEMA_ITEMS, itemSchema);
        itemSchema.put(JSON_SCHEMA_ELEMENT_PATH,
                (String) jsonSchema.get(JSON_SCHEMA_ELEMENT_PATH) + '.' + ch.getElementName());
        // if it isn't already nested within a unit of retention,
        // an array is always itself a unit of retention
        getDataModelHelper(itemSchema, ch, true); // recursion...

        ArrayList<String> keys = new ArrayList<String>();
        keys.add(ch.getElementKey());
        jsonSchema.put(JSON_SCHEMA_LIST_CHILD_ELEMENT_KEYS, keys);
    } else if (dataType == ElementDataType.bool) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else if (dataType == ElementDataType.configpath) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
    } else if (dataType == ElementDataType.integer || dataType == ElementDataType.number) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else if (dataType == ElementDataType.object) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
        TreeMap<String, Object> propertiesSchema = new TreeMap<>();
        jsonSchema.put(JSON_SCHEMA_PROPERTIES, propertiesSchema);
        ArrayList<String> keys = new ArrayList<>();
        for (ColumnDefinition ch : c.getChildren()) {
            TreeMap<String, Object> itemSchema = new TreeMap<>();
            propertiesSchema.put(ch.getElementName(), itemSchema);
            itemSchema.put(JSON_SCHEMA_ELEMENT_PATH,
                    (String) jsonSchema.get(JSON_SCHEMA_ELEMENT_PATH) + '.' + ch.getElementName());
            // objects are not units of retention -- propagate retention status.
            getDataModelHelper(itemSchema, ch, nestedInsideUnitOfRetention); // recursion...
            keys.add(ch.getElementKey());
        }
        jsonSchema.put(JSON_SCHEMA_LIST_CHILD_ELEMENT_KEYS, keys);
    } else if (dataType == ElementDataType.rowpath) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, ElementDataType.rowpath.name());
    } else if (dataType == ElementDataType.string) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else {
        throw new IllegalStateException("unexpected alternative ElementDataType");
    }
}
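
Both this helper and getExtendedDataModel above rely on the same trick: the value passed to put can itself be another TreeMap<String, Object>, so the nested JSON structure is built simply by putting child maps into parent maps. A tiny standalone sketch of that nesting, with made-up schema values:

import java.util.TreeMap;

public class NestedSchemaSketch {
    public static void main(String[] args) {
        TreeMap<String, Object> parent = new TreeMap<String, Object>();
        TreeMap<String, Object> child = new TreeMap<String, Object>();

        parent.put("type", "object");
        parent.put("properties", child);  // a map stored as a value nests naturally

        child.put("latitude", "number");
        child.put("longitude", "number");

        System.out.println(parent); // {properties={latitude=number, longitude=number}, type=object}
    }
}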

From source file:gov.usgs.anss.query.MultiplexedMSOutputer.java

/**
 * This does the hard work of sorting - called as a shutdown hook.
 * TODO: consider recursion.
 * @param outputName name for the output file.
 * @param files list of MiniSEED files to multiplex.
 * @param cleanup flag indicating whether to cleanup after ourselves or not.
 * @throws IOException
 */
public static void multiplexFiles(String outputName, List<File> files, boolean cleanup, boolean allowEmpty)
        throws IOException {
    ArrayList<File> cleanupFiles = new ArrayList<File>(files);
    ArrayList<File> moreFiles = new ArrayList<File>();

    File outputFile = new File(outputName);
    File tempOutputFile = new File(outputName + ".tmp");

    do {
        // On subsequent iterations, if files were deferred, fold the previous output back in as one of the inputs.
        if (!moreFiles.isEmpty()) {
            logger.info("more files left to multiplex...");
            FileUtils.deleteQuietly(tempOutputFile);
            FileUtils.moveFile(outputFile, tempOutputFile);

            cleanupFiles.add(tempOutputFile);
            moreFiles.add(tempOutputFile);
            files = moreFiles;
            moreFiles = new ArrayList<File>();
        }

        logger.log(Level.FINE, "Multiplexing blocks from {0} temp files to {1}",
                new Object[] { files.size(), outputName });
        BufferedOutputStream out = new BufferedOutputStream(FileUtils.openOutputStream(outputFile));

        // The hard part, sorting the temp files...
        TreeMap<MiniSeed, FileInputStream> blks = new TreeMap<MiniSeed, FileInputStream>(
                new MiniSeedTimeOnlyComparator());
        // Prime the TreeMap
        logger.log(Level.FINEST, "Priming the TreeMap with files: {0}", files);
        for (File file : files) {
            logger.log(Level.INFO, "Reading first block from {0}", file.toString());
            try {
                FileInputStream fs = FileUtils.openInputStream(file);
                MiniSeed ms = getNextValidMiniSeed(fs, allowEmpty);
                if (ms != null) {
                    blks.put(ms, fs);
                } else {
                    logger.log(Level.WARNING, "Failed to read valid MiniSEED block from {0}", file.toString());
                }
            } catch (IOException ex) {
                // Catch "Too many open files" i.e. hitting ulimit, throw anything else.
                if (ex.getMessage().contains("Too many open files")) {
                    logger.log(Level.INFO, "Too many open files - {0} deferred.", file.toString());
                    moreFiles.add(file);
                } else
                    throw ex;
            }
        }

        while (!blks.isEmpty()) {
            MiniSeed next = blks.firstKey();
            out.write(next.getBuf(), 0, next.getBlockSize());

            FileInputStream fs = blks.remove(next);
            next = getNextValidMiniSeed(fs, allowEmpty);
            if (next != null) {
                blks.put(next, fs);
            } else {
                fs.close();
            }
        }

        out.close();
    } while (!moreFiles.isEmpty());

    if (cleanup) {
        logger.log(Level.INFO, "Cleaning up...");
        for (File file : cleanupFiles) {
            FileUtils.deleteQuietly(file);
        }
    }
}
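
The merge loop works because TreeMap keeps its keys ordered by the supplied comparator, so firstKey() always yields the earliest pending block. One caveat: put treats comparator-equal keys as the same key, so if two files ever produced blocks that compare equal under MiniSeedTimeOnlyComparator, the second put would replace the first and that block's stream could be dropped silently. A stripped-down standalone sketch of the same k-way merge pattern over plain iterators:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;

public class KWayMergeSketch {
    public static void main(String[] args) {
        List<List<Integer>> sortedSources = Arrays.asList(
                Arrays.asList(1, 4, 9),
                Arrays.asList(2, 3, 8),
                Arrays.asList(5, 6, 7));

        // Key: next pending value; value: the iterator it came from.
        TreeMap<Integer, Iterator<Integer>> heads = new TreeMap<Integer, Iterator<Integer>>();
        for (List<Integer> source : sortedSources) {
            Iterator<Integer> it = source.iterator();
            if (it.hasNext()) {
                heads.put(it.next(), it); // assumes values are distinct across sources
            }
        }

        while (!heads.isEmpty()) {
            Integer smallest = heads.firstKey();
            Iterator<Integer> it = heads.remove(smallest);
            System.out.print(smallest + " ");
            if (it.hasNext()) {
                heads.put(it.next(), it);
            }
        }
        // prints: 1 2 3 4 5 6 7 8 9
    }
}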