Example usage for java.util LinkedHashMap values

List of usage examples for java.util LinkedHashMap values

Introduction

On this page you can find example usages of java.util.LinkedHashMap.values().

Prototype

public Collection<V> values() 

Source Link

Document

Returns a Collection view of the values contained in this map.
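
A minimal, self-contained sketch (class and variable names are illustrative) of what the returned view gives you for a LinkedHashMap: in its default insertion-order mode, values() iterates in the order entries were put, and the collection is backed by the map, so removals through the view are reflected in the map.

import java.util.Collection;
import java.util.LinkedHashMap;

public class LinkedHashMapValuesSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, String> map = new LinkedHashMap<>();
        map.put("first", "one");
        map.put("second", "two");
        map.put("third", "three");

        // For a LinkedHashMap in insertion-order mode, values() iterates
        // in the order the entries were added.
        Collection<String> values = map.values();
        for (String v : values) {
            System.out.println(v); // one, two, three
        }

        // The collection is a view backed by the map: removing a value
        // through the view also removes the corresponding entry.
        values.remove("two");
        System.out.println(map.containsKey("second")); // false
    }
}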

Usage

From source file:au.com.wallaceit.reddinator.SubredditSelectActivity.java

private void showWidgetThemeDialog() {

    // set themes list
    LinkedHashMap<String, String> themeList = global.mThemeManager.getThemeList(ThemeManager.LISTMODE_ALL);
    themeList.put("app_select", "Use App theme");
    final String[] keys = themeList.keySet().toArray(new String[themeList.keySet().size()]);
    String curTheme = mSharedPreferences.getString("widgettheme-" + mAppWidgetId, "app_select");
    int curIndex = 0;
    for (int i = 0; i < keys.length; i++) {
        if (keys[i].equals(curTheme)) {
            curIndex = i;
            break;
        }
    }
    AlertDialog.Builder builder = new AlertDialog.Builder(this);
    builder.setTitle("Select Widget Theme")
            .setSingleChoiceItems(themeList.values().toArray(new String[themeList.values().size()]), curIndex,
                    new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialogInterface, int i) {
                            needsThemeUpdate = true;
                            SharedPreferences.Editor editor = mSharedPreferences.edit();
                            editor.putString("widgettheme-" + mAppWidgetId, keys[i]);
                            System.out.println(keys[i]);
                            editor.apply();
                            dialogInterface.cancel();
                        }
                    })
            .setPositiveButton("Close", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    dialog.cancel();
                }
            }).show();
}

From source file:org.polymap.core.data.imex.csv.CsvImporter.java

/**
 * Convert a csv file to a FeatureCollection. <b>For now this supports only
 * point geometries</b>.<br>
 * For a different crs it also performs coordinate transformation.
 * <p>
 * <b>NOTE: this doesn't support date attributes</b>
 * </p>
 * <p>
 * This code was initially taken from
 * {@link eu.hydrologis.jgrass.libs.utils.features.FeatureUtilities}.
 * 
 * @param crs the crs to use.
 * @param fieldsAndTypesIndex the {@link Map} of field names and
 *        {@link JGrassConstants#CSVTYPESARRAY types}.
 * @param monitor progress monitor.
 * @return the created {@link FeatureCollection}
 * @throws Exception
 */
@SuppressWarnings("nls")
public FeatureCollection<SimpleFeatureType, SimpleFeature> createFeatureCollection(String name,
        CoordinateReferenceSystem crs, LinkedHashMap<String, Integer> fieldsAndTypesIndex,
        IProgressMonitorJGrass monitor) throws Exception {

    checkReadLines();

    GeometryFactory gf = new GeometryFactory();
    Map<String, Class> typesMap = JGrassConstants.CSVTYPESCLASSESMAP;
    String[] typesArray = JGrassConstants.CSVTYPESARRAY;

    SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
    b.setName(name);
    b.setCRS(crs);
    b.add("the_geom", Point.class);

    int xIndex = -1;
    int yIndex = -1;
    // the bbox of all imported points
    Envelope bbox = null;
    Set<String> fieldNames = fieldsAndTypesIndex.keySet();
    String[] fieldNamesArray = fieldNames.toArray(new String[fieldNames.size()]);
    for (int i = 0; i < fieldNamesArray.length; i++) {
        String fieldName = fieldNamesArray[i];
        Integer typeIndex = fieldsAndTypesIndex.get(fieldName);

        if (typeIndex == 0) {
            xIndex = i;
        } else if (typeIndex == 1) {
            yIndex = i;
        } else {
            Class class1 = typesMap.get(typesArray[typeIndex]);
            b.add(fieldName, class1);
            log.debug("    field: name=" + fieldName + ", type=" + class1 + ", index=" + i);
        }
    }
    SimpleFeatureType featureType = b.buildFeatureType();

    // FeatureCollection
    FeatureCollection<SimpleFeatureType, SimpleFeature> newCollection = new DefaultFeatureCollection(null,
            featureType);

    try {
        Collection<Integer> orderedTypeIndexes = fieldsAndTypesIndex.values();
        Integer[] orderedTypeIndexesArray = orderedTypeIndexes.toArray(new Integer[orderedTypeIndexes.size()]);

        featureErrors.clear();
        int featureId = 0;
        monitor.beginTask("Reading CSV Data", lines.size());
        int count = 0;
        for (String[] line : lines) {
            monitor.worked(1);
            if (monitor.isCanceled()) {
                return newCollection;
            }
            try {
                SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);
                Object[] values = new Object[fieldNames.size() - 1];

                try {
                    double x = line[xIndex].length() > 0 ? nf.parse(line[xIndex]).doubleValue() : -1; //bbox.centre().x;
                    double y = line[yIndex].length() > 0 ? nf.parse(line[yIndex]).doubleValue() : -1; //bbox.centre().y;

                    if (x <= 0 || y <= 0) {
                        log.info("        Missing geom. skipping this object!");
                        continue;
                    }
                    Point point = gf.createPoint(new Coordinate(x, y));
                    values[0] = point;
                    if (bbox != null) {
                        bbox.expandToInclude(point.getCoordinate());
                    } else {
                        bbox = point.getEnvelope().getEnvelopeInternal();
                    }
                } catch (Exception e) {
                    // don't break the entire run
                    log.warn("Error while parsing ccordinates." + " index=" + count + " | xIndex=" + xIndex
                            + ", value=" + line[xIndex] + " | yIndex=" + yIndex + ", value=" + line[yIndex]
                            + " (" + e.toString() + ")");
                }

                int objIndex = 1;
                for (int i = 0; i < orderedTypeIndexesArray.length; i++) {
                    if (i == xIndex || i == yIndex) {
                        continue;
                    }

                    String value = line[i];
                    int typeIndex = orderedTypeIndexesArray[i];
                    String typeName = typesArray[typeIndex];
                    if (typeName.equals(typesArray[3])) {
                        values[objIndex] = value;
                    } else if (typeName.equals(typesArray[4])) {
                        //values[objIndex] = new Double( value );
                        values[objIndex] = nf.parse(value);
                    } else if (typeName.equals(typesArray[5])) {
                        values[objIndex] = new Integer(value);
                    } else {
                        throw new IllegalArgumentException("An undefined value type was found");
                    }
                    objIndex++;
                }
                builder.addAll(values);

                SimpleFeature feature = builder.buildFeature(featureType.getTypeName() + "." + featureId);
                newCollection.add(feature);
                count++;
            } catch (Exception e) {
                featureErrors.add(featureId);
                log.warn("Error while creating FeatureCollection.", e);
            }
            featureId++;
        }
        monitor.done();

    } catch (Exception e) {
        //JGrassLibsPlugin.log( "JGrassLibsPlugin problem", e ); //$NON-NLS-1$
        e.printStackTrace();
        throw e;
    }
    return newCollection;
}
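
The importer above reads field names from keySet() and type indexes from values() of the same fieldsAndTypesIndex map and pairs them by position; this relies on a LinkedHashMap iterating both views in the same insertion order. A minimal, self-contained sketch of that assumption (names and data are illustrative):

import java.util.LinkedHashMap;

public class KeyValueOrderSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> fieldsAndTypes = new LinkedHashMap<>();
        fieldsAndTypes.put("x", 0);
        fieldsAndTypes.put("y", 1);
        fieldsAndTypes.put("name", 3);

        String[] names = fieldsAndTypes.keySet().toArray(new String[0]);
        Integer[] types = fieldsAndTypes.values().toArray(new Integer[0]);
        for (int i = 0; i < names.length; i++) {
            // names[i] and types[i] belong to the same entry because keySet()
            // and values() both iterate in insertion order for a LinkedHashMap.
            System.out.println(names[i] + " -> " + types[i]);
        }
    }
}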

From source file:org.artifactory.storage.db.search.model.VfsQueryDbImpl.java

private VfsQueryResult executeSelect(DbSqlQueryBuilder query, int limit) {
    if (limit != Integer.MAX_VALUE) {
        DbService dbService = StorageContextHelper.get().beanForType(DbService.class);
        DbType databaseType = dbService.getDatabaseType();
        boolean supportsLimitSyntax = databaseType == DbType.MYSQL;
        if (supportsLimitSyntax) {
            query.append(" LIMIT ").append(String.valueOf(limit));
        }
    }

    ResultSet rs = null;
    try {
        log.debug("Executing search query: {}", query);
        List<Object> params = query.params;
        JdbcHelper jdbcHelper = StorageContextHelper.get().beanForType(JdbcHelper.class);
        rs = jdbcHelper.executeSelect(query.sqlQuery.toString(),
                (Object[]) params.toArray(new Object[params.size()]));
        LinkedHashMap<NodePath, VfsQueryRow> map = Maps.newLinkedHashMap();
        int nbLines = 0;
        while (rs.next()) {
            if (nbLines >= limit) {
                break;
            }
            nbLines++;
            int pos = 1;
            long nodeId = rs.getLong(pos++);
            boolean file = rs.getBoolean(pos++);
            NodePath nodePath = new NodePath(rs.getString(pos++), BaseDao.emptyIfNullOrDot(rs.getString(pos++)),
                    BaseDao.emptyIfNullOrDot(rs.getString(pos++)), file);
            VfsQueryRow row = map.get(nodePath);
            if (row == null) {
                if (hasArchiveEntries()) {
                    row = new VfsQueryRowDbImpl(nodeId, file, nodePath, rs.getString(pos++),
                            rs.getString(pos++));
                } else {
                    row = new VfsQueryRowDbImpl(nodeId, file, nodePath);
                }
                map.put(nodePath, row);
            } else {
                if (hasArchiveEntries()) {
                    ((VfsQueryRowDbImpl) row).addArchiveEntry(rs.getString(pos++), rs.getString(pos++));
                } else {
                    log.warn("Got multiple times the same node " + nodePath + " in query " + query);
                }
            }
            log.debug("Used {} params", pos);
        }
        if (nbLines >= limit) {
            nbLines = -1;
        }
        return new VfsQueryResultDbImpl(map.values(), nbLines);
    } catch (SQLException e) {
        throw new StorageException("Could not execute query '" + query + "' due to:" + e.getMessage(), e);
    } finally {
        DbUtils.close(rs);
    }
}

From source file:com.alibaba.wasp.client.WaspAdmin.java

public String describeIndex(String tableName, String indexName) throws IOException {
    FTable table = getTableDescriptor(Bytes.toBytes(tableName));
    LinkedHashMap<String, Index> indexMap = table.getIndex();
    Index index = indexMap.get(indexName);
    if (index == null) {
        return "";
    }

    StringBuilder builder = new StringBuilder();
    builder.append("+----------------------+----------+-------+\n");
    builder.append("|              INDEX_KEYS                 |\n");
    builder.append("+----------------------+----------+-------+\n");
    builder.append("| Field                | Type     | ORDER |\n");
    builder.append("+----------------------+----------+-------+\n");
    String line = "| {0} | {1} | {2} |";
    LinkedHashMap<String, Field> indexKeys = index.getIndexKeys();
    Map<String, Field> storings = index.getStoring();
    Set<String> desc = index.getDesc();

    for (Field field : indexKeys.values()) {
        String fieldname = field.getName();
        String s0 = fieldname + (fieldname.length() < 20 ? getGivenBlanks(20 - fieldname.length()) : "");
        String type = field.getType().toString();
        String s1 = type + (type.length() < 8 ? getGivenBlanks(8 - type.length()) : "");
        String s2 = desc.contains(fieldname) ? "desc " : "asc  ";
        builder.append(MessageFormat.format(line, s0, s1, s2));
        builder.append("\n");
    }
    builder.append("+----------------------+----------+-------+\n");
    builder.append("|               STORINGS                  |\n");
    builder.append("+----------------------+----------+-------+\n");
    builder.append("| Field                | Type     | ORDER |\n");
    builder.append("+----------------------+----------+-------+\n");
    for (Field field : storings.values()) {
        String fieldname = field.getName();
        String s0 = fieldname + (fieldname.length() < 15 ? getGivenBlanks(15 - fieldname.length()) : "");
        String type = field.getType().toString();
        String s1 = type + (type.length() < 8 ? getGivenBlanks(8 - type.length()) : "");
        String s2 = desc.contains(fieldname) ? "desc " : "asc  ";
        builder.append(MessageFormat.format(line, s0, s1, s2));
        builder.append("\n");
    }
    builder.append("+----------------------+----------+-------+\n");
    return builder.toString();
}

From source file:org.cerberus.servlet.crud.testexecution.ReadTestCaseExecution.java

private List<TestCaseExecution> hashExecution(List<TestCaseExecution> testCaseExecutions,
        List<TestCaseExecutionInQueue> testCaseExecutionsInQueue) throws ParseException {
    LinkedHashMap<String, TestCaseExecution> testCaseExecutionsList = new LinkedHashMap<>();
    SimpleDateFormat formater = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");

    for (TestCaseExecution testCaseExecution : testCaseExecutions) {
        String key = testCaseExecution.getBrowser() + "_" + testCaseExecution.getCountry() + "_"
                + testCaseExecution.getEnvironment() + "_" + testCaseExecution.getTest() + "_"
                + testCaseExecution.getTestCase();
        testCaseExecutionsList.put(key, testCaseExecution);
    }
    for (TestCaseExecutionInQueue testCaseExecutionInQueue : testCaseExecutionsInQueue) {
        TestCaseExecution testCaseExecution = testCaseExecutionInQueueService
                .convertToTestCaseExecution(testCaseExecutionInQueue);
        String key = testCaseExecution.getBrowser() + "_" + testCaseExecution.getCountry() + "_"
                + testCaseExecution.getEnvironment() + "_" + testCaseExecution.getTest() + "_"
                + testCaseExecution.getTestCase();
        if ((testCaseExecutionsList.containsKey(key) && testCaseExecutionsList.get(key)
                .getStart() < testCaseExecutionInQueue.getRequestDate().getTime())
                || !testCaseExecutionsList.containsKey(key)) {
            testCaseExecutionsList.put(key, testCaseExecution);
        }
    }
    List<TestCaseExecution> result = new ArrayList<TestCaseExecution>(testCaseExecutionsList.values());

    return result;
}
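
hashExecution() above uses the LinkedHashMap as a "latest execution per key" store: a repeated key overwrites the value while keeping its original position, and the final list is simply new ArrayList<>(map.values()). A minimal sketch of that de-duplication idiom (keys and values are illustrative):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class LatestPerKeySketch {
    public static void main(String[] args) {
        LinkedHashMap<String, String> latestByKey = new LinkedHashMap<>();
        latestByKey.put("FF_FR_QA_Test1", "execution-1");
        latestByKey.put("FF_FR_QA_Test1", "execution-2"); // replaces the value, keeps the key's position
        latestByKey.put("IE_UK_PROD_Test2", "execution-3");

        List<String> result = new ArrayList<>(latestByKey.values());
        System.out.println(result); // [execution-2, execution-3]
    }
}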

From source file:org.bimserver.charting.Algorithms.Binning.java

/**
 * Get bins (a bin is a variant of an ArrayList<Vector2d> object) containing data points. This is done so that bin lengths can be queried.
 *
 * @param filteredData
 * @return
 */
public BinCollection getBinsById(ModelDimension x, ModelDimension y, ChartRows filteredData) {
    LinkedHashMap<String, Bin> binsById = new LinkedHashMap<String, Bin>();
    boolean xHandlerIsValid = XHandler != null;
    boolean yHandlerIsValid = YHandler != null;
    if (x != null && y != null) {
        for (ChartRow row : filteredData) {
            // Create place holders for values.
            Vector2d point = new Vector2d();
            Vector2d worldSpacePoint = new Vector2d();
            // Get filtered raw values.
            ArrayList<Object> xValues = row.get(x);
            ArrayList<Object> yValues = row.get(y);
            // Coerce the values. Also, modulate them based on the supplied handlers.
            if (xValues.size() > 0) {
                Object testValue = xValues.get(0);
                if (testValue instanceof Number) {
                    Double actualValue = ((Number) testValue).doubleValue();
                    point.x(actualValue);
                    if (xHandlerIsValid)
                        worldSpacePoint.x(XHandler.modulate(actualValue));
                    else
                        worldSpacePoint.x(actualValue);
                }
            }
            if (yValues.size() > 0) {
                Object testValue = yValues.get(0);
                if (testValue instanceof Number) {
                    Double actualValue = ((Number) testValue).doubleValue();
                    point.y(actualValue);
                    if (yHandlerIsValid)
                        worldSpacePoint.y(YHandler.modulate(actualValue));
                    else
                        worldSpacePoint.y(actualValue);
                }
            }
            handlePointAndIndex(point, worldSpacePoint, binsById);
        }
    }
    // Send it back. Implicitly caches lower and upper lengths.
    return new BinCollection(binsById.values());
}

From source file:com.alibaba.wasp.plan.parser.druid.DruidDDLParser.java

/**
 * Process an Alter Table statement and generate an execution plan.
 */
private void getAlterTablePlan(ParseContext context, MySqlAlterTableStatement sqlAlterTableStatement,
        MetaEventOperation metaEventOperation) throws IOException {
    SQLExprTableSource tableSource = sqlAlterTableStatement.getTableSource();
    String tableName = parseFromClause(tableSource);
    // check if table exists and get Table info
    FTable oldTable = metaEventOperation.checkAndGetTable(tableName, true);

    FTable newTable = FTable.clone(oldTable);

    List<SQLAlterTableItem> items = sqlAlterTableStatement.getItems();
    for (SQLAlterTableItem item : items) {
        if (item instanceof WaspSqlAlterTableChangeColumn) {
            // Alter Table Change Column
            WaspSqlAlterTableChangeColumn changeColumn = (WaspSqlAlterTableChangeColumn) item;

            SQLName columnName = changeColumn.getColumnName();
            LinkedHashMap<String, Field> ftableColumns = newTable.getColumns();
            String oldColumnName = parseName(columnName);
            // The table must have this column, and the column must not be a primary key
            metaEventOperation.checkFieldExists(oldTable, oldColumnName);
            metaEventOperation.checkFieldNotInPrimaryKeys(oldTable, oldColumnName);
            // Check column not in an index
            metaEventOperation.checkColumnNotInIndex(oldTable, oldColumnName);

            // Which column(index) to change
            Field field = ftableColumns.get(oldColumnName); // Change this Field
            SQLColumnDefinition newColumnDefinition = changeColumn.getNewColumnDefinition();
            // Specifying a ColumnFamily is not supported.
            if (newColumnDefinition instanceof WaspSqlColumnDefinition) {
                WaspSqlColumnDefinition waspSqlColumnDefinition = (WaspSqlColumnDefinition) newColumnDefinition;
                if (waspSqlColumnDefinition.getColumnFamily() != null) {
                    throw new UnsupportedException("Alter Table, columnFamily specify do not supported.");
                }
            }
            if (newColumnDefinition.getDataType() != null) {
                field.setType(parse(newColumnDefinition.getDataType()));
            }
            String newColumnName = parseName(newColumnDefinition.getName());
            if (!oldColumnName.equals(newColumnName)) { // Change column name
                for (Field f : ftableColumns.values()) {
                    if (f.getName().equalsIgnoreCase(newColumnName)) {
                        throw new UnsupportedException(
                                "Unsupported. Rename one column to a column that already column "
                                        + newColumnName);
                    }
                }
                field.setName(newColumnName);
            }
        } else if (item instanceof MySqlAlterTableAddColumn) {
            // Alter Table Add Column
            MySqlAlterTableAddColumn addColumn = (MySqlAlterTableAddColumn) item;
            List<SQLColumnDefinition> columns = addColumn.getColumns();
            boolean first = addColumn.isFirst();
            SQLName afterColumn = addColumn.getAfterColumn();
            LinkedHashMap<String, Field> ftableColumns = newTable.getColumns();

            List<Field> addFields = convertColumnDefForAlterTable(columns);
            // check Duplicate column name
            metaEventOperation.areLegalTableColumns(ftableColumns.values(), addFields);
            // Adding a ColumnFamily dynamically is not supported right now.
            metaEventOperation.checkColumnFamilyName(ftableColumns.values(), addFields);
            if (first) {
                this.addFieldByPosition(-1, addFields, ftableColumns, newTable);
            } else if (afterColumn != null) {
                int index = getIndex(parseName(afterColumn), ftableColumns);
                this.addFieldByPosition(index, addFields, ftableColumns, newTable);
            } else {
                int index = ftableColumns.size() - 1;
                this.addFieldByPosition(index, addFields, ftableColumns, newTable);
            }
        } else if (item instanceof SQLAlterTableDropColumnItem) {
            // Alter Table Drop Column
            SQLAlterTableDropColumnItem dropColumn = (SQLAlterTableDropColumnItem) item;
            SQLName columnName = dropColumn.getColumnName();
            String cname = parseName(columnName);
            // This column is not primary key
            metaEventOperation.checkFieldNotInPrimaryKeys(oldTable, cname);
            // Check column not in an index; if you want to drop the column you
            // should drop the index first
            metaEventOperation.checkColumnNotInIndex(oldTable, cname);

            LinkedHashMap<String, Field> ftableColumns = newTable.getColumns();
            Field field = ftableColumns.remove(cname);
            if (field == null) {
                throw new UnsupportedException("Unsupported Do not find this column "
                        + SQLUtils.toSQLString(((SQLAlterTableDropColumnItem) item).getColumnName()));
            }
            newTable.setColumns(ftableColumns);
        } else {
            throw new UnsupportedException(SQLUtils.toSQLString(item) + " SQLAlterTableItem Unsupported");
        }
    }

    AlterTablePlan alterTable = new AlterTablePlan(oldTable, newTable);
    context.setPlan(alterTable);
    LOG.debug("AlterTablePlan " + alterTable.toString());
}

From source file:org.jahia.ajax.gwt.helper.PublicationHelper.java

public List<GWTJahiaPublicationInfo> getFullPublicationInfos(List<String> uuids, Set<String> languages,
        JCRSessionWrapper currentUserSession, boolean allSubTree, boolean checkForUnpublication)
        throws GWTJahiaServiceException {
    try {
        if (!checkForUnpublication) {
            LinkedHashMap<String, GWTJahiaPublicationInfo> res = new LinkedHashMap<String, GWTJahiaPublicationInfo>();
            for (String language : languages) {
                List<PublicationInfo> infos = publicationService.getPublicationInfos(uuids,
                        Collections.singleton(language), true, true, allSubTree,
                        currentUserSession.getWorkspace().getName(), Constants.LIVE_WORKSPACE);
                for (PublicationInfo info : infos) {
                    info.clearInternalAndPublishedReferences(uuids);
                }
                final List<GWTJahiaPublicationInfo> infoList = convert(infos, currentUserSession, language,
                        "publish");
                String lastGroup = null;
                String lastTitle = null;
                Locale l = new Locale(language);
                for (GWTJahiaPublicationInfo info : infoList) {
                    if (((info.isPublishable()
                            || info.getStatus() == GWTJahiaPublicationInfo.MANDATORY_LANGUAGE_UNPUBLISHABLE)
                            && (info.getWorkflowDefinition() != null
                                    || info.isAllowedToPublishWithoutWorkflow()))) {
                        res.put(language + "/" + info.getUuid(), info);
                        if (lastGroup == null || !info.getWorkflowGroup().equals(lastGroup)) {
                            lastGroup = info.getWorkflowGroup();
                            lastTitle = info.getTitle() + " ( " + l.getDisplayName(l) + " )";
                        }
                        info.setWorkflowTitle(lastTitle);
                    }
                }
            }
            return new ArrayList<GWTJahiaPublicationInfo>(res.values());
        } else {
            List<PublicationInfo> infos = publicationService.getPublicationInfos(uuids, null, false, true,
                    allSubTree, currentUserSession.getWorkspace().getName(), Constants.LIVE_WORKSPACE);
            LinkedHashMap<String, GWTJahiaPublicationInfo> res = new LinkedHashMap<String, GWTJahiaPublicationInfo>();
            for (String language : languages) {
                final List<GWTJahiaPublicationInfo> infoList = convert(infos, currentUserSession, language,
                        "unpublish");
                String lastGroup = null;
                String lastTitle = null;
                Locale l = new Locale(language);
                for (GWTJahiaPublicationInfo info : infoList) {
                    if ((info.getStatus() == GWTJahiaPublicationInfo.PUBLISHED
                            && (info.getWorkflowDefinition() != null
                                    || info.isAllowedToPublishWithoutWorkflow()))) {
                        res.put(language + "/" + info.getUuid(), info);
                        if (lastGroup == null || !info.getWorkflowGroup().equals(lastGroup)) {
                            lastGroup = info.getWorkflowGroup();
                            lastTitle = info.getTitle() + " ( " + l.getDisplayName(l) + " )";
                        }
                        info.setWorkflowTitle(lastTitle);
                    }
                }
            }
            for (PublicationInfo info : infos) {
                Set<String> publishedLanguages = info.getAllPublishedLanguages();
                if (!languages.containsAll(publishedLanguages)) {
                    keepOnlyTranslation(res);
                }
            }
            return new ArrayList<GWTJahiaPublicationInfo>(res.values());
        }
    } catch (RepositoryException e) {
        logger.error("repository exception", e);
        throw new GWTJahiaServiceException(
                "Cannot get publication status for nodes " + uuids + ". Cause: " + e.getLocalizedMessage(), e);
    }
}

From source file:com.sonicle.webtop.core.Service.java

public void processLookupLanguages(HttpServletRequest request, HttpServletResponse response, PrintWriter out) {
    LinkedHashMap<String, JsSimple> items = new LinkedHashMap<>();
    Locale locale = getEnv().getSession().getLocale();

    try {
        for (AppLocale apploc : WT.getInstalledLocales()) {
            final Locale loc = apploc.getLocale();
            final String lang = loc.getLanguage();
            if (!items.containsKey(lang)) {
                //items.put(lang, new JsSimple(lang, loc.getDisplayLanguage(locale)));
                items.put(lang, new JsSimple(apploc.getId(), apploc.getLocale().getDisplayName(locale)));
            }
        }
        new JsonResult("languages", items.values(), items.size()).printTo(out);

    } catch (Exception ex) {
        logger.error("Error in LookupLanguages", ex);
        new JsonResult(false, "Unable to lookup languages").printTo(out);
    }
}

From source file:juicebox.tools.utils.original.Preprocessor.java

/**
 * Note -- compressed
 *
 * @param zd          Matrix zoom data
 * @param block       Block to write
 * @param sampledData Array to hold a sample of the data (to compute statistics)
 * @throws IOException
 */
private void writeBlock(MatrixZoomDataPP zd, BlockPP block, DownsampledDoubleArrayList sampledData)
        throws IOException {

    final Map<Point, ContactCount> records = block.getContactRecordMap();//   getContactRecords();

    // System.out.println("Write contact records : records count = " + records.size());

    // Count records first
    int nRecords;
    if (countThreshold > 0) {
        nRecords = 0;
        for (ContactCount rec : records.values()) {
            if (rec.getCounts() >= countThreshold) {
                nRecords++;
            }
        }
    } else {
        nRecords = records.size();
    }
    BufferedByteWriter buffer = new BufferedByteWriter(nRecords * 12);
    buffer.putInt(nRecords);
    zd.cellCount += nRecords;

    // Find extents of occupied cells
    int binXOffset = Integer.MAX_VALUE;
    int binYOffset = Integer.MAX_VALUE;
    int binXMax = 0;
    int binYMax = 0;
    for (Map.Entry<Point, ContactCount> entry : records.entrySet()) {
        Point point = entry.getKey();
        binXOffset = Math.min(binXOffset, point.x);
        binYOffset = Math.min(binYOffset, point.y);
        binXMax = Math.max(binXMax, point.x);
        binYMax = Math.max(binYMax, point.y);
    }

    buffer.putInt(binXOffset);
    buffer.putInt(binYOffset);

    // Sort keys in row-major order
    List<Point> keys = new ArrayList<Point>(records.keySet());
    Collections.sort(keys, new Comparator<Point>() {
        @Override
        public int compare(Point o1, Point o2) {
            if (o1.y != o2.y) {
                return o1.y - o2.y;
            } else {
                return o1.x - o2.x;
            }
        }
    });
    Point lastPoint = keys.get(keys.size() - 1);
    final short w = (short) (binXMax - binXOffset + 1);

    boolean isInteger = true;
    float maxCounts = 0;

    LinkedHashMap<Integer, List<ContactRecord>> rows = new LinkedHashMap<Integer, List<ContactRecord>>();
    for (Point point : keys) {
        final ContactCount contactCount = records.get(point);
        float counts = contactCount.getCounts();
        if (counts >= countThreshold) {

            isInteger = isInteger && (Math.floor(counts) == counts);
            maxCounts = Math.max(counts, maxCounts);

            final int px = point.x - binXOffset;
            final int py = point.y - binYOffset;
            List<ContactRecord> row = rows.get(py);
            if (row == null) {
                row = new ArrayList<ContactRecord>(10);
                rows.put(py, row);
            }
            row.add(new ContactRecord(px, py, counts));
        }
    }

    // Compute size for each representation and choose smallest
    boolean useShort = isInteger && (maxCounts < Short.MAX_VALUE);
    int valueSize = useShort ? 2 : 4;

    int lorSize = 0;
    int nDensePts = (lastPoint.y - binYOffset) * w + (lastPoint.x - binXOffset) + 1;

    int denseSize = nDensePts * valueSize;
    for (List<ContactRecord> row : rows.values()) {
        lorSize += 4 + row.size() * valueSize;
    }

    buffer.put((byte) (useShort ? 0 : 1));

    if (lorSize < denseSize) {

        buffer.put((byte) 1); // List of rows representation

        buffer.putShort((short) rows.size()); // # of rows

        for (Map.Entry<Integer, List<ContactRecord>> entry : rows.entrySet()) {

            int py = entry.getKey();
            List<ContactRecord> row = entry.getValue();
            buffer.putShort((short) py); // Row number
            buffer.putShort((short) row.size()); // size of row

            for (ContactRecord contactRecord : row) {
                buffer.putShort((short) (contactRecord.getBinX()));
                final float counts = contactRecord.getCounts();

                if (useShort) {
                    buffer.putShort((short) counts);
                } else {
                    buffer.putFloat(counts);
                }

                sampledData.add(counts);
                zd.sum += counts;
            }
        }

    } else {
        buffer.put((byte) 2); // Dense matrix

        buffer.putInt(nDensePts);
        buffer.putShort(w);

        int lastIdx = 0;
        for (Point p : keys) {

            int idx = (p.y - binYOffset) * w + (p.x - binXOffset);
            for (int i = lastIdx; i < idx; i++) {
                // Filler value
                if (useShort) {
                    buffer.putShort(Short.MIN_VALUE);
                } else {
                    buffer.putFloat(Float.NaN);
                }
            }
            float counts = records.get(p).getCounts();
            if (useShort) {
                buffer.putShort((short) counts);
            } else {
                buffer.putFloat(counts);
            }
            lastIdx = idx + 1;

            sampledData.add(counts);
            zd.sum += counts;
        }
    }

    byte[] bytes = buffer.getBytes();
    byte[] compressedBytes = compress(bytes);
    los.write(compressedBytes);

}
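
writeBlock() above groups the row-major-sorted contact records into per-row lists keyed by bin row, so the later iteration over rows.values() writes the rows in that same sorted order. A minimal, self-contained sketch of this group-by idiom (names and data are illustrative):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class GroupByRowSketch {
    public static void main(String[] args) {
        // {x, y} points already sorted by y, then x (row-major order).
        int[][] points = { { 5, 0 }, { 2, 1 }, { 7, 1 }, { 1, 2 } };

        LinkedHashMap<Integer, List<Integer>> rows = new LinkedHashMap<>();
        for (int[] p : points) {
            // The first point seen for a row decides that row's position in the map.
            rows.computeIfAbsent(p[1], k -> new ArrayList<>()).add(p[0]);
        }

        // values() yields the rows in first-insertion order: y = 0, 1, 2.
        for (List<Integer> row : rows.values()) {
            System.out.println(row);
        }
    }
}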