Example usage for java.util TreeMap get

List of usage examples for java.util TreeMap get

Introduction

On this page you can find example usage for java.util TreeMap get.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
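
A minimal sketch of the behavior described above, using hypothetical keys and values:

import java.util.TreeMap;

public class TreeMapGetExample {
    public static void main(String[] args) {
        // TreeMap keeps keys in sorted order; get() performs an O(log n) lookup by key.
        TreeMap<String, Integer> ages = new TreeMap<>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        System.out.println(ages.get("alice")); // 30
        System.out.println(ages.get("carol")); // null - no mapping for this key
    }
}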

Usage

From source file:org.opendatakit.database.data.ColumnDefinition.java

private static void getDataModelHelper(TreeMap<String, Object> jsonSchema, ColumnDefinition c,
        boolean nestedInsideUnitOfRetention) {
    ElementType type = c.getType();
    ElementDataType dataType = type.getDataType();

    // this is a user-defined field
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_DATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, c.getElementName());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, c.getElementKey());

    if (nestedInsideUnitOfRetention) {
        jsonSchema.put(JSON_SCHEMA_NOT_UNIT_OF_RETENTION, Boolean.TRUE);
    }

    if (dataType == ElementDataType.array) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
        ColumnDefinition ch = c.getChildren().get(0);
        TreeMap<String, Object> itemSchema = new TreeMap<>();
        jsonSchema.put(JSON_SCHEMA_ITEMS, itemSchema);
        itemSchema.put(JSON_SCHEMA_ELEMENT_PATH,
                (String) jsonSchema.get(JSON_SCHEMA_ELEMENT_PATH) + '.' + ch.getElementName());
        // if it isn't already nested within a unit of retention,
        // an array is always itself a unit of retention
        getDataModelHelper(itemSchema, ch, true); // recursion...

        ArrayList<String> keys = new ArrayList<String>();
        keys.add(ch.getElementKey());
        jsonSchema.put(JSON_SCHEMA_LIST_CHILD_ELEMENT_KEYS, keys);
    } else if (dataType == ElementDataType.bool) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else if (dataType == ElementDataType.configpath) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
    } else if (dataType == ElementDataType.integer || dataType == ElementDataType.number) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else if (dataType == ElementDataType.object) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
        TreeMap<String, Object> propertiesSchema = new TreeMap<>();
        jsonSchema.put(JSON_SCHEMA_PROPERTIES, propertiesSchema);
        ArrayList<String> keys = new ArrayList<>();
        for (ColumnDefinition ch : c.getChildren()) {
            TreeMap<String, Object> itemSchema = new TreeMap<>();
            propertiesSchema.put(ch.getElementName(), itemSchema);
            itemSchema.put(JSON_SCHEMA_ELEMENT_PATH,
                    (String) jsonSchema.get(JSON_SCHEMA_ELEMENT_PATH) + '.' + ch.getElementName());
            // objects are not units of retention -- propagate retention status.
            getDataModelHelper(itemSchema, ch, nestedInsideUnitOfRetention); // recursion...
            keys.add(ch.getElementKey());
        }
        jsonSchema.put(JSON_SCHEMA_LIST_CHILD_ELEMENT_KEYS, keys);
    } else if (dataType == ElementDataType.rowpath) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, ElementDataType.rowpath.name());
    } else if (dataType == ElementDataType.string) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else {
        throw new IllegalStateException("unexpected alternative ElementDataType");
    }
}

From source file:com.sun.faces.generate.RenderKitSpecificationGenerator.java

/**
 *
 * @return a SortedMap, where the keys are component-family String
 * entries, and the values are {@link RendererBean} instances
 */
public static SortedMap getComponentFamilyRendererMap(String rkId) throws IllegalStateException {
    RenderKitBean renderKit = null;
    RendererBean[] renderers = null;
    RendererBean renderer = null;
    TreeMap result = null;
    ArrayList list = null;
    String componentFamily = null;

    if (null == (renderKit = fcb.getRenderKit(rkId))) {
        RenderKitBean[] kits = null;
        if (null == (kits = fcb.getRenderKits())) {
            throw new IllegalStateException("no RenderKits");
        }
        if (null == (renderKit = kits[0])) {
            throw new IllegalStateException("no RenderKits");
        }
    }

    if (null == (renderers = renderKit.getRenderers())) {
        throw new IllegalStateException("no Renderers");
    }

    result = new TreeMap();

    for (int i = 0, len = renderers.length; i < len; i++) {
        if (null == (renderer = renderers[i])) {
            throw new IllegalStateException("no Renderer");
        }
        // if this is the first time we've encountered this
        // componentFamily
        if (null == (list = (ArrayList) result.get(componentFamily = renderer.getComponentFamily()))) {
            // create a list for it
            list = new ArrayList();
            list.add(renderer);
            result.put(componentFamily, list);
        } else {
            list.add(renderer);
        }
    }
    return result;
}
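
The get-then-put grouping above is a common TreeMap.get idiom: look the key up and create the value list on a miss. On Java 8 and later, Map.computeIfAbsent expresses the same grouping more directly; a minimal, self-contained sketch with hypothetical renderer data (not the RendererBean types above):

import java.util.ArrayList;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;

public class GroupByFamilyExample {
    public static void main(String[] args) {
        String[][] renderers = { { "family.output", "TextRenderer" },
                                 { "family.output", "LabelRenderer" },
                                 { "family.input", "FieldRenderer" } };

        // Group renderer names by component family; TreeMap keeps families sorted.
        SortedMap<String, List<String>> byFamily = new TreeMap<>();
        for (String[] r : renderers) {
            byFamily.computeIfAbsent(r[0], k -> new ArrayList<>()).add(r[1]);
        }
        System.out.println(byFamily);
        // {family.input=[FieldRenderer], family.output=[TextRenderer, LabelRenderer]}
    }
}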

From source file:org.apache.hadoop.raid.TestRaidHistogram.java

public void testRepeatSendingRecoveryTime() throws Exception {
    int rounds = 4;
    int nPercents = 2;
    int range = 1000000;
    int dividedRange = range / 1000;
    float step = 1.0f / nPercents;
    long gapTime = 3000L;
    ArrayList<Long> windows = new ArrayList<Long>();
    windows.add(gapTime);
    windows.add(3600000L);
    int sendRound = 2;
    try {
        mySetup();
        Configuration localConf = new Configuration(conf);
        localConf.set(BlockIntegrityMonitor.MONITOR_SECONDS_KEY, gapTime / 1000 + ",3600");
        cnode = RaidNode.createRaidNode(null, localConf);
        ArrayList<Float> percents = new ArrayList<Float>();

        for (int i = 0; i <= 2; i++) {
            percents.add(step * i);
        }
        Collections.shuffle(percents);
        for (int r = 0; r < rounds; r++) {
            // submit some data
            long sTime = System.currentTimeMillis();
            sendRecoveryTimes(2, 0, range, sendRound);
            LOG.info("Get blockFixStatus");
            String monitorDir = monitorDirs[0];
            TreeMap<Long, BlockFixStatus> status = cnode.blockIntegrityMonitor.getBlockFixStatus(monitorDir,
                    nPercents, percents, sTime + gapTime - 1000);
            printBlockFixStatus(status);
            assertTrue(status.containsKey(windows.get(0)));
            assertTrue(status.containsKey(windows.get(1)));
            BlockFixStatus bfs = status.get(windows.get(0));
            // Verify failed recovered files for the first window
            assertEquals("The number of failed recovery files should match", sendRound * nPercents,
                    bfs.failedPaths);
            // Verify percent values for the first window
            assertEquals(nPercents + 1, bfs.percentValues.length);
            assertEquals(0, bfs.percentValues[0]);
            for (int j = 1; j <= nPercents; j++) {
                assertEquals(dividedRange * j - 1, bfs.percentValues[j]);
            }
            bfs = status.get(windows.get(1));
            // Verify failed recovered files for the second window
            assertEquals("The number of failed recovery files should match", sendRound * nPercents,
                    bfs.failedPaths);
            // Verify percent values for the second window
            assertEquals(nPercents + 1, bfs.percentValues.length);
            assertEquals(0, bfs.percentValues[0]);
            for (int j = 1; j <= nPercents; j++) {
                assertEquals(dividedRange * j - 1, bfs.percentValues[j]);
            }
            Thread.sleep(gapTime + 1000);
            status = cnode.blockIntegrityMonitor.getBlockFixStatus(monitorDir, nPercents, percents,
                    System.currentTimeMillis());
            printBlockFixStatus(status);
            assertTrue(status.containsKey(windows.get(0)));
            assertTrue(status.containsKey(windows.get(1)));
            bfs = status.get(windows.get(0));
            // Verify failed recovered files for the first window
            assertEquals("The number of failed recovery files should be 0", 0, bfs.failedPaths);
            // Verify percent values for the first window, they should all be -1
            assertEquals(nPercents + 1, bfs.percentValues.length);
            assertEquals(-1, bfs.percentValues[0]);
            for (int j = 1; j <= nPercents; j++) {
                assertEquals(-1, bfs.percentValues[j]);
            }
        }
    } finally {
        myTearDown();
    }
}

From source file:edu.indiana.soic.ts.mapreduce.VectorCalculator.java

public void submitJob() {
    try {
        Configuration config = HBaseConfiguration.create();
        config.set("mapreduce.output.textoutputformat.separator", ",");
        TreeMap<String, List<Date>> genDates = TableUtils.genDates(TableUtils.getDate(startDate),
                TableUtils.getDate(endDate), this.window, TimeUnit.DAYS, this.headShift, this.tailShift,
                TimeUnit.DAYS);
        LOG.info("Start Date : {} End Date : {}, Gen dates size: {}", startDate, endDate, genDates.size());
        for (String id : genDates.keySet()) {
            LOG.info("Vector calculation for: {}", id);
            Scan scan = new Scan();
            scan.setCaching(500); // 1 is the default in Scan, which will be bad for MapReduce jobs
            scan.setCacheBlocks(false); // don't set to true for MR jobs
            List<Date> dates = genDates.get(id);
            String start = TableUtils.convertDateToString(dates.get(0));
            String end = TableUtils.convertDateToString(dates.get(1));
            List<String> suitableDateList = TableUtils.getDates(start, end);
            config.set(Constants.Job.NO_OF_DAYS, String.valueOf(suitableDateList.size()));
            LOG.info("Vector calculator for start: {}, end: {} time window: {}, shift: {}, days: {}", startDate,
                    endDate, window, headShift, suitableDateList.size());
            for (String date : suitableDateList) {
                scan.addColumn(Constants.STOCK_TABLE_CF_BYTES, date.getBytes());
            }
            Job job = new Job(config, "Vector calculation: " + id);
            job.setJarByClass(VectorCalculator.class);
            TableMapReduceUtil.initTableMapperJob(Constants.STOCK_TABLE_NAME, // input HBase table name
                    scan, // Scan instance to control CF and attribute selection
                    VectorCalculatorMapper.class, // mapper
                    IntWritable.class, // mapper output key
                    Text.class, // mapper output value
                    job);
            // adjust directories as required
            String outPutDir = tsConfiguration.getInterMediateVectorDir() + "/" + id;
            FileOutputFormat.setOutputPath(job, new Path(outPutDir));
            boolean b = job.waitForCompletion(true);
            if (!b) {
                LOG.error("Error with job for vector calculation");
                throw new RuntimeException("Error with job for vector calculation");
            }
            Utils.concatOutput(config, id, outPutDir, tsConfiguration.getVectorDir());
        }
    } catch (ParseException e) {
        LOG.error("Error while parsing date", e);
        throw new RuntimeException("Error while parsing date", e);
    } catch (InterruptedException | ClassNotFoundException | IOException e) {
        LOG.error("Error while creating the job", e);
        throw new RuntimeException("Error while creating the job", e);
    }
}
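
The loop above iterates genDates.keySet() and then calls genDates.get(id) for every key; on a TreeMap each get is an extra O(log n) lookup, which entrySet() avoids. A minimal sketch with hypothetical date-range data:

import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class EntrySetExample {
    public static void main(String[] args) {
        TreeMap<String, List<Date>> genDates = new TreeMap<>();
        genDates.put("window-1", Arrays.asList(new Date(0L), new Date(86_400_000L)));

        // entrySet() yields key and value together, avoiding a second lookup per key.
        for (Map.Entry<String, List<Date>> e : genDates.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}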

From source file:com.redhat.rhn.taskomatic.task.DailySummary.java

private StringBuffer renderActionTree(int longestActionLength, int longestStatusLength,
        LinkedHashSet<String> statusSet, TreeMap<String, Map<String, Integer>> actionTree) {
    StringBuffer formattedActions = new StringBuffer();
    for (String actionName : actionTree.keySet()) {
        formattedActions
                .append(actionName + StringUtils.repeat(" ", (longestActionLength - (actionName.length()))));
        for (String status : statusSet) {
            Map<String, Integer> counts = actionTree.get(actionName);
            Integer theCount = counts.get(status);
            if (theCount == null) {
                theCount = 0;
            }
            formattedActions.append(theCount);
            formattedActions.append(StringUtils.repeat(" ",
                    longestStatusLength + ERRATA_SPACER - theCount.toString().length()));
        }
        formattedActions.append("\n");
    }
    return formattedActions;
}
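
The lookup-with-default above can also be written with Map.getOrDefault on Java 8 and later; a minimal sketch with hypothetical status counts:

import java.util.Map;
import java.util.TreeMap;

public class GetOrDefaultExample {
    public static void main(String[] args) {
        Map<String, Integer> counts = new TreeMap<>();
        counts.put("Completed", 4);

        // getOrDefault returns the mapped value, or the supplied default when the key is absent.
        System.out.println(counts.getOrDefault("Completed", 0)); // 4
        System.out.println(counts.getOrDefault("Failed", 0));    // 0
    }
}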

From source file:org.wso2.carbon.apimgt.gateway.handlers.security.oauth.OAuthAuthenticator.java

public boolean authenticate(MessageContext synCtx) throws APISecurityException {
    String apiKey = null;
    boolean defaultVersionInvoked = false;
    Map headers = (Map) ((Axis2MessageContext) synCtx).getAxis2MessageContext()
            .getProperty(org.apache.axis2.context.MessageContext.TRANSPORT_HEADERS);

    if (headers != null) {
        requestOrigin = (String) headers.get("Origin");
        apiKey = extractCustomerKeyFromAuthHeader(headers);
        if (log.isDebugEnabled()) {
            log.debug(
                    apiKey != null ? "Received Token ".concat(apiKey) : "No valid Authorization header found");
        }
        //Check if client invoked the default version API (accessing API without version).
        defaultVersionInvoked = headers.containsKey(defaultAPIHeader);
    }

    if (defaultVersionInvoked && log.isDebugEnabled()) {
        log.debug("Default Version API invoked");
    }

    if (removeOAuthHeadersFromOutMessage) {
        headers.remove(securityHeader);
        if (log.isDebugEnabled()) {
            log.debug("Removing Authorization header from headers");
        }
    }
    if (removeDefaultAPIHeaderFromOutMessage) {
        headers.remove(defaultAPIHeader);
    }

    String apiContext = (String) synCtx.getProperty(RESTConstants.REST_API_CONTEXT);
    String apiVersion = (String) synCtx.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION);
    String httpMethod = (String) ((Axis2MessageContext) synCtx).getAxis2MessageContext()
            .getProperty(Constants.Configuration.HTTP_METHOD);

    String clientDomain = getClientDomain(synCtx);
    if (log.isDebugEnabled() && null != clientDomain) {
        log.debug("Received Client Domain ".concat(clientDomain));
    }
    //If the matching resource does not require authentication
    Timer timer = MetricManager.timer(org.wso2.carbon.metrics.manager.Level.INFO, MetricManager
            .name(APIConstants.METRICS_PREFIX, this.getClass().getSimpleName(), "GET_RESOURCE_AUTH"));
    Timer.Context context = timer.start();

    String authenticationScheme = keyValidator.getResourceAuthenticationScheme(synCtx);
    context.stop();
    APIKeyValidationInfoDTO info;
    if (APIConstants.AUTH_NO_AUTHENTICATION.equals(authenticationScheme)) {

        if (log.isDebugEnabled()) {
            log.debug("Found Authentication Scheme: ".concat(authenticationScheme));
        }

        // using existing constant in Message context, removing the additional constant in API Constants
        String clientIP = null;
        org.apache.axis2.context.MessageContext axis2MessageContext = ((Axis2MessageContext) synCtx)
                .getAxis2MessageContext();
        TreeMap<String, String> transportHeaderMap = (TreeMap<String, String>) axis2MessageContext
                .getProperty(org.apache.axis2.context.MessageContext.TRANSPORT_HEADERS);

        if (transportHeaderMap != null) {
            clientIP = transportHeaderMap.get(APIMgtGatewayConstants.X_FORWARDED_FOR);
        }

        //Setting IP of the client
        if (clientIP != null && !clientIP.isEmpty()) {
            if (clientIP.indexOf(",") > 0) {
                clientIP = clientIP.substring(0, clientIP.indexOf(","));
            }
        } else {
            clientIP = (String) axis2MessageContext
                    .getProperty(org.apache.axis2.context.MessageContext.REMOTE_ADDR);
        }

        //Create a dummy AuthenticationContext object with hard coded values for
        // Tier and KeyType. This is because we cannot determine the Tier nor Key
        // Type without subscription information.
        AuthenticationContext authContext = new AuthenticationContext();
        authContext.setAuthenticated(true);
        authContext.setTier(APIConstants.UNAUTHENTICATED_TIER);
        authContext.setStopOnQuotaReach(true); // Since we don't have details on the unauthenticated tier, we set stopOnQuotaReach to true
        //Requests are throttled by the ApiKey that is set here. In an unauthenticated scenario,
        //we will use the client's IP address for throttling.
        authContext.setApiKey(clientIP);
        authContext.setKeyType(APIConstants.API_KEY_TYPE_PRODUCTION);
        //This name is hardcoded as anonymous because there is no associated user token
        authContext.setUsername(APIConstants.END_USER_ANONYMOUS);
        authContext.setCallerToken(null);
        authContext.setApplicationName(null);
        authContext.setApplicationId(clientIP); //Set clientIp as application ID in unauthenticated scenario
        authContext.setConsumerKey(null);
        APISecurityUtils.setAuthenticationContext(synCtx, authContext, securityContextHeader);
        return true;
    } else if (APIConstants.NO_MATCHING_AUTH_SCHEME.equals(authenticationScheme)) {
        info = new APIKeyValidationInfoDTO();
        info.setAuthorized(false);
        info.setValidationStatus(900906);
    } else if (apiKey == null || apiContext == null || apiVersion == null) {
        if (log.isDebugEnabled()) {
            if (apiKey == null) {
                log.debug("OAuth headers not found");
            } else if (apiContext == null) {
                log.debug("Couldn't find API Context");
            } else if (apiVersion == null) {
                log.debug("Could not find api version");
            }
        }
        throw new APISecurityException(APISecurityConstants.API_AUTH_MISSING_CREDENTIALS,
                "Required OAuth credentials not provided");
    } else {
        String matchingResource = (String) synCtx.getProperty(APIConstants.API_ELECTED_RESOURCE);
        if (log.isDebugEnabled()) {
            log.debug("Matching resource is: ".concat(matchingResource));
        }
        org.apache.axis2.context.MessageContext axis2MessageCtx = ((Axis2MessageContext) synCtx)
                .getAxis2MessageContext();
        org.apache.axis2.context.MessageContext.setCurrentMessageContext(axis2MessageCtx);

        timer = MetricManager.timer(org.wso2.carbon.metrics.manager.Level.INFO, MetricManager
                .name(APIConstants.METRICS_PREFIX, this.getClass().getSimpleName(), "GET_KEY_VALIDATION_INFO"));
        context = timer.start();

        info = keyValidator.getKeyValidationInfo(apiContext, apiKey, apiVersion, authenticationScheme,
                clientDomain, matchingResource, httpMethod, defaultVersionInvoked);
        context.stop();

        synCtx.setProperty(APIMgtGatewayConstants.APPLICATION_NAME, info.getApplicationName());
        synCtx.setProperty(APIMgtGatewayConstants.END_USER_NAME, info.getEndUserName());
        synCtx.setProperty(APIMgtGatewayConstants.SCOPES,
                info.getScopes() == null ? null : info.getScopes().toString());
    }

    if (info.isAuthorized()) {
        AuthenticationContext authContext = new AuthenticationContext();
        authContext.setAuthenticated(true);
        authContext.setTier(info.getTier());
        authContext.setApiKey(apiKey);
        authContext.setKeyType(info.getType());
        if (info.getEndUserName() != null) {
            authContext.setUsername(info.getEndUserName());
        } else {
            authContext.setUsername(APIConstants.END_USER_ANONYMOUS);
        }
        authContext.setCallerToken(info.getEndUserToken());
        authContext.setApplicationId(info.getApplicationId());
        authContext.setApplicationName(info.getApplicationName());
        authContext.setApplicationTier(info.getApplicationTier());
        authContext.setSubscriber(info.getSubscriber());
        authContext.setConsumerKey(info.getConsumerKey());
        authContext.setApiTier(info.getApiTier());
        authContext.setThrottlingDataList(info.getThrottlingDataList());
        authContext.setSubscriberTenantDomain(info.getSubscriberTenantDomain());
        authContext.setSpikeArrestLimit(info.getSpikeArrestLimit());
        authContext.setSpikeArrestUnit(info.getSpikeArrestUnit());
        authContext.setStopOnQuotaReach(info.isStopOnQuotaReach());
        authContext.setIsContentAware(info.isContentAware());
        APISecurityUtils.setAuthenticationContext(synCtx, authContext, securityContextHeader);

        /* Synapse properties required for BAM Mediator*/
        //String tenantDomain = MultitenantUtils.getTenantDomain(info.getApiPublisher());
        synCtx.setProperty("api.ut.apiPublisher", info.getApiPublisher());
        synCtx.setProperty("API_NAME", info.getApiName());

        if (log.isDebugEnabled()) {
            log.debug("User is authorized to access the Resource");
        }
        return true;
    } else {
        if (log.isDebugEnabled()) {
            log.debug("User is NOT authorized to access the Resource");
        }
        throw new APISecurityException(info.getValidationStatus(),
                "Access failure for API: " + apiContext + ", version: " + apiVersion + " status: ("
                        + info.getValidationStatus() + ") - "
                        + APISecurityConstants.getAuthenticationFailureMessage(info.getValidationStatus()));
    }
}

From source file:com.fdu.jira.plugin.report.timesheet.TimeSheet.java

private Map<String, List<Worklog>> getWorklogMapByUser(List<Worklog> worklogObjects) {
    TreeMap<String, List<Worklog>> presult = new TreeMap<String, List<Worklog>>();
    for (Worklog w : worklogObjects) {
        List<Worklog> worklogs = presult.get(w.getAuthor());
        if (worklogs == null) {
            worklogs = new ArrayList<Worklog>();
            presult.put(w.getAuthor(), worklogs);
        }
        worklogs.add(w);
    }
    LinkedHashMap<String, List<Worklog>> result = new LinkedHashMap<String, List<Worklog>>();
    List<Map.Entry<String, List<Worklog>>> keyList = new ArrayList<Map.Entry<String, List<Worklog>>>(
            presult.entrySet());
    final UserManager userManager = ComponentAccessor.getUserManager();
    if (userManager != null)
        Collections.sort(keyList, new Comparator<Map.Entry<String, List<Worklog>>>() {
            public int compare(Map.Entry<String, List<Worklog>> e1, Map.Entry<String, List<Worklog>> e2) {
                User user1 = userManager.getUser(e1.getKey());
                User user2 = userManager.getUser(e2.getKey());
                String userFullName1 = (user1 != null) ? user1.getDisplayName() : e1.getKey();
                String userFullName2 = (user2 != null) ? user2.getDisplayName() : e2.getKey();
                return userFullName1.compareTo(userFullName2);
            }
        });
    for (Map.Entry<String, List<Worklog>> e : keyList) {
        result.put(e.getKey(), e.getValue());
    }
    return result;
}

From source file:io.mapzone.arena.analytics.graph.ui.FeaturePropertySelectorUI.java

private void fill() {
    if (combo != null && !combo.isDisposed() && featureSource != null) {

        final Collection<PropertyDescriptor> schemaDescriptors = featureSource.getSchema().getDescriptors();
        final GeometryDescriptor geometryDescriptor = featureSource.getSchema().getGeometryDescriptor();
        final TreeMap<String, PropertyDescriptor> properties = Maps.newTreeMap();
        for (PropertyDescriptor descriptor : schemaDescriptors) {
            if (geometryDescriptor == null || !geometryDescriptor.equals(descriptor)) {
                properties.put(descriptor.getName().getLocalPart(), descriptor);
            }
        }
        final List<String> columns = Lists.newArrayList();
        columns.addAll(properties.keySet());
        combo.setItems(columns.toArray(new String[properties.size()]));

        combo.addSelectionListener(new SelectionAdapter() {

            @Override
            public void widgetSelected(SelectionEvent e) {
                onSelect.accept(properties.get(columns.get(combo.getSelectionIndex())));
            }
        });
        combo.getParent().layout();
    }
}

From source file:org.apache.hadoop.hbase.stargate.client.RemoteHTable.java

public void put(List<Put> puts) throws IOException {
    // this is a trick: Stargate accepts multiple rows in a cell set and
    // ignores the row specification in the URI

    // separate puts by row
    TreeMap<byte[], List<KeyValue>> map = new TreeMap<byte[], List<KeyValue>>(Bytes.BYTES_COMPARATOR);
    for (Put put : puts) {
        byte[] row = put.getRow();
        List<KeyValue> kvs = map.get(row);
        if (kvs == null) {
            kvs = new ArrayList<KeyValue>();
            map.put(row, kvs);
        }
        for (List<KeyValue> l : put.getFamilyMap().values()) {
            kvs.addAll(l);
        }
    }

    // build the cell set
    CellSetModel model = new CellSetModel();
    for (Map.Entry<byte[], List<KeyValue>> e : map.entrySet()) {
        RowModel row = new RowModel(e.getKey());
        for (KeyValue kv : e.getValue()) {
            row.addCell(new CellModel(kv));
        }
        model.addRow(row);
    }

    // build path for multiput
    StringBuilder sb = new StringBuilder();
    sb.append('/');
    if (accessToken != null) {
        sb.append(accessToken);
        sb.append('/');
    }
    sb.append(Bytes.toStringBinary(name));
    sb.append("/$multiput"); // can be any nonexistent row
    for (int i = 0; i < maxRetries; i++) {
        Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
                model.createProtobufOutput());
        int code = response.getCode();
        switch (code) {
        case 200:
            return;
        case 509:
            try {
                Thread.sleep(sleepTime);
            } catch (InterruptedException e) {
            }
            break;
        default:
            throw new IOException("multiput request failed with " + code);
        }
    }
    throw new IOException("multiput request timed out");
}
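
TreeMap.get works with byte[] keys in the code above only because the map was built with Bytes.BYTES_COMPARATOR; byte[] does not override equals/hashCode, so a HashMap lookup with a different array instance would miss. A minimal sketch using a lexicographic comparator (Arrays.compare, Java 9+) and hypothetical row data:

import java.util.Arrays;
import java.util.TreeMap;

public class ByteArrayKeyExample {
    public static void main(String[] args) {
        // The comparator makes lookups compare array contents, not object identity.
        TreeMap<byte[], String> rows = new TreeMap<byte[], String>((a, b) -> Arrays.compare(a, b));
        rows.put(new byte[] { 1, 2, 3 }, "row-1");

        // A different array instance with the same contents still finds the value.
        System.out.println(rows.get(new byte[] { 1, 2, 3 })); // row-1
    }
}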

From source file:org.eobjects.datacleaner.widgets.properties.MultipleInputColumnsPropertyWidget.java

/**
 * Reorders the values
 * 
 * @param sortedValue
 */
public void reorderValue(final InputColumn<?>[] sortedValue) {
    // the offset represents the search textfield and the button panel
    final int offset = 2;

    // reorder the visual components
    for (int i = 0; i < sortedValue.length; i++) {
        InputColumn<?> inputColumn = sortedValue[i];
        JComponent decoration = getOrCreateCheckBoxDecoration(inputColumn, true);
        add(decoration, i + offset);
    }
    updateUI();

    // recreate the _checkBoxes map
    final TreeMap<InputColumn<?>, DCCheckBox<InputColumn<?>>> checkBoxesCopy = new TreeMap<InputColumn<?>, DCCheckBox<InputColumn<?>>>(
            _checkBoxes);
    _checkBoxes.clear();
    for (InputColumn<?> inputColumn : sortedValue) {
        _checkBoxes.put(inputColumn, checkBoxesCopy.get(inputColumn));
    }
    _checkBoxes.putAll(checkBoxesCopy);
}