Example usage for java.lang Byte valueOf

List of usage examples for java.lang Byte valueOf

Introduction

On this page you can find example usage for java.lang.Byte.valueOf.

Prototype

public static Byte valueOf(String s) throws NumberFormatException 

Document

Returns a Byte object holding the value given by the specified String.
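
For quick reference, a minimal self-contained sketch of the call and its failure mode (the input strings are illustrative):

public class ByteValueOfDemo {
    public static void main(String[] args) {
        Byte b = Byte.valueOf("42");     // parses decimal text into a boxed Byte
        System.out.println(b);           // 42

        try {
            Byte.valueOf("300");         // outside the byte range -128..127
        } catch (NumberFormatException e) {
            System.out.println("not a byte: " + e.getMessage());
        }
    }
}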

Usage

From source file:org.apache.hadoop.hive.ql.optimizer.ReduceSinkMapJoinProc.java

public static Object processReduceSinkToHashJoin(ReduceSinkOperator parentRS, MapJoinOperator mapJoinOp,
        GenTezProcContext context) throws SemanticException {
    // remove the tag for in-memory side of mapjoin
    parentRS.getConf().setSkipTag(true);
    parentRS.setSkipTag(true);

    // Mark this small table as being processed
    if (mapJoinOp.getConf().isDynamicPartitionHashJoin()) {
        context.mapJoinToUnprocessedSmallTableReduceSinks.get(mapJoinOp).remove(parentRS);
    }

    List<BaseWork> mapJoinWork = null;

    /*
     *  If there was pre-existing work generated for the big-table side of the mapjoin,
     *  we need to hook the work generated for the RS (associated with the RS-MJ pattern)
     *  into that pre-existing work.
     *
     *  Otherwise, we need to remember that the mapjoin op has to be linked
     *  to the RS work (associated with the RS-MJ pattern) later.
     */
    mapJoinWork = context.mapJoinWorkMap.get(mapJoinOp);
    BaseWork parentWork = getMapJoinParentWork(context, parentRS);

    // set the link between mapjoin and parent vertex
    int pos = context.mapJoinParentMap.get(mapJoinOp).indexOf(parentRS);
    if (pos == -1) {
        throw new SemanticException("Cannot find position of parent in mapjoin");
    }
    MapJoinDesc joinConf = mapJoinOp.getConf();
    long keyCount = Long.MAX_VALUE, rowCount = Long.MAX_VALUE, bucketCount = 1;
    long tableSize = Long.MAX_VALUE;
    Statistics stats = parentRS.getStatistics();
    if (stats != null) {
        keyCount = rowCount = stats.getNumRows();
        if (keyCount <= 0) {
            keyCount = rowCount = Long.MAX_VALUE;
        }
        tableSize = stats.getDataSize();
        ArrayList<String> keyCols = parentRS.getConf().getOutputKeyColumnNames();
        if (keyCols != null && !keyCols.isEmpty()) {
            // See if we can arrive at a smaller number using distinct stats from key columns.
            long maxKeyCount = 1;
            String prefix = Utilities.ReduceField.KEY.toString();
            for (String keyCol : keyCols) {
                ExprNodeDesc realCol = parentRS.getColumnExprMap().get(prefix + "." + keyCol);
                ColStatistics cs = StatsUtils.getColStatisticsFromExpression(context.conf, stats, realCol);
                if (cs == null || cs.getCountDistint() <= 0) {
                    maxKeyCount = Long.MAX_VALUE;
                    break;
                }
                maxKeyCount *= cs.getCountDistint();
                if (maxKeyCount >= keyCount) {
                    break;
                }
            }
            keyCount = Math.min(maxKeyCount, keyCount);
        }
        if (joinConf.isBucketMapJoin()) {
            OpTraits opTraits = mapJoinOp.getOpTraits();
            bucketCount = (opTraits == null) ? -1 : opTraits.getNumBuckets();
            if (bucketCount > 0) {
                // We cannot obtain a better estimate without CustomPartitionVertex providing it
                // to us somehow; in which case using statistics would be completely unnecessary.
                keyCount /= bucketCount;
                tableSize /= bucketCount;
            }
        } else if (joinConf.isDynamicPartitionHashJoin()) {
            // For dynamic partitioned hash join, assuming table is split evenly among the reduce tasks.
            bucketCount = parentRS.getConf().getNumReducers();
            keyCount /= bucketCount;
            tableSize /= bucketCount;
        }
    }
    LOG.info("Mapjoin " + mapJoinOp + ", pos: " + pos + " --> " + parentWork.getName() + " (" + keyCount
            + " keys estimated from " + rowCount + " rows, " + bucketCount + " buckets)");
    joinConf.getParentToInput().put(pos, parentWork.getName());
    if (keyCount != Long.MAX_VALUE) {
        joinConf.getParentKeyCounts().put(pos, keyCount);
    }
    joinConf.getParentDataSizes().put(pos, tableSize);

    int numBuckets = -1;
    EdgeType edgeType = EdgeType.BROADCAST_EDGE;
    if (joinConf.isBucketMapJoin()) {

        // disable auto parallelism for bucket map joins
        parentRS.getConf().setReducerTraits(EnumSet.of(FIXED));

        numBuckets = (Integer) joinConf.getBigTableBucketNumMapping().values().toArray()[0];
        /*
         * Here, we can be in one of 4 states.
         *
         * 1. If map join work is null implies that we have not yet traversed the big table side. We
         * just need to see if we can find a reduce sink operator in the big table side. This would
         * imply a reduce side operation.
         *
         * 2. If we don't find a reduce sink in 1, it has to be a map side operation.
         *
         * 3. If we have already created a work item for the big table side, we need to see if we can
         * find a table scan operator in the big table side. This would imply a map side operation.
         *
         * 4. If we don't find a table scan operator, it has to be a reduce side operation.
         */
        if (mapJoinWork == null) {
            Operator<?> rootOp = OperatorUtils.findSingleOperatorUpstream(
                    mapJoinOp.getParentOperators().get(joinConf.getPosBigTable()), ReduceSinkOperator.class);
            if (rootOp == null) {
                // likely we found a table scan operator
                edgeType = EdgeType.CUSTOM_EDGE;
            } else {
                // we have found a reduce sink
                edgeType = EdgeType.CUSTOM_SIMPLE_EDGE;
            }
        } else {
            Operator<?> rootOp = OperatorUtils.findSingleOperatorUpstream(
                    mapJoinOp.getParentOperators().get(joinConf.getPosBigTable()), TableScanOperator.class);
            if (rootOp != null) {
                // likely we found a table scan operator
                edgeType = EdgeType.CUSTOM_EDGE;
            } else {
                // we have found a reduce sink
                edgeType = EdgeType.CUSTOM_SIMPLE_EDGE;
            }
        }
    } else if (mapJoinOp.getConf().isDynamicPartitionHashJoin()) {
        edgeType = EdgeType.CUSTOM_SIMPLE_EDGE;
    }
    TezEdgeProperty edgeProp = new TezEdgeProperty(null, edgeType, numBuckets);

    if (mapJoinWork != null) {
        for (BaseWork myWork : mapJoinWork) {
            // link the work with the work associated with the reduce sink that triggered this rule
            TezWork tezWork = context.currentTask.getWork();
            LOG.debug("connecting " + parentWork.getName() + " with " + myWork.getName());
            tezWork.connect(parentWork, myWork, edgeProp);
            if (edgeType == EdgeType.CUSTOM_EDGE) {
                tezWork.setVertexType(myWork, VertexType.INITIALIZED_EDGES);
            }

            ReduceSinkOperator r = null;
            if (context.connectedReduceSinks.contains(parentRS)) {
                LOG.debug("Cloning reduce sink for multi-child broadcast edge");
                // we've already set this one up. Need to clone for the next work.
                r = (ReduceSinkOperator) OperatorFactory.getAndMakeChild(
                        (ReduceSinkDesc) parentRS.getConf().clone(), new RowSchema(parentRS.getSchema()),
                        parentRS.getParentOperators());
                context.clonedReduceSinks.add(r);
            } else {
                r = parentRS;
            }
            // remember the output name of the reduce sink
            r.getConf().setOutputName(myWork.getName());
            context.connectedReduceSinks.add(r);
        }
    }

    // remember in case we need to connect additional work later
    Map<BaseWork, TezEdgeProperty> linkWorkMap = null;
    if (context.linkOpWithWorkMap.containsKey(mapJoinOp)) {
        linkWorkMap = context.linkOpWithWorkMap.get(mapJoinOp);
    } else {
        linkWorkMap = new HashMap<BaseWork, TezEdgeProperty>();
    }
    linkWorkMap.put(parentWork, edgeProp);
    context.linkOpWithWorkMap.put(mapJoinOp, linkWorkMap);

    List<ReduceSinkOperator> reduceSinks = context.linkWorkWithReduceSinkMap.get(parentWork);
    if (reduceSinks == null) {
        reduceSinks = new ArrayList<ReduceSinkOperator>();
    }
    reduceSinks.add(parentRS);
    context.linkWorkWithReduceSinkMap.put(parentWork, reduceSinks);

    // create the dummy operators
    List<Operator<?>> dummyOperators = new ArrayList<Operator<?>>();

    // create a new operator: HashTableDummyOperator, which shares the table desc
    HashTableDummyDesc desc = new HashTableDummyDesc();
    @SuppressWarnings("unchecked")
    HashTableDummyOperator dummyOp = (HashTableDummyOperator) OperatorFactory.get(desc);
    TableDesc tbl;

    // need to create the correct table descriptor for key/value
    RowSchema rowSchema = parentRS.getParentOperators().get(0).getSchema();
    tbl = PlanUtils.getReduceValueTableDesc(PlanUtils.getFieldSchemasFromRowSchema(rowSchema, ""));
    dummyOp.getConf().setTbl(tbl);

    Map<Byte, List<ExprNodeDesc>> keyExprMap = mapJoinOp.getConf().getKeys();
    List<ExprNodeDesc> keyCols = keyExprMap.get(Byte.valueOf((byte) 0));
    StringBuilder keyOrder = new StringBuilder();
    for (ExprNodeDesc k : keyCols) {
        keyOrder.append("+"); // one ascending ("+") sort marker per key column
    }
    TableDesc keyTableDesc = PlanUtils.getReduceKeyTableDesc(
            PlanUtils.getFieldSchemasFromColumnList(keyCols, "mapjoinkey"), keyOrder.toString());
    mapJoinOp.getConf().setKeyTableDesc(keyTableDesc);

    // let the dummy op be the parent of mapjoin op
    mapJoinOp.replaceParent(parentRS, dummyOp);
    List<Operator<? extends OperatorDesc>> dummyChildren = new ArrayList<Operator<? extends OperatorDesc>>();
    dummyChildren.add(mapJoinOp);
    dummyOp.setChildOperators(dummyChildren);
    dummyOperators.add(dummyOp);

    // cut the operator tree so as to not retain connections from the parent RS downstream
    List<Operator<? extends OperatorDesc>> childOperators = parentRS.getChildOperators();
    int childIndex = childOperators.indexOf(mapJoinOp);
    childOperators.remove(childIndex);

    // the "work" needs to know about the dummy operators. They have to be separately initialized
    // at task startup
    if (mapJoinWork != null) {
        for (BaseWork myWork : mapJoinWork) {
            myWork.addDummyOp(dummyOp);
        }
    }
    if (context.linkChildOpWithDummyOp.containsKey(mapJoinOp)) {
        for (Operator<?> op : context.linkChildOpWithDummyOp.get(mapJoinOp)) {
            dummyOperators.add(op);
        }
    }
    context.linkChildOpWithDummyOp.put(mapJoinOp, dummyOperators);

    return true;
}
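
The Byte.valueOf((byte) 0) lookup near the end matters: the MapJoinDesc keys map is keyed by the boxed byte alias of each join input, so a plain int literal would autobox to Integer and silently return null. A minimal sketch of that pitfall, using a plain map in place of the Hive types:

import java.util.HashMap;
import java.util.Map;

public class ByteKeyLookup {
    public static void main(String[] args) {
        Map<Byte, String> byAlias = new HashMap<>();
        byAlias.put(Byte.valueOf((byte) 0), "big table key exprs");

        System.out.println(byAlias.get(Byte.valueOf((byte) 0))); // big table key exprs
        System.out.println(byAlias.get(0)); // null: 0 autoboxes to Integer, not Byte
    }
}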

From source file:com.bosscs.spark.commons.utils.Utils.java

public static Object castingUtil(String value, Class classCasting) {
    Object object = value;

    //Numeric
    if (Number.class.isAssignableFrom(classCasting)) {
        if (classCasting.isAssignableFrom(Double.class)) {
            return Double.valueOf(value);
        } else if (classCasting.isAssignableFrom(Long.class)) {
            return Long.valueOf(value);

        } else if (classCasting.isAssignableFrom(Float.class)) {
            return Float.valueOf(value);

        } else if (classCasting.isAssignableFrom(Integer.class)) {
            return Integer.valueOf(value);

        } else if (classCasting.isAssignableFrom(Short.class)) {
            return Short.valueOf(value);

        } else if (classCasting.isAssignableFrom(Byte.class)) {
            return Byte.valueOf(value);
        }
    } else if (String.class.isAssignableFrom(classCasting)) {
        return object.toString();
    }
    // Class not recognized yet
    return null;

}
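
A hypothetical call site for the utility above (inputs are illustrative, not from the source); note that Number subtypes outside the handled set fall through to the null return:

Object b = Utils.castingUtil("42", Byte.class);       // Byte.valueOf("42") -> 42
Object s = Utils.castingUtil("42", String.class);     // "42"
Object x = Utils.castingUtil("42", BigDecimal.class); // null: a Number, but no branch matches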

From source file:org.apache.flink.api.java.utils.ParameterTool.java

/**
 * Returns the Byte value for the given key.
 * The method fails if the key does not exist.
 */
public byte getByte(String key) {
    addToDefaults(key, null);
    String value = getRequired(key);
    return Byte.valueOf(value);
}
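
A sketch of how this accessor is typically reached; ParameterTool.fromArgs is part of the same Flink class, and the flag name here is illustrative:

// program started with e.g.: --buffer-timeout 5
ParameterTool params = ParameterTool.fromArgs(args);
byte timeout = params.getByte("buffer-timeout"); // throws if the key is absent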

From source file:net.sf.ezmorph.bean.BeanMorpherTest.java

public void testMorph_PrimitiveBean_to_TypedBean() {
    PrimitiveBean primitiveBean = new PrimitiveBean();
    primitiveBean.setPclass(Object.class);
    primitiveBean.setPstring("MORPH");
    morpherRegistry.registerMorpher(new BeanMorpher(TypedBean.class, morpherRegistry));
    TypedBean typedBean = (TypedBean) morpherRegistry.morph(TypedBean.class, primitiveBean);
    assertNotNull(typedBean);
    assertEquals(Boolean.FALSE, typedBean.getPboolean());
    assertEquals(Byte.valueOf("0"), typedBean.getPbyte());
    assertEquals(Short.valueOf("0"), typedBean.getPshort());
    assertEquals(Integer.valueOf("0"), typedBean.getPint());
    assertEquals(Long.valueOf("0"), typedBean.getPlong());
    assertEquals(Float.valueOf("0"), typedBean.getPfloat());
    assertEquals(Double.valueOf("0"), typedBean.getPdouble());
    assertEquals(new Character('\0'), typedBean.getPchar());
    assertEquals(null, typedBean.getParray());
    assertEquals(null, typedBean.getPlist());
    assertEquals(null, typedBean.getPbean());
    assertEquals(null, typedBean.getPmap());
    assertEquals("MORPH", typedBean.getPstring());
    assertEquals(Object.class, typedBean.getPclass());
}

From source file:com.jaspersoft.jasperserver.api.engine.scheduling.ReportSchedulingFacade.java

private String getSetOfBytesAsString(Set set, Map<Byte, String> namesMap) {
    if (set == null || set.isEmpty()) {
        return null;
    }

    StringBuilder sb = new StringBuilder();
    for (Object element : set) {
        if (sb.length() > 0) {
            sb.append(",");
        }
        sb.append(namesMap.get(Byte.valueOf(element.toString())));
    }

    return sb.toString();
}
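
A hypothetical invocation of the helper above (the map contents are invented for illustration): each set element stringifies to numeric text, and Byte.valueOf turns that text back into the map's Byte keys:

import java.util.*;

public class ByteSetNamesDemo {
    public static void main(String[] args) {
        Map<Byte, String> names = new HashMap<>();
        names.put(Byte.valueOf("2"), "TUESDAY");
        names.put(Byte.valueOf("4"), "THURSDAY");

        Set<Byte> days = new HashSet<>(Arrays.asList((byte) 2, (byte) 4));
        // getSetOfBytesAsString(days, names) would yield "TUESDAY,THURSDAY"
        // (element order depends on the set's iteration order)
    }
}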

From source file:org.broadleafcommerce.openadmin.server.service.persistence.module.provider.BasicFieldPersistenceProvider.java

@Override
public FieldProviderResponse populateValue(PopulateValueRequest populateValueRequest, Serializable instance) {
    if (!canHandlePersistence(populateValueRequest, instance)) {
        return FieldProviderResponse.NOT_HANDLED;
    }
    boolean dirty = false;
    try {
        Property prop = populateValueRequest.getProperty();
        Object origValue = populateValueRequest.getFieldManager().getFieldValue(instance, prop.getName());
        switch (populateValueRequest.getMetadata().getFieldType()) {
        case BOOLEAN:
            boolean v = Boolean.valueOf(populateValueRequest.getRequestedValue());
            prop.setOriginalValue(String.valueOf(origValue));
            prop.setOriginalDisplayValue(prop.getOriginalValue());
            try {
                dirty = checkDirtyState(populateValueRequest, instance, v);
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), v);
            } catch (IllegalArgumentException e) {
                char c = v ? 'Y' : 'N';
                dirty = checkDirtyState(populateValueRequest, instance, c);
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), c);
            }
            break;
        case DATE:
            Date date = (Date) populateValueRequest.getFieldManager().getFieldValue(instance,
                    populateValueRequest.getProperty().getName());
            String oldValue = null;
            if (date != null) {
                oldValue = populateValueRequest.getDataFormatProvider().getSimpleDateFormatter().format(date);
            }
            prop.setOriginalValue(oldValue);
            prop.setOriginalDisplayValue(prop.getOriginalValue());
            dirty = !StringUtils.equals(oldValue, populateValueRequest.getRequestedValue());
            populateValueRequest.getFieldManager().setFieldValue(instance,
                    populateValueRequest.getProperty().getName(), populateValueRequest.getDataFormatProvider()
                            .getSimpleDateFormatter().parse(populateValueRequest.getRequestedValue()));
            break;
        case DECIMAL:
            if (origValue != null) {
                prop.setOriginalValue(String.valueOf(origValue));
                prop.setOriginalDisplayValue(prop.getOriginalValue());
            }
            if (BigDecimal.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                DecimalFormat format = populateValueRequest.getDataFormatProvider().getDecimalFormatter();
                format.setParseBigDecimal(true);
                BigDecimal val = (BigDecimal) format.parse(populateValueRequest.getRequestedValue());
                dirty = checkDirtyState(populateValueRequest, instance, val);

                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), val);
                format.setParseBigDecimal(false);
            } else {
                Double val = populateValueRequest.getDataFormatProvider().getDecimalFormatter()
                        .parse(populateValueRequest.getRequestedValue()).doubleValue();
                dirty = checkDirtyState(populateValueRequest, instance, val);
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), val);
            }
            break;
        case MONEY:
            if (origValue != null) {
                prop.setOriginalValue(String.valueOf(origValue));
                prop.setOriginalDisplayValue(prop.getOriginalValue());
            }
            if (BigDecimal.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                DecimalFormat format = populateValueRequest.getDataFormatProvider().getDecimalFormatter();
                format.setParseBigDecimal(true);
                BigDecimal val = (BigDecimal) format.parse(populateValueRequest.getRequestedValue());
                dirty = checkDirtyState(populateValueRequest, instance, val);
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), val);
                format.setParseBigDecimal(true);
            } else if (Double.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                Double val = populateValueRequest.getDataFormatProvider().getDecimalFormatter()
                        .parse(populateValueRequest.getRequestedValue()).doubleValue();
                dirty = checkDirtyState(populateValueRequest, instance, val);
                LOG.warn("The requested Money field is of type double and could result in a loss of precision."
                        + " Broadleaf recommends that the type of all Money fields are 'BigDecimal' in order to avoid"
                        + " this loss of precision that could occur.");
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), val);
            } else {
                DecimalFormat format = populateValueRequest.getDataFormatProvider().getDecimalFormatter();
                format.setParseBigDecimal(true);
                BigDecimal val = (BigDecimal) format.parse(populateValueRequest.getRequestedValue());
                dirty = checkDirtyState(populateValueRequest, instance, val);
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), new Money(val));
                format.setParseBigDecimal(false);
            }
            break;
        case INTEGER:
            if (origValue != null) {
                prop.setOriginalValue(String.valueOf(origValue));
                prop.setOriginalDisplayValue(prop.getOriginalValue());
            }
            if (int.class.isAssignableFrom(populateValueRequest.getReturnType())
                    || Integer.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                dirty = checkDirtyState(populateValueRequest, instance,
                        Integer.valueOf(populateValueRequest.getRequestedValue()));
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(),
                        Integer.valueOf(populateValueRequest.getRequestedValue()));
            } else if (byte.class.isAssignableFrom(populateValueRequest.getReturnType())
                    || Byte.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                dirty = checkDirtyState(populateValueRequest, instance,
                        Byte.valueOf(populateValueRequest.getRequestedValue()));
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(),
                        Byte.valueOf(populateValueRequest.getRequestedValue()));
            } else if (short.class.isAssignableFrom(populateValueRequest.getReturnType())
                    || Short.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                dirty = checkDirtyState(populateValueRequest, instance,
                        Short.valueOf(populateValueRequest.getRequestedValue()));
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(),
                        Short.valueOf(populateValueRequest.getRequestedValue()));
            } else if (long.class.isAssignableFrom(populateValueRequest.getReturnType())
                    || Long.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                dirty = checkDirtyState(populateValueRequest, instance,
                        Long.valueOf(populateValueRequest.getRequestedValue()));
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(),
                        Long.valueOf(populateValueRequest.getRequestedValue()));
            }
            break;
        case CODE:
            // **NOTE** We want to fall through in this case, do not break.
            setNonDisplayableValues(populateValueRequest);
        case STRING:
        case HTML_BASIC:
        case HTML:
        case EMAIL:
            if (origValue != null) {
                prop.setOriginalValue(String.valueOf(origValue));
                prop.setOriginalDisplayValue(prop.getOriginalValue());
            }
            dirty = checkDirtyState(populateValueRequest, instance, populateValueRequest.getRequestedValue());
            populateValueRequest.getFieldManager().setFieldValue(instance,
                    populateValueRequest.getProperty().getName(), populateValueRequest.getRequestedValue());
            break;
        case FOREIGN_KEY: {
            if (origValue != null) {
                prop.setOriginalValue(String.valueOf(origValue));
            }
            Serializable foreignInstance;
            if (StringUtils.isEmpty(populateValueRequest.getRequestedValue())) {
                foreignInstance = null;
            } else {
                if (SupportedFieldType.INTEGER.toString()
                        .equals(populateValueRequest.getMetadata().getSecondaryType().toString())) {
                    foreignInstance = populateValueRequest.getPersistenceManager().getDynamicEntityDao()
                            .retrieve(Class.forName(populateValueRequest.getMetadata().getForeignKeyClass()),
                                    Long.valueOf(populateValueRequest.getRequestedValue()));
                } else {
                    foreignInstance = populateValueRequest.getPersistenceManager().getDynamicEntityDao()
                            .retrieve(Class.forName(populateValueRequest.getMetadata().getForeignKeyClass()),
                                    populateValueRequest.getRequestedValue());
                }
            }

            if (Collection.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                Collection collection;
                try {
                    collection = (Collection) populateValueRequest.getFieldManager().getFieldValue(instance,
                            populateValueRequest.getProperty().getName());
                } catch (FieldNotAvailableException e) {
                    throw new IllegalArgumentException(e);
                }
                if (!collection.contains(foreignInstance)) {
                    collection.add(foreignInstance);
                    dirty = true;
                }
            } else if (Map.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                throw new IllegalArgumentException("Map structures are not supported for foreign key fields.");
            } else {
                dirty = checkDirtyState(populateValueRequest, instance, foreignInstance);
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), foreignInstance);
            }
            break;
        }
        case ADDITIONAL_FOREIGN_KEY: {
            Serializable foreignInstance;
            if (StringUtils.isEmpty(populateValueRequest.getRequestedValue())) {
                foreignInstance = null;
            } else {
                if (SupportedFieldType.INTEGER.toString()
                        .equals(populateValueRequest.getMetadata().getSecondaryType().toString())) {
                    foreignInstance = populateValueRequest.getPersistenceManager().getDynamicEntityDao()
                            .retrieve(Class.forName(populateValueRequest.getMetadata().getForeignKeyClass()),
                                    Long.valueOf(populateValueRequest.getRequestedValue()));
                } else {
                    foreignInstance = populateValueRequest.getPersistenceManager().getDynamicEntityDao()
                            .retrieve(Class.forName(populateValueRequest.getMetadata().getForeignKeyClass()),
                                    populateValueRequest.getRequestedValue());
                }
            }

            // Best guess at grabbing the original display value
            String fkProp = populateValueRequest.getMetadata().getForeignKeyDisplayValueProperty();
            Object origDispVal = null;
            if (origValue != null) {
                if (AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY.equals(fkProp)) {
                    if (origValue instanceof AdminMainEntity) {
                        origDispVal = ((AdminMainEntity) origValue).getMainEntityName();
                    }
                } else {
                    origDispVal = populateValueRequest.getFieldManager().getFieldValue(origValue, fkProp);
                }
            }
            if (origDispVal != null) {
                prop.setOriginalDisplayValue(String.valueOf(origDispVal));
                Session session = populateValueRequest.getPersistenceManager().getDynamicEntityDao()
                        .getStandardEntityManager().unwrap(Session.class);
                prop.setOriginalValue(String.valueOf(session.getIdentifier(foreignInstance)));
            }

            if (Collection.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                Collection collection;
                try {
                    collection = (Collection) populateValueRequest.getFieldManager().getFieldValue(instance,
                            populateValueRequest.getProperty().getName());
                } catch (FieldNotAvailableException e) {
                    throw new IllegalArgumentException(e);
                }
                if (!collection.contains(foreignInstance)) {
                    collection.add(foreignInstance);
                    dirty = true;
                }
            } else if (Map.class.isAssignableFrom(populateValueRequest.getReturnType())) {
                throw new IllegalArgumentException("Map structures are not supported for foreign key fields.");
            } else {
                dirty = checkDirtyState(populateValueRequest, instance, foreignInstance);
                populateValueRequest.getFieldManager().setFieldValue(instance,
                        populateValueRequest.getProperty().getName(), foreignInstance);
            }
            break;
        }
        case ID:
            if (populateValueRequest.getSetId()) {
                switch (populateValueRequest.getMetadata().getSecondaryType()) {
                case INTEGER:
                    dirty = checkDirtyState(populateValueRequest, instance,
                            Long.valueOf(populateValueRequest.getRequestedValue()));
                    populateValueRequest.getFieldManager().setFieldValue(instance,
                            populateValueRequest.getProperty().getName(),
                            Long.valueOf(populateValueRequest.getRequestedValue()));
                    break;
                case STRING:
                    dirty = checkDirtyState(populateValueRequest, instance,
                            populateValueRequest.getRequestedValue());
                    populateValueRequest.getFieldManager().setFieldValue(instance,
                            populateValueRequest.getProperty().getName(),
                            populateValueRequest.getRequestedValue());
                    break;
                }
            }
            break;
        }
    } catch (Exception e) {
        throw new PersistenceException(e);
    }
    populateValueRequest.getProperty().setIsDirty(dirty);
    return FieldProviderResponse.HANDLED;
}

From source file:org.apache.flink.api.java.utils.ParameterTool.java

/**
 * Returns the Byte value for the given key. If the key does not exist, it will return the given default value.
 * The method fails if the value is not a Byte.
 */
public byte getByte(String key, byte defaultValue) {
    addToDefaults(key, Byte.toString(defaultValue));
    String value = get(key);
    if (value == null) {
        return defaultValue;
    } else {
        return Byte.valueOf(value);
    }
}
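
And a one-line sketch of this defaulting variant (the key name is again illustrative):

byte retries = params.getByte("retries", (byte) 3); // 3 whenever --retries is not supplied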

From source file:de.javakaffee.web.msm.serializer.javolution.JavolutionTranscoderTest.java

@DataProvider(name = "typesAsSessionAttributesProvider")
protected Object[][] createTypesAsSessionAttributesData() {
    return new Object[][] { { int.class, 42 }, { long.class, 42 }, { Boolean.class, Boolean.TRUE },
            { String.class, "42" }, { StringBuilder.class, new StringBuilder("42") },
            { StringBuffer.class, new StringBuffer("42") }, { Class.class, String.class },
            { Long.class, Long.valueOf(42) }, { Integer.class, Integer.valueOf(42) },
            { Character.class, Character.valueOf('c') }, { Byte.class, Byte.valueOf("b".getBytes()[0]) },
            { Double.class, Double.valueOf(42d) }, { Float.class, Float.valueOf(42f) },
            { Short.class, Short.valueOf((short) 42) }, { BigDecimal.class, new BigDecimal(42) },
            { AtomicInteger.class, new AtomicInteger(42) }, { AtomicLong.class, new AtomicLong(42) },
            { MutableInt.class, new MutableInt(42) }, { Integer[].class, new Integer[] { 42 } },
            { Date.class, new Date(System.currentTimeMillis() - 10000) },
            { Calendar.class, Calendar.getInstance() }, { Currency.class, Currency.getInstance("EUR") },
            { ArrayList.class, new ArrayList<String>(Arrays.asList("foo")) },
            { int[].class, new int[] { 1, 2 } }, { long[].class, new long[] { 1, 2 } },
            { short[].class, new short[] { 1, 2 } }, { float[].class, new float[] { 1, 2 } },
            { double[].class, new double[] { 1, 2 } },
            { byte[].class, "42".getBytes() }, { char[].class, "42".toCharArray() },
            { String[].class, new String[] { "23", "42" } },
            { Person[].class, new Person[] { createPerson("foo bar", Gender.MALE, 42) } } };
}
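
The Byte entry in this provider uses the Byte.valueOf(byte) overload rather than the String one. Because a byte has only 256 possible values, java.lang.Byte caches every boxed instance, so valueOf returns the same object for the same value; a minimal sketch:

public class ByteCacheDemo {
    public static void main(String[] args) {
        byte raw = "b".getBytes()[0]; // 'b' is 98 in ASCII-compatible charsets
        System.out.println(Byte.valueOf(raw) == Byte.valueOf(raw));       // true: cached
        System.out.println(Byte.valueOf("98").equals(Byte.valueOf(raw))); // true: same value
    }
}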

From source file:org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.java

public static Object randomObject(int column, Random r, PrimitiveCategory[] primitiveCategories,
        PrimitiveTypeInfo[] primitiveTypeInfos, String[] alphabets, boolean addEscapables,
        String needsEscapeStr) {
    PrimitiveCategory primitiveCategory = primitiveCategories[column];
    PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
    try {
        switch (primitiveCategory) {
        case BOOLEAN:
            return Boolean.valueOf(r.nextInt(2) == 1); // nextInt(1) would always yield 0, i.e. always false
        case BYTE:
            return Byte.valueOf((byte) r.nextInt());
        case SHORT:
            return Short.valueOf((short) r.nextInt());
        case INT:
            return Integer.valueOf(r.nextInt());
        case LONG:
            return Long.valueOf(r.nextLong());
        case DATE:
            return RandomTypeUtil.getRandDate(r);
        case FLOAT:
            return Float.valueOf(r.nextFloat() * 10 - 5);
        case DOUBLE:
            return Double.valueOf(r.nextDouble() * 10 - 5);
        case STRING:
        case CHAR:
        case VARCHAR: {
            String result;
            if (alphabets != null && alphabets[column] != null) {
                result = RandomTypeUtil.getRandString(r, alphabets[column], r.nextInt(10));
            } else {
                result = RandomTypeUtil.getRandString(r);
            }
            if (addEscapables && result.length() > 0) {
                int escapeCount = 1 + r.nextInt(2);
                for (int i = 0; i < escapeCount; i++) {
                    int index = r.nextInt(result.length());
                    String begin = result.substring(0, index);
                    String end = result.substring(index);
                    Character needsEscapeChar = needsEscapeStr.charAt(r.nextInt(needsEscapeStr.length()));
                    result = begin + needsEscapeChar + end;
                }
            }
            switch (primitiveCategory) {
            case STRING:
                return result;
            case CHAR:
                return new HiveChar(result, ((CharTypeInfo) primitiveTypeInfo).getLength());
            case VARCHAR:
                return new HiveVarchar(result, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
            default:
                throw new Error("Unknown primitive category " + primitiveCategory);
            }
        }
        case BINARY:
            return getRandBinary(r, 1 + r.nextInt(100));
        case TIMESTAMP:
            return RandomTypeUtil.getRandTimestamp(r);
        case INTERVAL_YEAR_MONTH:
            return getRandIntervalYearMonth(r);
        case INTERVAL_DAY_TIME:
            return getRandIntervalDayTime(r);
        case DECIMAL:
            return getRandHiveDecimal(r, (DecimalTypeInfo) primitiveTypeInfo);
        default:
            throw new Error("Unknown primitive category " + primitiveCategory);
        }
    } catch (Exception e) {
        throw new RuntimeException("randomObject failed on column " + column + " type " + primitiveCategory, e);
    }
}
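
The BYTE case above leans on Java's narrowing conversion: casting an int to byte keeps only the low 8 bits, so any random int lands in the byte range without a range check. A minimal sketch:

public class NarrowingDemo {
    public static void main(String[] args) {
        int n = 300;
        byte b = (byte) n;                   // keeps the low 8 bits: 300 & 0xFF = 44
        System.out.println(b);               // 44
        System.out.println(Byte.valueOf(b)); // boxes the already-narrowed value: 44
    }
}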

From source file:de.blizzy.backup.backup.BackupRun.java

private void backupFile(IFile file, int parentFolderId) throws IOException {
    currentFileOrFolder.add(file);
    try {
        if ((numEntries % 50) == 0) {
            checkDiskSpaceAndRemoveOldBackups();
        }

        fireBackupStatusChanged(new BackupStatus(file.getAbsolutePath(), numEntries, totalEntries));

        FileTime creationTime = file.getCreationTime();
        FileTime lastModificationTime = file.getLastModificationTime();

        int fileId = -1;
        if (settings.isUseChecksums()) {
            String checksum = getChecksum(file);
            fileId = findOldFileViaChecksum(file, checksum);
        } else {
            fileId = findOldFileViaTimestamp(file);
        }
        EntryType type = EntryType.FILE;
        if (fileId <= 0) {
            try {
                String backupFilePath = Utils.createBackupFilePath(settings.getOutputFolder());
                File backupFile = Utils.toBackupFile(backupFilePath, settings.getOutputFolder());
                fileId = backupFileContents(file, backupFile, backupFilePath);
            } catch (IOException e) {
                BackupPlugin.getDefault().logError("error while backing up file: " + //$NON-NLS-1$
                        file.getAbsolutePath(), e);
                // file might be in use at this time, so only show a warning instead of an error
                fireBackupErrorOccurred(e, Severity.WARNING);
                type = EntryType.FAILED_FILE;
            }
        }

        database.factory().insertInto(Tables.ENTRIES)
                .set(Tables.ENTRIES.PARENT_ID, Integer.valueOf(parentFolderId))
                .set(Tables.ENTRIES.BACKUP_ID, Integer.valueOf(backupId))
                .set(Tables.ENTRIES.TYPE, Byte.valueOf((byte) type.getValue()))
                .set(Tables.ENTRIES.CREATION_TIME,
                        (creationTime != null) ? new Timestamp(creationTime.toMillis()) : null)
                .set(Tables.ENTRIES.MODIFICATION_TIME,
                        (lastModificationTime != null) ? new Timestamp(lastModificationTime.toMillis()) : null)
                .set(Tables.ENTRIES.HIDDEN, Boolean.valueOf(file.isHidden()))
                .set(Tables.ENTRIES.NAME, file.getName())
                .set(Tables.ENTRIES.NAME_LOWER, file.getName().toLowerCase())
                .set(Tables.ENTRIES.FILE_ID, (fileId > 0) ? Integer.valueOf(fileId) : null).execute();

        numEntries++;
    } finally {
        currentFileOrFolder.remove(currentFileOrFolder.size() - 1);
    }
}