List of usage examples for java.lang.Byte.valueOf
public static Byte valueOf(String s) throws NumberFormatException
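Before the project examples, a minimal, self-contained sketch of what this method does (the class name ByteValueOfDemo is illustrative, not from any project below): it parses the string as a signed decimal byte, returning a boxed Byte, and throws NumberFormatException for non-numeric input or values outside the byte range -128..127.

public class ByteValueOfDemo {
    public static void main(String[] args) {
        // Parses the string as a signed decimal byte and returns a boxed Byte.
        Byte parsed = Byte.valueOf("100");
        System.out.println(parsed); // prints 100

        // Out-of-range or non-numeric input throws NumberFormatException.
        try {
            Byte.valueOf("300");
        } catch (NumberFormatException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}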
From source file:uk.codingbadgers.SurvivalPlus.backup.PlayerBackup.java
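Restores a player's inventory from a JSON backup; Byte.valueOf converts the serialized "data" string back into the item's data byte.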
/**
 * Convert a JSON array into an ItemStack array.
 *
 * @param array The JSON array to convert
 * @return An ItemStack array built from the given JSON array
 */
@SuppressWarnings("deprecation")
private ItemStack[] JSONArrayToItemStackArray(JSONArray array) {
    List<ItemStack> items = new ArrayList<ItemStack>(array.size());
    for (Object itemObject : array) {
        if (!(itemObject instanceof JSONObject)) {
            continue;
        }
        JSONObject jsonItem = (JSONObject) itemObject;

        // Parse item
        ItemStack item = new ItemStack(Material.valueOf((String) jsonItem.get("type")));
        item.setAmount(Integer.valueOf((String) jsonItem.get("amount")));
        item.setDurability(Short.valueOf((String) jsonItem.get("durability")));
        item.getData().setData(Byte.valueOf((String) jsonItem.get("data")));

        // Parse enchantments
        JSONArray enchantments = (JSONArray) jsonItem.get("enchantment");
        for (Object enchantmentObject : enchantments) {
            if (!(enchantmentObject instanceof JSONObject)) {
                continue;
            }
            JSONObject jsonEnchantment = (JSONObject) enchantmentObject;
            Enchantment enchantment = Enchantment.getByName((String) jsonEnchantment.get("id"));
            int enchantmentLevel = Integer.valueOf((String) jsonEnchantment.get("level"));
            item.addUnsafeEnchantment(enchantment, enchantmentLevel);
        }

        // Parse metadata
        if (jsonItem.containsKey("metadata")) {
            JSONObject metaData = (JSONObject) jsonItem.get("metadata");
            ItemMeta itemMeta = item.getItemMeta();
            if (metaData.containsKey("displayname")) {
                itemMeta.setDisplayName((String) metaData.get("displayname"));
            }
            if (metaData.containsKey("lores")) {
                List<String> lores = new ArrayList<String>();
                JSONArray jsonLores = (JSONArray) metaData.get("lores");
                for (Object loreObject : jsonLores) {
                    lores.add((String) loreObject);
                }
                itemMeta.setLore(lores);
            }
            item.setItemMeta(itemMeta);
        }

        items.add(item);
    }
    return items.toArray(new ItemStack[items.size()]);
}
From source file:org.apache.hadoop.hive.ql.optimizer.spark.SparkReduceSinkMapJoinProc.java
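A Hive-on-Spark optimizer rule that converts a ReduceSink-MapJoin pattern; Byte.valueOf((byte) 0) boxes the tag used to look up the big-table key expressions.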
@SuppressWarnings("unchecked")
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procContext, Object... nodeOutputs)
        throws SemanticException {
    GenSparkProcContext context = (GenSparkProcContext) procContext;

    if (!nd.getClass().equals(MapJoinOperator.class)) {
        return null;
    }

    MapJoinOperator mapJoinOp = (MapJoinOperator) nd;
    if (stack.size() < 2 || !(stack.get(stack.size() - 2) instanceof ReduceSinkOperator)) {
        context.currentMapJoinOperators.add(mapJoinOp);
        return null;
    }

    context.preceedingWork = null;
    context.currentRootOperator = null;

    ReduceSinkOperator parentRS = (ReduceSinkOperator) stack.get(stack.size() - 2);
    // remove the tag for the in-memory side of the mapjoin
    parentRS.getConf().setSkipTag(true);
    parentRS.setSkipTag(true);

    // remember the original parent list before we start modifying it.
    if (!context.mapJoinParentMap.containsKey(mapJoinOp)) {
        List<Operator<?>> parents = new ArrayList<Operator<?>>(mapJoinOp.getParentOperators());
        context.mapJoinParentMap.put(mapJoinOp, parents);
    }

    List<BaseWork> mapJoinWork;

    /*
     * If there was pre-existing work generated for the big-table mapjoin side,
     * we need to hook the work generated for the RS (associated with the RS-MJ pattern)
     * up with the pre-existing work.
     *
     * Otherwise, we need to associate the mapjoin op
     * with the RS work (associated with the RS-MJ pattern).
     */
    mapJoinWork = context.mapJoinWorkMap.get(mapJoinOp);
    int workMapSize = context.childToWorkMap.get(parentRS).size();
    Preconditions.checkArgument(workMapSize == 1,
            "AssertionError: expected context.childToWorkMap.get(parentRS).size() to be 1, but was "
                    + workMapSize);
    BaseWork parentWork = context.childToWorkMap.get(parentRS).get(0);

    // set the link between the mapjoin and the parent vertex
    int pos = context.mapJoinParentMap.get(mapJoinOp).indexOf(parentRS);
    if (pos == -1) {
        throw new SemanticException("Cannot find position of parent in mapjoin");
    }
    LOG.debug("Mapjoin " + mapJoinOp + ", pos: " + pos + " --> " + parentWork.getName());
    mapJoinOp.getConf().getParentToInput().put(pos, parentWork.getName());

    SparkEdgeProperty edgeProp = new SparkEdgeProperty(SparkEdgeProperty.SHUFFLE_NONE);

    if (mapJoinWork != null) {
        for (BaseWork myWork : mapJoinWork) {
            // link the work with the work associated with the reduce sink that triggered this rule
            SparkWork sparkWork = context.currentTask.getWork();
            LOG.debug("connecting " + parentWork.getName() + " with " + myWork.getName());
            sparkWork.connect(parentWork, myWork, edgeProp);
        }
    }

    // remember in case we need to connect additional work later
    Map<BaseWork, SparkEdgeProperty> linkWorkMap = null;
    if (context.linkOpWithWorkMap.containsKey(mapJoinOp)) {
        linkWorkMap = context.linkOpWithWorkMap.get(mapJoinOp);
    } else {
        linkWorkMap = new HashMap<BaseWork, SparkEdgeProperty>();
    }
    linkWorkMap.put(parentWork, edgeProp);
    context.linkOpWithWorkMap.put(mapJoinOp, linkWorkMap);

    List<ReduceSinkOperator> reduceSinks = context.linkWorkWithReduceSinkMap.get(parentWork);
    if (reduceSinks == null) {
        reduceSinks = new ArrayList<ReduceSinkOperator>();
    }
    reduceSinks.add(parentRS);
    context.linkWorkWithReduceSinkMap.put(parentWork, reduceSinks);

    // create the dummy operators
    List<Operator<?>> dummyOperators = new ArrayList<Operator<?>>();

    // create a new operator: HashTableDummyOperator, which shares the table desc
    HashTableDummyDesc desc = new HashTableDummyDesc();
    HashTableDummyOperator dummyOp = (HashTableDummyOperator) OperatorFactory.get(desc);
    TableDesc tbl;

    // need to create the correct table descriptor for key/value
    RowSchema rowSchema = parentRS.getParentOperators().get(0).getSchema();
    tbl = PlanUtils.getReduceValueTableDesc(PlanUtils.getFieldSchemasFromRowSchema(rowSchema, ""));
    dummyOp.getConf().setTbl(tbl);

    Map<Byte, List<ExprNodeDesc>> keyExprMap = mapJoinOp.getConf().getKeys();
    List<ExprNodeDesc> keyCols = keyExprMap.get(Byte.valueOf((byte) 0));
    StringBuilder keyOrder = new StringBuilder();
    for (int i = 0; i < keyCols.size(); i++) {
        keyOrder.append("+");
    }
    TableDesc keyTableDesc = PlanUtils.getReduceKeyTableDesc(
            PlanUtils.getFieldSchemasFromColumnList(keyCols, "mapjoinkey"), keyOrder.toString());
    mapJoinOp.getConf().setKeyTableDesc(keyTableDesc);

    // let the dummy op be the parent of the mapjoin op
    mapJoinOp.replaceParent(parentRS, dummyOp);
    List<Operator<? extends OperatorDesc>> dummyChildren = new ArrayList<Operator<? extends OperatorDesc>>();
    dummyChildren.add(mapJoinOp);
    dummyOp.setChildOperators(dummyChildren);
    dummyOperators.add(dummyOp);

    // cut the operator tree so as to not retain connections from the parent RS downstream
    List<Operator<? extends OperatorDesc>> childOperators = parentRS.getChildOperators();
    int childIndex = childOperators.indexOf(mapJoinOp);
    childOperators.remove(childIndex);

    // the "work" needs to know about the dummy operators. They have to be
    // separately initialized at task startup.
    if (mapJoinWork != null) {
        for (BaseWork myWork : mapJoinWork) {
            myWork.addDummyOp(dummyOp);
        }
    }

    if (context.linkChildOpWithDummyOp.containsKey(mapJoinOp)) {
        for (Operator<?> op : context.linkChildOpWithDummyOp.get(mapJoinOp)) {
            dummyOperators.add(op);
        }
    }
    context.linkChildOpWithDummyOp.put(mapJoinOp, dummyOperators);

    // replace the ReduceSinkOp with a HashTableSinkOp for the RS ops that are parents of the MJ op
    MapJoinDesc mjDesc = mapJoinOp.getConf();
    HiveConf conf = context.conf;

    // Unlike in MR, we may call this method multiple times, once for each
    // small-table HTS. But since it is idempotent, that should be OK.
    mjDesc.resetOrder();

    float hashtableMemoryUsage;
    if (hasGroupBy(mapJoinOp, context)) {
        hashtableMemoryUsage = conf.getFloatVar(HiveConf.ConfVars.HIVEHASHTABLEFOLLOWBYGBYMAXMEMORYUSAGE);
    } else {
        hashtableMemoryUsage = conf.getFloatVar(HiveConf.ConfVars.HIVEHASHTABLEMAXMEMORYUSAGE);
    }
    mjDesc.setHashTableMemoryUsage(hashtableMemoryUsage);

    SparkHashTableSinkDesc hashTableSinkDesc = new SparkHashTableSinkDesc(mjDesc);
    SparkHashTableSinkOperator hashTableSinkOp = (SparkHashTableSinkOperator) OperatorFactory
            .get(hashTableSinkDesc);

    byte tag = (byte) pos;
    int[] valueIndex = mjDesc.getValueIndex(tag);
    if (valueIndex != null) {
        List<ExprNodeDesc> newValues = new ArrayList<ExprNodeDesc>();
        List<ExprNodeDesc> values = hashTableSinkDesc.getExprs().get(tag);
        for (int index = 0; index < values.size(); index++) {
            if (valueIndex[index] < 0) {
                newValues.add(values.get(index));
            }
        }
        hashTableSinkDesc.getExprs().put(tag, newValues);
    }

    // get all parents of the reduce sink
    List<Operator<? extends OperatorDesc>> rsParentOps = parentRS.getParentOperators();
    for (Operator<? extends OperatorDesc> parent : rsParentOps) {
        parent.replaceChild(parentRS, hashTableSinkOp);
    }
    hashTableSinkOp.setParentOperators(rsParentOps);
    hashTableSinkOp.getConf().setTag(tag);
    return true;
}
From source file:com.ebay.nest.io.sede.RegexSerDe.java
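A regex-based Hive SerDe that converts each matched column group to its declared type; TINYINT columns are parsed with Byte.valueOf.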
@Override
public Object deserialize(Writable blob) throws SerDeException {
    Text rowText = (Text) blob;

    Matcher m = inputPattern.matcher(rowText.toString());
    if (m.groupCount() != numColumns) {
        throw new SerDeException("Number of matching groups doesn't match the number of columns");
    }

    // If the row does not match, ignore the line and return a row with all nulls.
    if (!m.matches()) {
        unmatchedRowsCount++;
        if (!alreadyLoggedNoMatch) {
            // Report the row the first time it happens
            LOG.warn("" + unmatchedRowsCount + " unmatched rows are found: " + rowText);
            alreadyLoggedNoMatch = true;
        }
        return null;
    }

    // Otherwise, return the row.
    for (int c = 0; c < numColumns; c++) {
        try {
            String t = m.group(c + 1);
            TypeInfo typeInfo = columnTypes.get(c);
            String typeName = typeInfo.getTypeName();
            // Convert the column to the correct type when needed and set it in the row object
            if (typeName.equals(serdeConstants.STRING_TYPE_NAME)) {
                row.set(c, t);
            } else if (typeName.equals(serdeConstants.TINYINT_TYPE_NAME)) {
                row.set(c, Byte.valueOf(t));
            } else if (typeName.equals(serdeConstants.SMALLINT_TYPE_NAME)) {
                row.set(c, Short.valueOf(t));
            } else if (typeName.equals(serdeConstants.INT_TYPE_NAME)) {
                row.set(c, Integer.valueOf(t));
            } else if (typeName.equals(serdeConstants.BIGINT_TYPE_NAME)) {
                row.set(c, Long.valueOf(t));
            } else if (typeName.equals(serdeConstants.FLOAT_TYPE_NAME)) {
                row.set(c, Float.valueOf(t));
            } else if (typeName.equals(serdeConstants.DOUBLE_TYPE_NAME)) {
                row.set(c, Double.valueOf(t));
            } else if (typeName.equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
                row.set(c, Boolean.valueOf(t));
            } else if (typeName.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
                row.set(c, Timestamp.valueOf(t));
            } else if (typeName.equals(serdeConstants.DATE_TYPE_NAME)) {
                row.set(c, Date.valueOf(t));
            } else if (typeName.equals(serdeConstants.DECIMAL_TYPE_NAME)) {
                row.set(c, new HiveDecimal(t));
            } else if (typeInfo instanceof PrimitiveTypeInfo
                    && ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory() == PrimitiveCategory.VARCHAR) {
                VarcharTypeParams varcharParams = (VarcharTypeParams) ParameterizedPrimitiveTypeUtils
                        .getTypeParamsFromTypeInfo(typeInfo);
                HiveVarchar hv = new HiveVarchar(t, varcharParams != null ? varcharParams.length : -1);
                row.set(c, hv);
            }
        } catch (RuntimeException e) {
            partialMatchedRowsCount++;
            if (!alreadyLoggedPartialMatch) {
                // Report the row the first time it happens
                LOG.warn("" + partialMatchedRowsCount + " partially unmatched rows are found, "
                        + " cannot find group " + c + ": " + rowText);
                alreadyLoggedPartialMatch = true;
            }
            row.set(c, null);
        }
    }
    return row;
}
From source file:org.apache.hadoop.hive.ql.exec.SkewJoinHandler.java
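Detects skewed keys during a Hive join; Byte.valueOf boxes the big-key tag before recording it in the bigKeysExistingMap.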
public void handleSkew(int tag) throws HiveException {
    if (joinOp.newGroupStarted || tag != currTag) {
        rowNumber = 0;
        currTag = tag;
    }

    if (joinOp.newGroupStarted) {
        currBigKeyTag = -1;
        joinOp.newGroupStarted = false;
        dummyKey = (List<Object>) joinOp.getGroupKeyObject();
        skewKeyInCurrentGroup = false;

        for (int i = 0; i < numAliases; i++) {
            RowContainer<ArrayList<Object>> rc = (RowContainer) joinOp.storage[i];
            if (rc != null) {
                rc.setKeyObject(dummyKey);
            }
        }
    }

    rowNumber++;
    if (currBigKeyTag == -1 && (tag < numAliases - 1) && rowNumber >= skewKeyDefinition) {
        // The first time we see a big key. If this key is not in the last
        // table (the last table can always be streamed), we decide that we
        // have hit a skew key now.
        currBigKeyTag = tag;
        updateSkewJoinJobCounter(tag);
        // Right now we assume that the group-by key is an ArrayList object.
        // This may change in the future.
        if (!(dummyKey instanceof List)) {
            throw new RuntimeException("Bug in handling a skew key in a separate job.");
        }
        skewKeyInCurrentGroup = true;
        bigKeysExistingMap.put(Byte.valueOf((byte) currBigKeyTag), Boolean.TRUE);
    }
}
From source file:cn.clickvalue.cv2.model.rowmapper.BeanPropertyRowMapper.java
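A Spring-style row mapper that boxes rs.getByte(index) with Byte.valueOf when a byte or Byte column value is requested.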
/**
 * Retrieve a JDBC column value from a ResultSet, using the specified value type.
 * <p>Uses the specifically typed ResultSet accessor methods, falling back to
 * {@link #getResultSetValue(java.sql.ResultSet, int)} for unknown types.
 * <p>Note that the returned value may not be assignable to the specified
 * required type, in case of an unknown type. Calling code needs to deal
 * with this case appropriately, e.g. throwing a corresponding exception.
 * @param rs is the ResultSet holding the data
 * @param index is the column index
 * @param requiredType the required value type (may be <code>null</code>)
 * @return the value object
 * @throws SQLException if thrown by the JDBC API
 */
public static Object getResultSetValue(ResultSet rs, int index, Class requiredType) throws SQLException {
    if (requiredType == null) {
        return getResultSetValue(rs, index);
    }

    Object value = null;
    boolean wasNullCheck = false;

    // Explicitly extract typed value, as far as possible.
    if (String.class.equals(requiredType)) {
        value = rs.getString(index);
    } else if (boolean.class.equals(requiredType) || Boolean.class.equals(requiredType)) {
        value = Boolean.valueOf(rs.getBoolean(index));
        wasNullCheck = true;
    } else if (byte.class.equals(requiredType) || Byte.class.equals(requiredType)) {
        value = Byte.valueOf(rs.getByte(index));
        wasNullCheck = true;
    } else if (short.class.equals(requiredType) || Short.class.equals(requiredType)) {
        value = Short.valueOf(rs.getShort(index));
        wasNullCheck = true;
    } else if (int.class.equals(requiredType) || Integer.class.equals(requiredType)) {
        value = Integer.valueOf(rs.getInt(index));
        wasNullCheck = true;
    } else if (long.class.equals(requiredType) || Long.class.equals(requiredType)) {
        value = Long.valueOf(rs.getLong(index));
        wasNullCheck = true;
    } else if (float.class.equals(requiredType) || Float.class.equals(requiredType)) {
        value = Float.valueOf(rs.getFloat(index));
        wasNullCheck = true;
    } else if (double.class.equals(requiredType) || Double.class.equals(requiredType)
            || Number.class.equals(requiredType)) {
        value = Double.valueOf(rs.getDouble(index));
        wasNullCheck = true;
    } else if (byte[].class.equals(requiredType)) {
        value = rs.getBytes(index);
    } else if (java.sql.Date.class.equals(requiredType)) {
        value = rs.getDate(index);
    } else if (java.sql.Time.class.equals(requiredType)) {
        value = rs.getTime(index);
    } else if (java.sql.Timestamp.class.equals(requiredType) || java.util.Date.class.equals(requiredType)) {
        value = rs.getTimestamp(index);
    } else if (BigDecimal.class.equals(requiredType)) {
        value = rs.getBigDecimal(index);
    } else if (Blob.class.equals(requiredType)) {
        value = rs.getBlob(index);
    } else if (Clob.class.equals(requiredType)) {
        value = rs.getClob(index);
    } else {
        // Some unknown type desired -> rely on getObject.
        value = getResultSetValue(rs, index);
    }

    // Perform a was-null check if demanded (for results that the
    // JDBC driver returns as primitives).
    if (wasNullCheck && value != null && rs.wasNull()) {
        value = null;
    }
    return value;
}
From source file:org.droidparts.persist.sql.EntityManager.java
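An Android ORM that reads a byte-typed column by parsing the cursor's string value with Byte.valueOf.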
protected Object readFromCursor(Cursor cursor, int columnIndex, Class<?> valType, Class<?> arrCollItemType)
        throws IllegalArgumentException {
    if (cursor.isNull(columnIndex)) {
        return null;
    } else if (isBoolean(valType)) {
        return cursor.getInt(columnIndex) == 1;
    } else if (isByte(valType)) {
        return Byte.valueOf(cursor.getString(columnIndex));
    } else if (isByteArray(valType)) {
        return cursor.getBlob(columnIndex);
    } else if (isDouble(valType)) {
        return cursor.getDouble(columnIndex);
    } else if (isFloat(valType)) {
        return cursor.getFloat(columnIndex);
    } else if (isInteger(valType)) {
        return cursor.getInt(columnIndex);
    } else if (isLong(valType)) {
        return cursor.getLong(columnIndex);
    } else if (isShort(valType)) {
        return cursor.getShort(columnIndex);
    } else if (isString(valType)) {
        return cursor.getString(columnIndex);
    } else if (isUUID(valType)) {
        return UUID.fromString(cursor.getString(columnIndex));
    } else if (isDate(valType)) {
        return new Date(cursor.getLong(columnIndex));
    } else if (isBitmap(valType)) {
        byte[] arr = cursor.getBlob(columnIndex);
        return BitmapFactory.decodeByteArray(arr, 0, arr.length);
    } else if (isJsonObject(valType) || isJsonArray(valType)) {
        String str = cursor.getString(columnIndex);
        try {
            return isJsonObject(valType) ? new JSONObject(str) : new JSONArray(str);
        } catch (JSONException e) {
            throw new IllegalArgumentException(e);
        }
    } else if (isEnum(valType)) {
        return instantiateEnum(valType, cursor.getString(columnIndex));
    } else if (isEntity(valType)) {
        long id = cursor.getLong(columnIndex);
        @SuppressWarnings("unchecked")
        Entity entity = instantiate((Class<Entity>) valType);
        entity.id = id;
        return entity;
    } else if (isArray(valType) || isCollection(valType)) {
        String str = cursor.getString(columnIndex);
        String[] parts = (str.length() > 0) ? str.split("\\" + SEP) : new String[0];
        if (isArray(valType)) {
            return toTypeArr(arrCollItemType, parts);
        } else {
            @SuppressWarnings("unchecked")
            Collection<Object> coll = (Collection<Object>) instantiate(valType);
            coll.addAll(toTypeColl(arrCollItemType, parts));
            return coll;
        }
    } else {
        throw new IllegalArgumentException("Need to manually read " + valType.getName() + " from cursor.");
    }
}
From source file:org.grouplens.grapht.BindingImpl.java
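Coerces a bound value to the binding's source type; for Byte targets it normalizes through BigInteger and boxes the narrowed result with Byte.valueOf.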
private Object coerce(Object in) {
    Class<?> boxedSource = Types.box(sourceType);
    if (Integer.class.equals(boxedSource)) {
        // normalize to BigInteger and then cast to int
        return Integer.valueOf(toBigInteger(in).intValue());
    } else if (Short.class.equals(boxedSource)) {
        // normalize to BigInteger and then cast to short
        return Short.valueOf(toBigInteger(in).shortValue());
    } else if (Byte.class.equals(boxedSource)) {
        // normalize to BigInteger and then cast to byte
        return Byte.valueOf(toBigInteger(in).byteValue());
    } else if (Long.class.equals(boxedSource)) {
        // normalize to BigInteger and then cast to long
        return Long.valueOf(toBigInteger(in).longValue());
    } else if (Float.class.equals(boxedSource)) {
        // normalize to BigDecimal and then cast to float
        return Float.valueOf(toBigDecimal(in).floatValue());
    } else if (Double.class.equals(boxedSource)) {
        // normalize to BigDecimal and then cast to double
        return Double.valueOf(toBigDecimal(in).doubleValue());
    } else if (BigDecimal.class.equals(boxedSource)) {
        // normalize to BigDecimal
        return toBigDecimal(in);
    } else if (BigInteger.class.equals(boxedSource)) {
        // normalize to BigInteger
        return toBigInteger(in);
    } else {
        // don't perform any type coercion
        return in;
    }
}
From source file:org.op4j.functions.FnObject.java
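Builds an op4j "equal by compareTo" function, boxing the primitive byte argument with Byte.valueOf(byte) before wrapping it.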
/**
 * <p>
 * Determines whether the target object and the specified object are equal
 * in value, that is, whether <tt>target.compareTo(object) == 0</tt>. Both
 * the target and the specified object have to implement {@link Comparable}.
 * </p>
 *
 * @param object the object to compare to the target
 * @return true if both objects are equal according to "compareTo", false if not.
 */
public static final Function<Object, Boolean> eqValue(final byte object) {
    return new EqualValue(Byte.valueOf(object));
}
From source file:com.mozilla.testpilot.hive.serde.TestPilotJsonSerde.java
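A Hive SerDe for Mozilla Test Pilot JSON logs; TINYINT columns are parsed from their string values with Byte.valueOf(String).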
/**
 * Deserialize a JSON object into a row for the table.
 */
@SuppressWarnings("unchecked")
@Override
public Object deserialize(Writable blob) throws SerDeException {
    String rowText = ((Text) blob).toString();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Deserialize row: " + rowText);
    }

    // Try parsing the row into a JSON object
    Map<String, Object> values = new HashMap<String, Object>();
    try {
        Map<String, Object> tempValues = jsonMapper.readValue(rowText,
                new TypeReference<Map<String, Object>>() {
                });

        // Metadata
        if (tempValues.containsKey("metadata")) {
            Map<String, Object> metadata = (Map<String, Object>) tempValues.get("metadata");
            Map<String, String> preferencesMap = new HashMap<String, String>();
            for (Map.Entry<String, Object> metaEntry : metadata.entrySet()) {
                String key = metaEntry.getKey();
                Object vo = metaEntry.getValue();
                // Extensions
                if ("extensions".equals(key)) {
                    List<Object> extensions = (List<Object>) vo;
                    Map<String, Boolean> extensionMap = new HashMap<String, Boolean>();
                    for (Object o : extensions) {
                        Map<String, Object> ex = (Map<String, Object>) o;
                        String id = (String) ex.get("id");
                        Boolean isEnabled = (Boolean) ex.get("isEnabled");
                        extensionMap.put(id, isEnabled);
                    }
                    values.put("extensions", extensionMap);
                // Accessibilities
                } else if ("accessibilities".equals(key)) {
                    List<Object> accessibilities = (List<Object>) vo;
                    Map<String, String> accessibilityMap = new HashMap<String, String>();
                    for (Object o : accessibilities) {
                        Map<String, Object> a = (Map<String, Object>) o;
                        String name = (String) a.get("name");
                        // Get a string value of everything since we have mixed types
                        String v = String.valueOf(a.get("value"));
                        accessibilityMap.put(name, v);
                    }
                    values.put("accessibilities", accessibilityMap);
                // Hack preferences
                } else if (key.startsWith("Preference")) {
                    String name = key.replace("Preference ", "");
                    preferencesMap.put(name.toLowerCase(), String.valueOf(metaEntry.getValue()));
                } else if ("Sync configured".equals(key)) {
                    preferencesMap.put("sync.configured", String.valueOf(metaEntry.getValue()));
                // Leave survey answers as a JSON value for now
                } else if ("surveyAnswers".equals(key)) {
                    values.put("surveyanswers", jsonMapper.writeValueAsString(vo));
                // "location" is a Hive keyword
                } else if ("location".equals(key)) {
                    values.put("loc", vo);
                } else {
                    values.put(key.toLowerCase(), vo);
                }
            }
            if (preferencesMap.size() > 0) {
                values.put("preferences", preferencesMap);
            }
        }

        // Events
        if (tempValues.containsKey("events")) {
            List<List<Long>> events = (List<List<Long>>) tempValues.get("events");
            values.put("events", events);
        }
    } catch (JsonParseException e) {
        LOG.error("JSON Parse Error", e);
    } catch (JsonMappingException e) {
        LOG.error("JSON Mapping Error", e);
    } catch (IOException e) {
        LOG.error("IOException during JSON parsing", e);
    }

    if (values.size() == 0) {
        return null;
    }

    // Loop over the columns in the table and set the values
    for (int c = 0; c < numColumns; c++) {
        String colName = columnNames.get(c);
        TypeInfo ti = columnTypes.get(c);
        Object value = null;
        try {
            // Get type-safe JSON values
            if (ti.getTypeName().equalsIgnoreCase(Constants.DOUBLE_TYPE_NAME)) {
                value = Double.valueOf((String) values.get(colName));
            } else if (ti.getTypeName().equalsIgnoreCase(Constants.BIGINT_TYPE_NAME)) {
                value = Long.valueOf((String) values.get(colName));
            } else if (ti.getTypeName().equalsIgnoreCase(Constants.INT_TYPE_NAME)) {
                value = Integer.valueOf((String) values.get(colName));
            } else if (ti.getTypeName().equalsIgnoreCase(Constants.TINYINT_TYPE_NAME)) {
                value = Byte.valueOf((String) values.get(colName));
            } else if (ti.getTypeName().equalsIgnoreCase(Constants.FLOAT_TYPE_NAME)) {
                value = Float.valueOf((String) values.get(colName));
            } else if (ti.getTypeName().equalsIgnoreCase(Constants.BOOLEAN_TYPE_NAME)) {
                value = Boolean.valueOf((String) values.get(colName));
            } else {
                // Fall back: just get the raw object
                value = values.get(colName);
            }
        } catch (RuntimeException e) {
            LOG.error("Class cast error for column name: " + colName);
            Object o = values.get(colName);
            if (o != null) {
                LOG.error("Value was: " + o.toString());
            }
            throw new SerDeException(e);
        }
        if (value == null) {
            // If the column cannot be found, just make it a NULL value and skip over it
            LOG.warn("Column '" + colName + "' not found in row: " + rowText);
        }
        row.set(c, value);
    }
    return row;
}
From source file:net.sf.jasperreports.olap.JROlapDataSource.java
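A JasperReports OLAP data source that converts string field values to each field's declared class, using Byte.valueOf for Byte fields.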
/**
 * Convert the value to the data type of the field.
 *
 * @param jrField the field whose type has to be converted
 * @return the value of the field in the requested type
 */
@Override
public Object getFieldValue(JRField jrField) throws JRException {
    Class<?> valueClass = jrField.getValueClass();
    Object value = fieldValues.get(jrField.getName());
    try {
        /*
         * Everything in the result is a string, apart from Member
         */
        if (valueClass.equals(mondrian.olap.Member.class)) {
            if (!(value instanceof mondrian.olap.Member)) {
                throw new JRException(EXCEPTION_MESSAGE_KEY_OLAP_CANNOT_CONVERT_FIELD_TYPE,
                        new Object[] { jrField.getName(), value.getClass(), valueClass.getName() });
            }
            return value;
        }

        /*
         * Convert the rest from String
         */
        String fieldValue = (String) value;
        if (fieldValue == null) {
            return null;
        }

        if (Number.class.isAssignableFrom(valueClass)) {
            fieldValue = fieldValue.trim();
        }
        if (fieldValue.length() == 0) {
            fieldValue = "0";
        }

        if (valueClass.equals(String.class)) {
            return fieldValue;
        } else if (valueClass.equals(Boolean.class)) {
            return fieldValue.equalsIgnoreCase("true");
        } else if (valueClass.equals(Byte.class)) {
            return Byte.valueOf(fieldValue);
        } else if (valueClass.equals(Integer.class)) {
            return Integer.valueOf(fieldValue);
        } else if (valueClass.equals(Long.class)) {
            return Long.valueOf(fieldValue);
        } else if (valueClass.equals(Short.class)) {
            return Short.valueOf(fieldValue);
        } else if (valueClass.equals(Double.class)) {
            return Double.valueOf(fieldValue);
        } else if (valueClass.equals(Float.class)) {
            return Float.valueOf(fieldValue);
        } else if (valueClass.equals(java.math.BigDecimal.class)) {
            return new java.math.BigDecimal(fieldValue);
        } else if (valueClass.equals(java.util.Date.class)) {
            return dateFormat.parse(fieldValue);
        } else if (valueClass.equals(java.sql.Timestamp.class)) {
            return new java.sql.Timestamp(dateFormat.parse(fieldValue).getTime());
        } else if (valueClass.equals(java.sql.Time.class)) {
            return new java.sql.Time(dateFormat.parse(fieldValue).getTime());
        } else if (valueClass.equals(java.lang.Number.class)) {
            return Double.valueOf(fieldValue);
        } else {
            throw new JRException(EXCEPTION_MESSAGE_KEY_OLAP_CANNOT_CONVERT_STRING_VALUE_TYPE,
                    new Object[] { jrField.getName(), fieldValue,
                            fieldValues.get(jrField.getName()).getClass(), valueClass.getName() });
        }
    } catch (Exception e) {
        throw new JRException(EXCEPTION_MESSAGE_KEY_OLAP_FIELD_VALUE_NOT_RETRIEVED,
                new Object[] { jrField.getName(), valueClass.getName() }, e);
    }
}