List of usage examples for java.math.BigDecimal.scale()

Method signature: public int scale()

Returns the scale of this BigDecimal. If the scale is zero or positive, it is the number of digits to the right of the decimal point; if negative, the unscaled value is multiplied by ten to the power of the negation of the scale.
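Before the project examples, a minimal self-contained sketch of what scale() reports (the class name ScaleDemo is ours, for illustration only):

import java.math.BigDecimal;

public class ScaleDemo {
    public static void main(String[] args) {
        System.out.println(new BigDecimal("123.45").scale()); // 2
        System.out.println(new BigDecimal("123").scale());    // 0
        System.out.println(new BigDecimal("1.2e3").scale());  // -2 (unscaled value 12 times 10^2)
        // setScale changes the scale; widening pads with zeros, narrowing needs a rounding mode
        System.out.println(new BigDecimal("1.5").setScale(3)); // 1.500
    }
}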
From source file:cn.iie.haiep.hbase.value.Bytes.java
/**
 * Convert a BigDecimal value to a byte array.
 *
 * @param val the value to serialize
 * @return the byte array
 */
public static byte[] toBytes(BigDecimal val) {
    byte[] valueBytes = val.unscaledValue().toByteArray();
    byte[] result = new byte[valueBytes.length + SIZEOF_INT];
    int offset = putInt(result, 0, val.scale());
    putBytes(result, offset, valueBytes, 0, valueBytes.length);
    return result;
}
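A matching deserializer is not part of this listing; a minimal sketch of one, assuming a toInt helper that mirrors the putInt used above (both helper names are taken on trust from the surrounding class):

public static BigDecimal toBigDecimal(byte[] bytes, int offset, int length) {
    // the first SIZEOF_INT bytes hold the scale written by toBytes
    int scale = toInt(bytes, offset);
    // the remainder is the two's-complement unscaled value
    byte[] valueBytes = new byte[length - SIZEOF_INT];
    System.arraycopy(bytes, offset + SIZEOF_INT, valueBytes, 0, valueBytes.length);
    return new BigDecimal(new java.math.BigInteger(valueBytes), scale);
}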
From source file:org.kalypso.model.wspm.pdb.internal.wspm.ClassChecker.java
/**
 * Check if two class values are different: if yes, add a warning.
 */
private void checkValues(final BigDecimal local, final BigDecimal remote,
        final IClassificationClass localClass, final IPdbClass remoteClass, final String valueLabel) {
    if (local == null && remote == null)
        return;

    if (local == null && remote != null) {
        addWarning(local, remote, remoteClass.getLabel(), valueLabel);
        return;
    }

    if (local != null && remote == null) {
        addWarning(local, remote, localClass.getDescription(), valueLabel);
        return;
    }

    /* Bring to same scale to avoid false warnings */
    final int localScale = local.scale();
    final int remoteScale = remote.scale();
    final int maxScale = Math.max(localScale, remoteScale);
    final BigDecimal localScaled = local.setScale(maxScale);
    final BigDecimal remoteScaled = remote.setScale(maxScale);
    if (localScaled.compareTo(remoteScaled) == 0)
        return;

    addWarning(local, remote, remoteClass.getLabel(), valueLabel);
}
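For context: BigDecimal.equals() compares both value and scale, while compareTo() compares the numeric value only, and widening to the larger scale is always exact, so no rounding mode is needed:

BigDecimal a = new BigDecimal("1.0");   // scale 1
BigDecimal b = new BigDecimal("1.00");  // scale 2
System.out.println(a.equals(b));        // false: scales differ
System.out.println(a.compareTo(b));     // 0: numerically equal
System.out.println(a.setScale(2));      // 1.00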
From source file:cn.iie.haiep.hbase.value.Bytes.java
/**
 * Put a BigDecimal value out to the specified byte array position.
 *
 * @param bytes  the byte array
 * @param offset position in the array
 * @param val    BigDecimal to write out
 * @return incremented offset
 */
public static int putBigDecimal(byte[] bytes, int offset, BigDecimal val) {
    if (bytes == null) {
        return offset;
    }
    byte[] valueBytes = val.unscaledValue().toByteArray();
    byte[] result = new byte[valueBytes.length + SIZEOF_INT];
    offset = putInt(result, offset, val.scale());
    return putBytes(result, offset, valueBytes, 0, valueBytes.length);
}
From source file:org.kuali.kpme.core.earncode.service.EarnCodeServiceImpl.java
@Override
public BigDecimal roundHrsWithEarnCode(BigDecimal hours, EarnCodeContract earnCode) {
    String roundOption = HrConstants.ROUND_OPTION_MAP.get(earnCode.getRoundingOption());
    BigDecimal fractScale = new BigDecimal(earnCode.getFractionalTimeAllowed());
    if (roundOption == null) {
        LOG.error("Rounding option of Earn Code " + earnCode.getEarnCode() + " is not recognized.");
        return null;
        // throw new RuntimeException("Rounding option of Earn Code " + earnCode.getEarnCode() + " is not recognized.");
    }
    BigDecimal roundedHours = hours;
    if (roundOption.equals("Traditional")) {
        roundedHours = hours.setScale(fractScale.scale(), BigDecimal.ROUND_HALF_EVEN);
    } else if (roundOption.equals("Truncate")) {
        roundedHours = hours.setScale(fractScale.scale(), BigDecimal.ROUND_DOWN);
    }
    return roundedHours;
}
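The two rounding modes used above behave as follows for a target scale of 2 (RoundingMode is the modern replacement for the BigDecimal.ROUND_* int constants in the source):

BigDecimal hours = new BigDecimal("8.375");
// "Traditional": half-even ("banker's") rounding
System.out.println(hours.setScale(2, java.math.RoundingMode.HALF_EVEN)); // 8.38
// "Truncate": round toward zero
System.out.println(hours.setScale(2, java.math.RoundingMode.DOWN));      // 8.37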
From source file:org.apache.hive.hcatalog.pig.HCatBaseStorer.java
/**
 * Convert from a Pig value object to a Hive value object.
 * This method assumes that
 * {@link #validateSchema(org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema, org.apache.hive.hcatalog.data.schema.HCatFieldSchema, org.apache.pig.impl.logicalLayer.schema.Schema, org.apache.hive.hcatalog.data.schema.HCatSchema, int)},
 * which checks that the types in the Pig schema are compatible with the target Hive table, has been called.
 */
private Object getJavaObj(Object pigObj, HCatFieldSchema hcatFS) throws HCatException, BackendException {
    try {
        if (pigObj == null)
            return null;
        // The real work-horse. Spend time and energy in this method if there is
        // need to keep HCatStorer lean and go fast.
        Type type = hcatFS.getType();
        switch (type) {
        case BINARY:
            return ((DataByteArray) pigObj).get();
        case STRUCT:
            HCatSchema structSubSchema = hcatFS.getStructSubSchema();
            // Unwrap the tuple.
            List<Object> all = ((Tuple) pigObj).getAll();
            ArrayList<Object> converted = new ArrayList<Object>(all.size());
            for (int i = 0; i < all.size(); i++) {
                converted.add(getJavaObj(all.get(i), structSubSchema.get(i)));
            }
            return converted;
        case ARRAY:
            // Unwrap the bag.
            DataBag pigBag = (DataBag) pigObj;
            HCatFieldSchema tupFS = hcatFS.getArrayElementSchema().get(0);
            boolean needTuple = tupFS.getType() == Type.STRUCT;
            List<Object> bagContents = new ArrayList<Object>((int) pigBag.size());
            Iterator<Tuple> bagItr = pigBag.iterator();
            while (bagItr.hasNext()) {
                // If there is only one element in the tuple contained in the bag, we throw away the tuple.
                bagContents.add(getJavaObj(needTuple ? bagItr.next() : bagItr.next().get(0), tupFS));
            }
            return bagContents;
        case MAP:
            Map<?, ?> pigMap = (Map<?, ?>) pigObj;
            Map<Object, Object> typeMap = new HashMap<Object, Object>();
            for (Entry<?, ?> entry : pigMap.entrySet()) {
                // the value has a schema and not a FieldSchema
                typeMap.put(
                        // Schema validation enforces that the key is a String
                        (String) entry.getKey(),
                        getJavaObj(entry.getValue(), hcatFS.getMapValueSchema().get(0)));
            }
            return typeMap;
        case STRING:
        case INT:
        case BIGINT:
        case FLOAT:
        case DOUBLE:
            return pigObj;
        case SMALLINT:
            if ((Integer) pigObj < Short.MIN_VALUE || (Integer) pigObj > Short.MAX_VALUE) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return ((Integer) pigObj).shortValue();
        case TINYINT:
            if ((Integer) pigObj < Byte.MIN_VALUE || (Integer) pigObj > Byte.MAX_VALUE) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return ((Integer) pigObj).byteValue();
        case BOOLEAN:
            if (pigObj instanceof String) {
                if (((String) pigObj).trim().compareTo("0") == 0) {
                    return Boolean.FALSE;
                }
                if (((String) pigObj).trim().compareTo("1") == 0) {
                    return Boolean.TRUE;
                }
                throw new BackendException("Unexpected type " + type + " for value " + pigObj + " of class "
                        + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);
            }
            return Boolean.parseBoolean(pigObj.toString());
        case DECIMAL:
            BigDecimal bd = (BigDecimal) pigObj;
            DecimalTypeInfo dti = (DecimalTypeInfo) hcatFS.getTypeInfo();
            if (bd.precision() > dti.precision() || bd.scale() > dti.scale()) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return HiveDecimal.create(bd);
        case CHAR:
            String charVal = (String) pigObj;
            CharTypeInfo cti = (CharTypeInfo) hcatFS.getTypeInfo();
            if (charVal.length() > cti.getLength()) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return new HiveChar(charVal, cti.getLength());
        case VARCHAR:
            String varcharVal = (String) pigObj;
            VarcharTypeInfo vti = (VarcharTypeInfo) hcatFS.getTypeInfo();
            if (varcharVal.length() > vti.getLength()) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return new HiveVarchar(varcharVal, vti.getLength());
        case TIMESTAMP:
            DateTime dt = (DateTime) pigObj;
            return new Timestamp(dt.getMillis()); // getMillis() returns UTC time regardless of TZ
        case DATE:
            /*
             * We ignore any TZ setting on the Pig value since java.sql.Date doesn't have it (in any
             * meaningful way). So the assumption is that if the Pig value has a 0 time component (midnight)
             * we assume it reasonably 'fits' into a Hive DATE. If the time part is not 0, it's considered
             * out of range for the target type.
             */
            DateTime dateTime = ((DateTime) pigObj);
            if (dateTime.getMillisOfDay() != 0) {
                handleOutOfRangeValue(pigObj, hcatFS,
                        "Time component must be 0 (midnight) in local timezone; Local TZ val='" + pigObj + "'");
                return null;
            }
            /*
             * java.sql.Date is a poorly defined API. Some (all?) SerDes call toString() on it
             * [e.g. LazySimpleSerDe, uses LazyUtils.writePrimitiveUTF8()], which automatically adjusts
             * for the local timezone. Date.valueOf() also uses the local timezone (as does Date(int,int,int)).
             * Also see PigHCatUtil#extractPigObject() for the corresponding read op. This way a DATETIME from
             * Pig, when stored into Hive and read back, comes back with the same value.
             */
            return new Date(dateTime.getYear() - 1900, dateTime.getMonthOfYear() - 1, dateTime.getDayOfMonth());
        default:
            throw new BackendException("Unexpected HCat type " + type + " for value " + pigObj + " of class "
                    + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);
        }
    } catch (BackendException e) {
        // provide the path to the field in the error message
        throw new BackendException((hcatFS.getName() == null ? " " : hcatFS.getName() + ".") + e.getMessage(), e);
    }
}
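The DECIMAL branch rejects values whose precision or scale exceeds the target type's; stripped to its core, with a DECIMAL(10,2) target chosen purely for illustration:

BigDecimal bd = new BigDecimal("12345678.901"); // precision 11, scale 3
int targetPrecision = 10, targetScale = 2;
if (bd.precision() > targetPrecision || bd.scale() > targetScale) {
    System.out.println("out of range for DECIMAL(10,2)"); // triggers on both counts here
}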
From source file:org.apache.hadoop.hive.ql.optimizer.optiq.translator.RexNodeConverter.java
protected RexNode convert(ExprNodeConstantDesc literal) throws OptiqSemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType optiqDataType = TypeConverter.convert(hiveType, dtFactory);

    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();

    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);

    RexNode optiqLiteral = null;
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch (hiveTypeCategory) {
    case BOOLEAN:
        optiqLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
        break;
    case BYTE:
        byte[] byteArray = new byte[] { (Byte) value };
        ByteString bs = new ByteString(byteArray);
        optiqLiteral = rexBuilder.makeBinaryLiteral(bs);
        break;
    case SHORT:
        optiqLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), optiqDataType);
        break;
    case INT:
        optiqLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
        break;
    case LONG:
        optiqLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
        break;
    // TODO: is Decimal an exact numeric or approximate numeric?
    case DECIMAL:
        if (value instanceof HiveDecimal) {
            value = ((HiveDecimal) value).bigDecimalValue();
        } else if (value instanceof Decimal128) {
            value = ((Decimal128) value).toBigDecimal();
        }
        if (value == null) {
            // We have found an invalid decimal value while enforcing precision and scale.
            // Ideally, we would replace it with null here, which is what Hive does. However,
            // we need to plumb this thru up somehow, because otherwise having a different
            // expression type in the AST causes the plan generation to fail after CBO,
            // probably due to some residual state in SA/QB.
            // For now, we will not run CBO in the presence of invalid decimal literals.
            throw new OptiqSemanticException(
                    "Expression " + literal.getExprString() + " is not a valid decimal");
            // TODO: return createNullLiteral(literal);
        }
        BigDecimal bd = (BigDecimal) value;
        BigInteger unscaled = bd.unscaledValue();
        if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
            optiqLiteral = rexBuilder.makeExactLiteral(bd);
        } else {
            // CBO doesn't support unlimited precision decimals. In practice, this will work...
            // An alternative would be to throw CboSemanticException and fall back to no CBO.
            RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, bd.scale(),
                    unscaled.toString().length());
            optiqLiteral = rexBuilder.makeExactLiteral(bd, relType);
        }
        break;
    case FLOAT:
        optiqLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Float) value), optiqDataType);
        break;
    case DOUBLE:
        optiqLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), optiqDataType);
        break;
    case CHAR:
        if (value instanceof HiveChar)
            value = ((HiveChar) value).getValue();
        optiqLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case VARCHAR:
        if (value instanceof HiveVarchar)
            value = ((HiveVarchar) value).getValue();
        optiqLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case STRING:
        optiqLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case DATE:
        Calendar cal = new GregorianCalendar();
        cal.setTime((Date) value);
        optiqLiteral = rexBuilder.makeDateLiteral(cal);
        break;
    case TIMESTAMP:
        optiqLiteral = rexBuilder.makeTimestampLiteral((Calendar) value, RelDataType.PRECISION_NOT_SPECIFIED);
        break;
    case BINARY:
    case VOID:
    case UNKNOWN:
    default:
        throw new RuntimeException("UnSupported Literal");
    }

    return optiqLiteral;
}
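MIN_LONG_BI and MAX_LONG_BI are constants defined elsewhere in this class; assuming they hold BigInteger.valueOf(Long.MIN_VALUE) and BigInteger.valueOf(Long.MAX_VALUE), the fits-in-a-long test can be tried in isolation:

BigInteger minLong = BigInteger.valueOf(Long.MIN_VALUE);
BigInteger maxLong = BigInteger.valueOf(Long.MAX_VALUE);
BigDecimal bd = new BigDecimal("92233720368547758.08"); // unscaled value is 2^63, one past Long.MAX_VALUE
BigInteger unscaled = bd.unscaledValue();
boolean fitsInLong = unscaled.compareTo(minLong) >= 0 && unscaled.compareTo(maxLong) <= 0;
System.out.println(fitsInLong + ", scale=" + bd.scale()); // false, scale=2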
From source file:org.zuinnote.hadoop.office.format.common.converter.ExcelConverterSimpleSpreadSheetCellDAO.java
/***
 * This provides another sample to infer the schema in form of simple datatypes
 * (e.g. boolean, byte etc.). You might add as many samples as necessary to get a
 * precise schema.
 *
 * @param dataRow
 */
public void updateSpreadSheetCellRowToInferSchemaInformation(SpreadSheetCellDAO[] dataRow) {
    // check size of cell based on address
    // if necessary add more to schemaRow
    for (SpreadSheetCellDAO currentSpreadSheetCellDAO : dataRow) {
        boolean dataTypeFound = false;
        if (currentSpreadSheetCellDAO != null) {
            // add potential column to list
            int j = new CellAddress(currentSpreadSheetCellDAO.getAddress()).getColumn();
            if (j >= this.schemaRow.size()) {
                // fill up
                for (int x = this.schemaRow.size(); x <= j; x++) {
                    this.schemaRow.add(null);
                }
            }
            if ((currentSpreadSheetCellDAO.getFormattedValue() != null)
                    && (!"".equals(currentSpreadSheetCellDAO.getFormattedValue()))) { // skip null value
                String currentCellValue = currentSpreadSheetCellDAO.getFormattedValue();
                // check if boolean data type
                if (("TRUE".equals(currentCellValue)) || ("FALSE".equals(currentCellValue))) {
                    dataTypeFound = true;
                    if (this.schemaRow.get(j) != null) {
                        // check if previous assumption was boolean
                        if (!(this.schemaRow.get(j) instanceof GenericBooleanDataType)) {
                            // if not then the type needs to be set to string
                            this.schemaRow.set(j, new GenericStringDataType());
                        }
                        // if yes then nothing to do (already boolean)
                    } else {
                        // we face this the first time
                        this.schemaRow.set(j, new GenericBooleanDataType());
                    }
                }
                // check if timestamp using provided format
                if (!dataTypeFound) {
                    if (this.dateTimeFormat != null) { // only if a format is specified
                        Date theDate = this.dateTimeFormat.parse(currentCellValue, new ParsePosition(0));
                        if (theDate != null) {
                            // we found indeed a date time
                            dataTypeFound = true;
                            if (this.schemaRow.get(j) != null) {
                                // check if previous assumption was date
                                if (!(this.schemaRow.get(j) instanceof GenericTimestampDataType)) {
                                    // if not then the type needs to be set to string
                                    this.schemaRow.set(j, new GenericStringDataType());
                                }
                            } else {
                                // we face this the first time
                                this.schemaRow.set(j, new GenericTimestampDataType());
                            }
                        }
                    }
                }
                // check for timestamp using java.sql.Timestamp
                if (!dataTypeFound) {
                    try {
                        java.sql.Timestamp ts = java.sql.Timestamp.valueOf(currentCellValue);
                        dataTypeFound = true;
                        this.schemaRow.set(j, new GenericTimestampDataType());
                    } catch (IllegalArgumentException e) {
                        LOG.warn("Could not identify timestamp using Timestamp.valueOf. "
                                + "Trying last resort Date parsing....");
                    }
                }
                // check if date data type
                if (!dataTypeFound) {
                    Date theDate = this.dateFormat.parse(currentCellValue, new ParsePosition(0));
                    if (theDate != null) {
                        // we have indeed a date
                        dataTypeFound = true;
                        if (this.schemaRow.get(j) != null) {
                            // check if previous assumption was date
                            if (!(this.schemaRow.get(j) instanceof GenericDateDataType)) {
                                // if not then the type needs to be set to string
                                this.schemaRow.set(j, new GenericStringDataType());
                            }
                        } else {
                            // we face this the first time
                            this.schemaRow.set(j, new GenericDateDataType());
                        }
                    }
                }
                // check if BigDecimal
                BigDecimal bd = (BigDecimal) this.decimalFormat.parse(currentCellValue, new ParsePosition(0));
                if ((!dataTypeFound) && (bd != null)) {
                    BigDecimal bdv = bd.stripTrailingZeros();
                    dataTypeFound = true;
                    if (this.schemaRow.get(j) != null) {
                        // previous assumption was a number; check if we need to upgrade to decimal
                        if ((bdv.scale() > 0) && (this.schemaRow.get(j) instanceof GenericNumericDataType)) {
                            // upgrade to decimal, if necessary
                            if (!(this.schemaRow.get(j) instanceof GenericBigDecimalDataType)) {
                                this.schemaRow.set(j, new GenericBigDecimalDataType(bdv.precision(), bdv.scale()));
                            } else {
                                if ((bdv.scale() > ((GenericBigDecimalDataType) this.schemaRow.get(j)).getScale())
                                        && (bdv.precision() > ((GenericBigDecimalDataType) this.schemaRow.get(j)).getPrecision())) {
                                    this.schemaRow.set(j, new GenericBigDecimalDataType(bdv.precision(), bdv.scale()));
                                } else if (bdv.scale() > ((GenericBigDecimalDataType) this.schemaRow.get(j)).getScale()) {
                                    // upgrade scale
                                    GenericBigDecimalDataType gbd = ((GenericBigDecimalDataType) this.schemaRow.get(j));
                                    gbd.setScale(bdv.scale());
                                    this.schemaRow.set(j, gbd);
                                } else if (bdv.precision() > ((GenericBigDecimalDataType) this.schemaRow.get(j)).getPrecision()) {
                                    // upgrade precision
                                    // the new precision needs to extend to the max scale
                                    GenericBigDecimalDataType gbd = ((GenericBigDecimalDataType) this.schemaRow.get(j));
                                    int newpre = bdv.precision() + (gbd.getScale() - bdv.scale());
                                    gbd.setPrecision(newpre);
                                    this.schemaRow.set(j, gbd);
                                }
                            }
                        } else {
                            // check if we need to upgrade one of the integer types
                            boolean isByte = false;
                            boolean isShort = false;
                            boolean isInt = false;
                            boolean isLong = true;
                            try {
                                bdv.longValueExact();
                                isLong = true;
                                bdv.intValueExact();
                                isInt = true;
                                bdv.shortValueExact();
                                isShort = true;
                                bdv.byteValueExact();
                                isByte = true;
                            } catch (Exception e) {
                                LOG.debug("Possible data types: Long: " + isLong + " Int: " + isInt + " Short: "
                                        + isShort + " Byte: " + isByte);
                            }
                            if ((isByte) && ((this.schemaRow.get(j) instanceof GenericByteDataType)
                                    || (this.schemaRow.get(j) instanceof GenericShortDataType)
                                    || (this.schemaRow.get(j) instanceof GenericIntegerDataType)
                                    || (this.schemaRow.get(j) instanceof GenericLongDataType))) {
                                // if it was byte (or wider) before, nothing to upgrade; here just for completeness
                            } else if ((isShort) && ((this.schemaRow.get(j) instanceof GenericByteDataType))) {
                                // upgrade to short
                                this.schemaRow.set(j, new GenericShortDataType());
                            } else if ((isInt) && ((this.schemaRow.get(j) instanceof GenericShortDataType)
                                    || (this.schemaRow.get(j) instanceof GenericByteDataType))) {
                                // upgrade to integer
                                this.schemaRow.set(j, new GenericIntegerDataType());
                            } else if ((!isByte) && (!isShort) && (!isInt)
                                    && !((this.schemaRow.get(j) instanceof GenericLongDataType))) {
                                // upgrade to long
                                this.schemaRow.set(j, new GenericLongDataType());
                            }
                        }
                    } else {
                        // we face it for the first time
                        // determine value type
                        if (bdv.scale() > 0) {
                            this.schemaRow.set(j, new GenericBigDecimalDataType(bdv.precision(), bdv.scale()));
                        } else {
                            boolean isByte = false;
                            boolean isShort = false;
                            boolean isInt = false;
                            boolean isLong = true;
                            try {
                                bdv.longValueExact();
                                isLong = true;
                                bdv.intValueExact();
                                isInt = true;
                                bdv.shortValueExact();
                                isShort = true;
                                bdv.byteValueExact();
                                isByte = true;
                            } catch (Exception e) {
                                LOG.debug("Possible data types: Long: " + isLong + " Int: " + isInt + " Short: "
                                        + isShort + " Byte: " + isByte);
                            }
                            if (isByte) {
                                this.schemaRow.set(j, new GenericByteDataType());
                            } else if (isShort) {
                                this.schemaRow.set(j, new GenericShortDataType());
                            } else if (isInt) {
                                this.schemaRow.set(j, new GenericIntegerDataType());
                            } else if (isLong) {
                                this.schemaRow.set(j, new GenericLongDataType());
                            }
                        }
                    }
                }
                if (!dataTypeFound) {
                    // otherwise string
                    if (!(this.schemaRow.get(j) instanceof GenericStringDataType)) {
                        this.schemaRow.set(j, new GenericStringDataType());
                    }
                }
            } else {
                // ignore null values
            }
        }
    }
}
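A note on the stripTrailingZeros() call above: it normalizes the parsed value before the scale checks, so whole numbers written with a fractional part are still classified as integer types:

System.out.println(new BigDecimal("1.00").stripTrailingZeros().scale()); // 0  -> integer candidate
System.out.println(new BigDecimal("1.25").stripTrailingZeros().scale()); // 2  -> decimal with precision 3, scale 2
// stripping can even make the scale negative:
System.out.println(new BigDecimal("100").stripTrailingZeros().scale());  // -2 (unscaled value 1)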
From source file:net.sourceforge.squirrel_sql.fw.datasetviewer.cellcomponent.DataTypeBigDecimal.java
/**
 * Implement the interface for validating and converting to the internal object.
 * Null is a valid successful return, so errors are indicated only by the
 * existence or not of a message in the messageBuffer.
 */
public Object validateAndConvert(String value, Object originalValue, StringBuffer messageBuffer) {
    // handle null, which is shown as the special string "<null>"
    if (value.equals("<null>") || value.equals(""))
        return null;

    // Do the conversion into the object in a safe manner
    try {
        BigDecimal obj;
        if (useJavaDefaultFormat) {
            obj = new BigDecimal(value);
        } else {
            obj = new BigDecimal("" + _numberFormat.parse(value));
        }

        // Some DBs give a negative number when they do not have a value for
        // the scale. Assume that if _scale is 0 or positive the DB really
        // means for that to be the scale, but if it is negative then we do not check.
        if (_scale >= 0 && obj.scale() > _scale) {
            Object[] args = new Object[] { Integer.valueOf(obj.scale()), Integer.valueOf(_scale) };
            // i18n[dataTypeBigDecimal.scaleEceeded=Scale Exceeded: Number
            // of digits to right of decimal place ({0})\nis greater than
            // allowed in column ({1}).]
            String msg = s_stringMgr.getString("dataTypeBigDecimal.scaleEceeded", args);
            messageBuffer.append(msg);
            return null;
        }

        // Check the total number of digits in the number.
        // Since the string version of the number is the representation of
        // the digits in that number, possibly including a plus or minus
        // and a decimal point, start by counting the number of characters in the string.
        int objPrecision = value.length();
        // now remove the non-digit chars, if any
        if (value.indexOf("+") > -1 || value.indexOf("-") > -1)
            objPrecision--;
        if (value.indexOf(".") > -1)
            objPrecision--;

        // Some drivers (e.g. Oracle) give precision as 0 in some cases.
        // When precision is 0, we cannot check the length, so do not try.
        if (_precision > 0 && objPrecision > _precision) {
            Object[] args = new Object[] { Integer.valueOf(objPrecision), Integer.valueOf(_precision) };
            // i18n[dataTypeBigDecimal.precisionEceeded=Precision Exceeded:
            // Number of digits in number ({0})\nis greater than allowed in
            // column ({1})]
            String msg = s_stringMgr.getString("dataTypeBigDecimal.precisionEceeded", args);
            messageBuffer.append(msg);
            return null;
        }
        return obj;
    } catch (Exception e) {
        messageBuffer.append(e.toString() + "\n");
        // ?? do we need the message also, or is it automatically part of the toString()?
        // messageBuffer.append(e.getMessage());
        return null;
    }
}
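One subtlety: the string-based digit count used above is not the same as BigDecimal.precision(), which ignores leading zeros; compare:

BigDecimal obj = new BigDecimal("0.050");
System.out.println(obj.scale());     // 3
System.out.println(obj.precision()); // 2 (digits of the unscaled value, 50)
// the string-based count in validateAndConvert yields 4 for "0.050" (5 chars minus one '.')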
From source file:org.apache.hadoop.hive.ql.optimizer.calcite.translator.RexNodeConverter.java
protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);

    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();

    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);

    RexNode calciteLiteral = null;
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch (hiveTypeCategory) {
    case BOOLEAN:
        calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
        break;
    case BYTE:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
        break;
    case SHORT:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
        break;
    case INT:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
        break;
    case LONG:
        calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
        break;
    // TODO: is Decimal an exact numeric or approximate numeric?
    case DECIMAL:
        if (value instanceof HiveDecimal) {
            value = ((HiveDecimal) value).bigDecimalValue();
        } else if (value instanceof Decimal128) {
            value = ((Decimal128) value).toBigDecimal();
        }
        if (value == null) {
            // We have found an invalid decimal value while enforcing precision and scale.
            // Ideally, we would replace it with null here, which is what Hive does. However,
            // we need to plumb this thru up somehow, because otherwise having a different
            // expression type in the AST causes the plan generation to fail after CBO,
            // probably due to some residual state in SA/QB.
            // For now, we will not run CBO in the presence of invalid decimal literals.
            throw new CalciteSemanticException(
                    "Expression " + literal.getExprString() + " is not a valid decimal",
                    UnsupportedFeature.Invalid_decimal);
            // TODO: return createNullLiteral(literal);
        }
        BigDecimal bd = (BigDecimal) value;
        BigInteger unscaled = bd.unscaledValue();
        if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
            calciteLiteral = rexBuilder.makeExactLiteral(bd);
        } else {
            // CBO doesn't support unlimited precision decimals. In practice, this will work...
            // An alternative would be to throw CboSemanticException and fall back to no CBO.
            RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, bd.scale(),
                    unscaled.toString().length());
            calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
        }
        break;
    case FLOAT:
        calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Float) value), calciteDataType);
        break;
    case DOUBLE:
        calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), calciteDataType);
        break;
    case CHAR:
        if (value instanceof HiveChar) {
            value = ((HiveChar) value).getValue();
        }
        calciteLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case VARCHAR:
        if (value instanceof HiveVarchar) {
            value = ((HiveVarchar) value).getValue();
        }
        calciteLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case STRING:
        calciteLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case DATE:
        Calendar cal = new GregorianCalendar();
        cal.setTime((Date) value);
        calciteLiteral = rexBuilder.makeDateLiteral(cal);
        break;
    case TIMESTAMP:
        Calendar c = null;
        if (value instanceof Calendar) {
            c = (Calendar) value;
        } else {
            c = Calendar.getInstance();
            c.setTimeInMillis(((Timestamp) value).getTime());
        }
        calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED);
        break;
    case INTERVAL_YEAR_MONTH:
        // Calcite year-month literal value is months as BigDecimal
        BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
        calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths,
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
        break;
    case INTERVAL_DAY_TIME:
        // Calcite day-time interval is the millis value as BigDecimal
        // Seconds converted to millis
        BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
        // Nanos converted to millis
        BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
        calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
                new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
        break;
    case VOID:
        calciteLiteral = cluster.getRexBuilder().makeLiteral(null,
                cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
        break;
    case BINARY:
    case UNKNOWN:
    default:
        throw new RuntimeException("UnSupported Literal");
    }

    return calciteLiteral;
}
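A side note on the INTERVAL_DAY_TIME branch: the two-argument BigDecimal.valueOf(unscaledValue, scale) converts nanoseconds to milliseconds exactly, without floating point:

long nanos = 500_000_000L;                        // half a second
BigDecimal millis = BigDecimal.valueOf(nanos, 6); // 500000000 * 10^-6 = 500.000000 ms
System.out.println(millis + " scale=" + millis.scale()); // 500.000000 scale=6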
From source file:org.sparkcommerce.openadmin.server.service.persistence.module.provider.BasicFieldPersistenceProvider.java
@Override
public FieldProviderResponse extractValue(ExtractValueRequest extractValueRequest, Property property)
        throws PersistenceException {
    if (!canHandleExtraction(extractValueRequest, property)) {
        return FieldProviderResponse.NOT_HANDLED;
    }
    try {
        if (extractValueRequest.getRequestedValue() != null) {
            String val = null;
            if (extractValueRequest.getMetadata().getForeignKeyCollection()) {
                ((BasicFieldMetadata) property.getMetadata())
                        .setFieldType(extractValueRequest.getMetadata().getFieldType());
            } else if (extractValueRequest.getMetadata().getFieldType().equals(SupportedFieldType.BOOLEAN)
                    && extractValueRequest.getRequestedValue() instanceof Character) {
                val = (extractValueRequest.getRequestedValue().equals('Y')) ? "true" : "false";
            } else if (Date.class.isAssignableFrom(extractValueRequest.getRequestedValue().getClass())) {
                val = extractValueRequest.getDataFormatProvider().getSimpleDateFormatter()
                        .format((Date) extractValueRequest.getRequestedValue());
            } else if (Timestamp.class.isAssignableFrom(extractValueRequest.getRequestedValue().getClass())) {
                val = extractValueRequest.getDataFormatProvider().getSimpleDateFormatter()
                        .format(new Date(((Timestamp) extractValueRequest.getRequestedValue()).getTime()));
            } else if (Calendar.class.isAssignableFrom(extractValueRequest.getRequestedValue().getClass())) {
                val = extractValueRequest.getDataFormatProvider().getSimpleDateFormatter()
                        .format(((Calendar) extractValueRequest.getRequestedValue()).getTime());
            } else if (Double.class.isAssignableFrom(extractValueRequest.getRequestedValue().getClass())) {
                val = extractValueRequest.getDataFormatProvider().getDecimalFormatter()
                        .format(extractValueRequest.getRequestedValue());
            } else if (BigDecimal.class.isAssignableFrom(extractValueRequest.getRequestedValue().getClass())) {
                BigDecimal decimal = (BigDecimal) extractValueRequest.getRequestedValue();
                DecimalFormat format = extractValueRequest.getDataFormatProvider().getDecimalFormatter();
                // track all the decimal places in the scale of the BigDecimal - even if they're all zeros
                StringBuilder sb = new StringBuilder();
                sb.append("0");
                if (decimal.scale() > 0) {
                    sb.append(".");
                    for (int j = 0; j < decimal.scale(); j++) {
                        sb.append("0");
                    }
                }
                format.applyPattern(sb.toString());
                val = format.format(extractValueRequest.getRequestedValue());
            } else if (extractValueRequest.getMetadata().getForeignKeyClass() != null) {
                try {
                    val = extractValueRequest.getFieldManager()
                            .getFieldValue(extractValueRequest.getRequestedValue(),
                                    extractValueRequest.getMetadata().getForeignKeyProperty())
                            .toString();
                    // see if there's a name property and use it for the display value
                    String entityName = null;
                    if (extractValueRequest.getRequestedValue() instanceof AdminMainEntity) {
                        entityName = ((AdminMainEntity) extractValueRequest.getRequestedValue())
                                .getMainEntityName();
                    }
                    Object temp = null;
                    if (!StringUtils.isEmpty(extractValueRequest.getMetadata().getForeignKeyDisplayValueProperty())) {
                        String nameProperty = extractValueRequest.getMetadata().getForeignKeyDisplayValueProperty();
                        try {
                            temp = extractValueRequest.getFieldManager()
                                    .getFieldValue(extractValueRequest.getRequestedValue(), nameProperty);
                        } catch (FieldNotAvailableException e) {
                            // do nothing
                        }
                    }
                    if (temp == null && StringUtils.isEmpty(entityName)) {
                        try {
                            temp = extractValueRequest.getFieldManager()
                                    .getFieldValue(extractValueRequest.getRequestedValue(), "name");
                        } catch (FieldNotAvailableException e) {
                            // do nothing
                        }
                    }
                    if (temp != null) {
                        extractValueRequest.setDisplayVal(temp.toString());
                    } else if (!StringUtils.isEmpty(entityName)) {
                        extractValueRequest.setDisplayVal(entityName);
                    }
                } catch (FieldNotAvailableException e) {
                    throw new IllegalArgumentException(e);
                }
            } else {
                val = extractValueRequest.getRequestedValue().toString();
            }
            property.setValue(val);
            property.setDisplayValue(extractValueRequest.getDisplayVal());
        }
    } catch (IllegalAccessException e) {
        throw new PersistenceException(e);
    }
    return FieldProviderResponse.HANDLED;
}
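The pattern building in the BigDecimal branch can be exercised standalone; one "0" is appended per unit of scale, so trailing zeros survive formatting:

BigDecimal decimal = new BigDecimal("42.500"); // scale 3
StringBuilder sb = new StringBuilder("0");
if (decimal.scale() > 0) {
    sb.append(".");
    for (int j = 0; j < decimal.scale(); j++) {
        sb.append("0");
    }
}
java.text.DecimalFormat format = new java.text.DecimalFormat(sb.toString()); // pattern "0.000"
System.out.println(format.format(decimal)); // 42.500 (decimal separator depends on the default locale)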