List of usage examples for the java.sql.Date constructor Date(int year, int month, int date)
@Deprecated public Date(int year, int month, int date)
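This deprecated constructor interprets its arguments as the year minus 1900 and a zero-based month, so new Date(113, 11, 31) denotes 2013-12-31 at midnight in the default timezone. A minimal sketch of that mapping, with Date.valueOf(String) as the non-deprecated equivalent (the class name DeprecatedDateDemo is just for illustration):

import java.sql.Date;

public class DeprecatedDateDemo {
    public static void main(String[] args) {
        // year is an offset from 1900, month is zero-based: (113, 11, 31) -> 2013-12-31
        Date d = new Date(113, 11, 31);
        System.out.println(d); // prints 2013-12-31

        // Non-deprecated equivalent, parsing an ISO yyyy-mm-dd string
        Date d2 = Date.valueOf("2013-12-31");
        System.out.println(d.equals(d2)); // true: same local date, midnight local time
    }
}

Both values wrap the same epoch-millisecond instant, which is why the examples below can mix the two notations freely.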
From source file: org.apache.sqoop.hcat.HCatalogImportTest.java
public void testDateTypes() throws Exception {
    final int TOTAL_RECORDS = 1 * 10;
    String table = getTableName().toUpperCase();
    ColumnGenerator[] cols = new ColumnGenerator[] {
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0), "date", Types.DATE,
            HCatFieldSchema.Type.STRING, 0, 0, "2013-12-31",
            new Date(113, 11, 31), KeyType.NOT_A_KEY),
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1), "date", Types.DATE,
            HCatFieldSchema.Type.DATE, 0, 0, new Date(113, 11, 31),
            new Date(113, 11, 31), KeyType.NOT_A_KEY),
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2), "time", Types.TIME,
            HCatFieldSchema.Type.STRING, 0, 0, "10:11:12",
            new Time(10, 11, 12), KeyType.NOT_A_KEY),
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3), "timestamp", Types.TIMESTAMP,
            HCatFieldSchema.Type.STRING, 0, 0, "2013-12-31 10:11:12.0",
            new Timestamp(113, 11, 31, 10, 11, 12, 0), KeyType.NOT_A_KEY),
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(4), "timestamp", Types.TIMESTAMP,
            HCatFieldSchema.Type.TIMESTAMP, 0, 0, new Timestamp(113, 11, 31, 10, 11, 12, 0),
            new Timestamp(113, 11, 31, 10, 11, 12, 0), KeyType.NOT_A_KEY),
    };
    List<String> addlArgsArray = new ArrayList<String>();
    setExtraArgs(addlArgsArray);
    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
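The expected string "2013-12-31 10:11:12.0" above is simply what Timestamp.toString() produces; java.sql.Timestamp's deprecated constructor uses the same (year - 1900, zero-based month) convention as the Date constructor. A small check of the literals used in the test (the class name LiteralsCheck is just for illustration):

import java.sql.Date;
import java.sql.Timestamp;

public class LiteralsCheck {
    public static void main(String[] args) {
        // Same offset convention as the deprecated Date constructor.
        Timestamp ts = new Timestamp(113, 11, 31, 10, 11, 12, 0);
        System.out.println(ts); // 2013-12-31 10:11:12.0

        Date d = new Date(113, 11, 31);
        System.out.println(d); // 2013-12-31
    }
}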
From source file: org.apache.sqoop.hcat.HCatalogImportTest.java
public void testDateTypesToBigInt() throws Exception {
    final int TOTAL_RECORDS = 1 * 10;
    long offset = TimeZone.getDefault().getRawOffset();
    String table = getTableName().toUpperCase();
    ColumnGenerator[] cols = new ColumnGenerator[] {
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0), "date", Types.DATE,
            HCatFieldSchema.Type.BIGINT, 0, 0, 0 - offset,
            new Date(70, 0, 1), KeyType.NOT_A_KEY),
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1), "time", Types.TIME,
            HCatFieldSchema.Type.BIGINT, 0, 0, 36672000L - offset,
            new Time(10, 11, 12), KeyType.NOT_A_KEY),
        HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2), "timestamp", Types.TIMESTAMP,
            HCatFieldSchema.Type.BIGINT, 0, 0, 36672000L - offset,
            new Timestamp(70, 0, 1, 10, 11, 12, 0), KeyType.NOT_A_KEY),
    };
    List<String> addlArgsArray = new ArrayList<String>();
    addlArgsArray.add("--map-column-hive");
    addlArgsArray.add("COL0=bigint,COL1=bigint,COL2=bigint");
    setExtraArgs(addlArgsArray);
    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
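The bigint expectations above come from the deprecated constructors interpreting their arguments in the default timezone: new Date(70, 0, 1) is local midnight on 1970-01-01, whose epoch-millisecond value is 0 minus the zone offset, and 10:11:12 is (10 * 3600 + 11 * 60 + 12) * 1000 = 36672000 ms into the day. A sketch of that arithmetic, assuming a default zone with no DST in effect on 1970-01-01 (getRawOffset() ignores DST; the class name RawOffsetCheck is just for illustration):

import java.sql.Date;
import java.sql.Time;
import java.util.TimeZone;

public class RawOffsetCheck {
    public static void main(String[] args) {
        long offset = TimeZone.getDefault().getRawOffset();

        // Local midnight on 1970-01-01: epoch millis shifted by the zone offset.
        System.out.println(new Date(70, 0, 1).getTime() == 0 - offset);       // true

        // 10:11:12 local time = 36672000 ms into the day, shifted the same way.
        System.out.println(new Time(10, 11, 12).getTime() == 36672000L - offset); // true
    }
}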
From source file: org.apache.hive.hcatalog.pig.HCatBaseStorer.java
/**
 * Convert from a Pig value object to a Hive value object.
 * This method assumes that {@link #validateSchema(org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema, org.apache.hive.hcatalog.data.schema.HCatFieldSchema, org.apache.pig.impl.logicalLayer.schema.Schema, org.apache.hive.hcatalog.data.schema.HCatSchema, int)},
 * which checks that the types in the Pig schema are compatible with the target Hive table, has been called.
 */
private Object getJavaObj(Object pigObj, HCatFieldSchema hcatFS) throws HCatException, BackendException {
    try {
        if (pigObj == null) {
            return null;
        }
        // The real work-horse. Spend time and energy in this method if there is
        // need to keep HCatStorer lean and go fast.
        Type type = hcatFS.getType();
        switch (type) {
        case BINARY:
            return ((DataByteArray) pigObj).get();
        case STRUCT:
            HCatSchema structSubSchema = hcatFS.getStructSubSchema();
            // Unwrap the tuple.
            List<Object> all = ((Tuple) pigObj).getAll();
            ArrayList<Object> converted = new ArrayList<Object>(all.size());
            for (int i = 0; i < all.size(); i++) {
                converted.add(getJavaObj(all.get(i), structSubSchema.get(i)));
            }
            return converted;
        case ARRAY:
            // Unwrap the bag.
            DataBag pigBag = (DataBag) pigObj;
            HCatFieldSchema tupFS = hcatFS.getArrayElementSchema().get(0);
            boolean needTuple = tupFS.getType() == Type.STRUCT;
            List<Object> bagContents = new ArrayList<Object>((int) pigBag.size());
            Iterator<Tuple> bagItr = pigBag.iterator();
            while (bagItr.hasNext()) {
                // If there is only one element in the tuple contained in the bag, we throw away the tuple.
                bagContents.add(getJavaObj(needTuple ? bagItr.next() : bagItr.next().get(0), tupFS));
            }
            return bagContents;
        case MAP:
            Map<?, ?> pigMap = (Map<?, ?>) pigObj;
            Map<Object, Object> typeMap = new HashMap<Object, Object>();
            for (Entry<?, ?> entry : pigMap.entrySet()) {
                // The value has a schema and not a FieldSchema.
                typeMap.put(
                    // Schema validation enforces that the key is a String.
                    (String) entry.getKey(),
                    getJavaObj(entry.getValue(), hcatFS.getMapValueSchema().get(0)));
            }
            return typeMap;
        case STRING:
        case INT:
        case BIGINT:
        case FLOAT:
        case DOUBLE:
            return pigObj;
        case SMALLINT:
            if ((Integer) pigObj < Short.MIN_VALUE || (Integer) pigObj > Short.MAX_VALUE) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return ((Integer) pigObj).shortValue();
        case TINYINT:
            if ((Integer) pigObj < Byte.MIN_VALUE || (Integer) pigObj > Byte.MAX_VALUE) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return ((Integer) pigObj).byteValue();
        case BOOLEAN:
            if (pigObj instanceof String) {
                if (((String) pigObj).trim().compareTo("0") == 0) {
                    return Boolean.FALSE;
                }
                if (((String) pigObj).trim().compareTo("1") == 0) {
                    return Boolean.TRUE;
                }
                throw new BackendException("Unexpected type " + type + " for value " + pigObj
                    + " of class " + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);
            }
            return Boolean.parseBoolean(pigObj.toString());
        case DECIMAL:
            BigDecimal bd = (BigDecimal) pigObj;
            DecimalTypeInfo dti = (DecimalTypeInfo) hcatFS.getTypeInfo();
            if (bd.precision() > dti.precision() || bd.scale() > dti.scale()) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return HiveDecimal.create(bd);
        case CHAR:
            String charVal = (String) pigObj;
            CharTypeInfo cti = (CharTypeInfo) hcatFS.getTypeInfo();
            if (charVal.length() > cti.getLength()) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return new HiveChar(charVal, cti.getLength());
        case VARCHAR:
            String varcharVal = (String) pigObj;
            VarcharTypeInfo vti = (VarcharTypeInfo) hcatFS.getTypeInfo();
            if (varcharVal.length() > vti.getLength()) {
                handleOutOfRangeValue(pigObj, hcatFS);
                return null;
            }
            return new HiveVarchar(varcharVal, vti.getLength());
        case TIMESTAMP:
            DateTime dt = (DateTime) pigObj;
            return new Timestamp(dt.getMillis()); // getMillis() returns UTC time regardless of TZ
        case DATE:
            /*
             * We ignore any TZ setting on the Pig value since java.sql.Date doesn't have one (in
             * any meaningful way). So the assumption is that if the Pig value has a 0 time
             * component (midnight), it reasonably 'fits' into a Hive DATE. If the time part is
             * not 0, it's considered out of range for the target type.
             */
            DateTime dateTime = (DateTime) pigObj;
            if (dateTime.getMillisOfDay() != 0) {
                handleOutOfRangeValue(pigObj, hcatFS,
                    "Time component must be 0 (midnight) in local timezone; Local TZ val='" + pigObj + "'");
                return null;
            }
            /*
             * java.sql.Date is a poorly defined API. Some (all?) SerDes call toString() on it
             * [e.g. LazySimpleSerDe uses LazyUtils.writePrimitiveUTF8()], which automatically
             * adjusts for the local timezone. Date.valueOf() also uses the local timezone, as
             * does Date(int, int, int). Also see PigHCatUtil#extractPigObject() for the
             * corresponding read op. This way a DATETIME from Pig, when stored into Hive and
             * read back, comes back with the same value.
             */
            return new Date(dateTime.getYear() - 1900, dateTime.getMonthOfYear() - 1, dateTime.getDayOfMonth());
        default:
            throw new BackendException("Unexpected HCat type " + type + " for value " + pigObj
                + " of class " + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);
        }
    } catch (BackendException e) {
        // Provide the path to the field in the error message.
        throw new BackendException((hcatFS.getName() == null ? " " : hcatFS.getName() + ".") + e.getMessage(), e);
    }
}
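The DATE branch above only accepts values at local midnight and then rebuilds the date field-by-field via the deprecated constructor, so the calendar date, rather than the underlying instant, is what survives the store-and-read round trip. A minimal stand-alone sketch of that conversion, assuming Joda-Time on the classpath (the class name PigDateConversionSketch is just for illustration):

import java.sql.Date;
import org.joda.time.DateTime;

public class PigDateConversionSketch {
    public static void main(String[] args) {
        // A Pig DATETIME at local midnight, as required by the check above.
        DateTime dateTime = new DateTime(2013, 12, 31, 0, 0);
        if (dateTime.getMillisOfDay() != 0) {
            throw new IllegalArgumentException("Time component must be 0 (midnight)");
        }
        // Same conversion as the DATE case: year offset from 1900, zero-based month.
        Date hiveDate = new Date(dateTime.getYear() - 1900,
            dateTime.getMonthOfYear() - 1, dateTime.getDayOfMonth());
        System.out.println(hiveDate); // 2013-12-31, whatever the local timezone
    }
}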