// Unit tests for org.apache.hadoop.hive.ql.exec.Utilities (Apache Hive).
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec; import static org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension; import java.io.File; import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import com.google.common.collect.Sets; import com.google.common.io.Files; import junit.framework.Assert; import junit.framework.TestCase; import org.apache.commons.io.FileUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.mapred.JobConf; public class TestUtilities extends TestCase { public static final Log LOG = 
LogFactory.getLog(TestUtilities.class); public void testGetFileExtension() { JobConf jc = new JobConf(); assertEquals("No extension for uncompressed unknown format", "", getFileExtension(jc, false, null)); assertEquals("No extension for compressed unknown format", "", getFileExtension(jc, true, null)); assertEquals("No extension for uncompressed text format", "", getFileExtension(jc, false, new HiveIgnoreKeyTextOutputFormat())); assertEquals("Deflate for uncompressed text format", ".deflate", getFileExtension(jc, true, new HiveIgnoreKeyTextOutputFormat())); assertEquals("No extension for uncompressed default format", "", getFileExtension(jc, false)); assertEquals("Deflate for uncompressed default format", ".deflate", getFileExtension(jc, true)); String extension = ".myext"; jc.set("hive.output.file.extension", extension); assertEquals("Custom extension for uncompressed unknown format", extension, getFileExtension(jc, false, null)); assertEquals("Custom extension for compressed unknown format", extension, getFileExtension(jc, true, null)); assertEquals("Custom extension for uncompressed text format", extension, getFileExtension(jc, false, new HiveIgnoreKeyTextOutputFormat())); assertEquals("Custom extension for uncompressed text format", extension, getFileExtension(jc, true, new HiveIgnoreKeyTextOutputFormat())); } public void testSerializeTimestamp() { Timestamp ts = new Timestamp(1374554702000L); ts.setNanos(123456); ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts); List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1); children.add(constant); ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc(TypeInfoFactory.timestampTypeInfo, new GenericUDFFromUtcTimestamp(), children); assertEquals(desc.getExprString(), Utilities.deserializeExpression(Utilities.serializeExpression(desc)).getExprString()); } public void testgetDbTableName() throws HiveException { String tablename; String[] dbtab; SessionState.start(new HiveConf(this.getClass())); 
String curDefaultdb = SessionState.get().getCurrentDatabase(); //test table without db portion tablename = "tab1"; dbtab = Utilities.getDbTableName(tablename); assertEquals("db name", curDefaultdb, dbtab[0]); assertEquals("table name", tablename, dbtab[1]); //test table with db portion tablename = "dab1.tab1"; dbtab = Utilities.getDbTableName(tablename); assertEquals("db name", "dab1", dbtab[0]); assertEquals("table name", "tab1", dbtab[1]); //test invalid table name tablename = "dab1.tab1.x1"; try { dbtab = Utilities.getDbTableName(tablename); fail("exception was expected for invalid table name"); } catch (HiveException ex) { assertEquals("Invalid table name " + tablename, ex.getMessage()); } } public void testGetJarFilesByPath() { File f = Files.createTempDir(); String jarFileName1 = f.getAbsolutePath() + File.separator + "a.jar"; String jarFileName2 = f.getAbsolutePath() + File.separator + "b.jar"; File jarFile = new File(jarFileName1); try { FileUtils.touch(jarFile); HashSet<String> jars = (HashSet) Utilities.getJarFilesByPath(f.getAbsolutePath()); Assert.assertEquals(Sets.newHashSet(jarFile.getAbsolutePath()), jars); File jarFile2 = new File(jarFileName2); FileUtils.touch(jarFile2); String newPath = "file://" + jarFileName1 + "," + "file://" + jarFileName2; jars = (HashSet) Utilities.getJarFilesByPath(newPath); Assert.assertEquals(Sets.newHashSet("file://" + jarFileName1, "file://" + jarFileName2), jars); } catch (IOException e) { LOG.error("failed to copy file to reloading folder", e); Assert.fail(e.getMessage()); } finally { FileUtils.deleteQuietly(f); } } public void testReplaceTaskId() { String taskID = "000000"; int bucketNum = 1; String newTaskID = Utilities.replaceTaskId(taskID, bucketNum); Assert.assertEquals("000001", newTaskID); taskID = "(ds%3D1)000001"; newTaskID = Utilities.replaceTaskId(taskID, 5); Assert.assertEquals("(ds%3D1)000005", newTaskID); } }