List of usage examples for java.lang.Class#toString()
public String toString()
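Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what Class.toString() actually returns: the kind of type ("class " or "interface ") followed by the fully qualified name, or just the type name for primitives.

public class ClassToStringDemo {
    public static void main(String[] args) {
        // Class.toString() prefixes the kind of type to the fully qualified name.
        System.out.println(String.class.toString());    // "class java.lang.String"
        System.out.println(Runnable.class.toString());  // "interface java.lang.Runnable"
        System.out.println(int.class.toString());       // "int" (primitives print only the name)

        // getName() is the usual alternative when the "class "/"interface " prefix is unwanted.
        System.out.println(String.class.getName());     // "java.lang.String"
    }
}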
From source file:org.broadinstitute.gatk.engine.walkers.WalkerTest.java
/**
 * execute the test, given the following:
 * @param testName the name of the test
 * @param testClassName the name of the class that contains the test
 * @param args the argument list
 * @param expectedException the expected exception or null
 */
private void executeTest(String testName, String testClassName, String args, Class expectedException) {
    CommandLineGATK instance = new CommandLineGATK();
    String[] command = Utils.escapeExpressions(args);

    // run the executable
    boolean gotAnException = false;
    try {
        final String now = new SimpleDateFormat("HH:mm:ss").format(new Date());
        final String cmdline = Utils.join(" ", command);
        System.out.println(String.format("[%s] Executing test %s:%s with GATK arguments: %s",
                now, testClassName, testName, cmdline));
        // also write the command line to the HTML log for convenient follow-up
        // do the replaceAll so paths become relative to the current
        BaseTest.log(cmdline.replaceAll(publicTestDirRoot, "").replaceAll(privateTestDirRoot, ""));
        CommandLineExecutable.start(instance, command);
    } catch (Exception e) {
        gotAnException = true;
        if (expectedException != null) {
            // we expect an exception
            //System.out.println(String.format("Wanted exception %s, saw %s", expectedException, e.getClass()));
            if (expectedException.isInstance(e)) {
                // it's the type we expected
                //System.out.println(String.format(" => %s PASSED", name));
            } else {
                final String message = String.format(
                        "Test %s:%s expected exception %s but instead got %s with error message %s",
                        testClassName, testName, expectedException, e.getClass(), e.getMessage());
                if (e.getCause() != null) {
                    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    final PrintStream ps = new PrintStream(baos);
                    e.getCause().printStackTrace(ps);
                    BaseTest.log(message);
                    BaseTest.log(baos.toString());
                }
                Assert.fail(message);
            }
        } else {
            // we didn't expect an exception but we got one :-(
            throw new RuntimeException(e);
        }
    }

    // catch failures from the integration test
    if (expectedException != null) {
        if (!gotAnException)
            // we expected an exception but didn't see it
            Assert.fail(String.format("Test %s:%s expected exception %s but none was thrown",
                    testClassName, testName, expectedException.toString()));
    } else {
        if (CommandLineExecutable.result != 0) {
            throw new RuntimeException("Error running the GATK with arguments: " + args);
        }
    }
}
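The pattern above passes an expected exception type as a Class, checks it with isInstance(), and uses toString() when building the failure message. A compact sketch of the same idea follows; ExpectedExceptionCheck and runAndCheck are hypothetical helpers, not part of GATK, and Assert here is TestNG's org.testng.Assert (swap in JUnit's if preferred).

import org.testng.Assert;

public class ExpectedExceptionCheck {

    /** Run a task and compare any thrown exception against an expected exception class. */
    static void runAndCheck(Runnable task, Class<? extends Throwable> expectedException) {
        try {
            task.run();
        } catch (Throwable t) {
            if (expectedException != null && expectedException.isInstance(t)) {
                return; // the type we expected
            }
            Assert.fail("Expected " + (expectedException == null ? "no exception" : expectedException.toString())
                    + " but got " + t.getClass().toString() + ": " + t.getMessage());
        }
        if (expectedException != null) {
            // Class.toString() yields e.g. "class java.lang.NumberFormatException"
            Assert.fail("Expected exception " + expectedException.toString() + " but none was thrown");
        }
    }

    public static void main(String[] args) {
        runAndCheck(() -> Integer.parseInt("not a number"), NumberFormatException.class);
    }
}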
From source file:net.starschema.clouddb.jdbc.BQDatabaseMetadata.java
/**
 * <p>
 * <h1>Implementation Details:</h1><br>
 * No wrappers in the current version
 * </p>
 *
 * @throws SQLException
 */
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
    throw new BQSQLException("no object found that implements " + iface.toString());
}
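This driver always rejects unwrap() and names the requested interface via iface.toString(). A sketch of the usual unwrap/isWrapperFor pairing for a wrapper that does hold a delegate is shown below; the class and delegate are illustrative, not part of the BigQuery driver above.

import java.sql.SQLException;
import java.sql.Wrapper;

public class DelegatingWrapper implements Wrapper {
    private final Object delegate;

    public DelegatingWrapper(Object delegate) {
        this.delegate = delegate;
    }

    @Override
    public <T> T unwrap(Class<T> iface) throws SQLException {
        if (iface.isInstance(delegate)) {
            return iface.cast(delegate);
        }
        // Class.toString() ("interface java.sql.Connection", ...) identifies the requested type.
        throw new SQLException("no object found that implements " + iface.toString());
    }

    @Override
    public boolean isWrapperFor(Class<?> iface) {
        return iface.isInstance(delegate);
    }
}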
From source file:com.microsoft.tfs.client.clc.commands.Command.java
/**
 * Creates an instance of an option of the given type that must exist in the
 * given options map.
 *
 * @param optionClass
 *        the class of the option type to create (not null).
 * @param optionsMap
 *        the map that contains the option (not null).
 * @return the instance of the option requested.
 */
private Option instantiateOptionForSyntaxString(final Class optionClass, final OptionsMap optionsMap) {
    Check.notNull(optionClass, "optionClass"); //$NON-NLS-1$
    Check.notNull(optionsMap, "optionsMap"); //$NON-NLS-1$

    // Look up the canonical name of the option by the class.
    final String canonicalName = (String) optionsMap.getOptionsToCanonicalNamesMap().get(optionClass);

    if (canonicalName == null) {
        final String messageFormat = Messages.getString("Command.OptionNotRegisteredFormat"); //$NON-NLS-1$
        final String message = MessageFormat.format(messageFormat, optionClass.toString(), Main.VENDOR_NAME);
        log.error(message);
        throw new RuntimeException(message);
    }

    final Option option = optionsMap.instantiateOption(canonicalName);
    option.setMatchedAlias(canonicalName);

    return option;
}
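Here the Class object is a map key and optionClass.toString() identifies the unregistered type in the error message. A minimal registry sketch of the same lookup-or-fail pattern follows; the names are hypothetical, not the TFS command-line client API.

import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;

public class OptionRegistry {
    private final Map<Class<?>, String> canonicalNames = new HashMap<>();

    public void register(Class<?> optionClass, String canonicalName) {
        canonicalNames.put(optionClass, canonicalName);
    }

    public String canonicalNameFor(Class<?> optionClass) {
        String name = canonicalNames.get(optionClass);
        if (name == null) {
            // Class.toString() makes the offending type explicit in the diagnostic.
            throw new IllegalStateException(
                    MessageFormat.format("Option type {0} is not registered", optionClass.toString()));
        }
        return name;
    }

    public static void main(String[] args) {
        OptionRegistry registry = new OptionRegistry();
        registry.register(Integer.class, "count");
        System.out.println(registry.canonicalNameFor(Integer.class)); // count
    }
}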
From source file:org.openhab.binding.paradox.internal.ParadoxBinding.java
private State queryDataFromDevice(String deviceId, ParadoxCommandType commmandType,
        Class<? extends Item> itemType) {

    DeviceConfig device = deviceConfigCache.get(deviceId);

    if (device == null) {
        logger.error("Could not find device '{}'", deviceId);
        return null;
    }

    ParadoxInterface remoteController = device.getConnection();

    if (remoteController == null) {
        logger.error("Could not find device '{}'", deviceId);
        return null;
    }

    try {
        if (remoteController.isConnected() == false)
            remoteController.connect();

        switch (commmandType) {
        case ZONE1:
            ZoneStatus zoneState1 = remoteController.getZoneStatus(1);
            return new DecimalType(zoneState1.toInt());
        case ZONE2:
            ZoneStatus zoneState2 = remoteController.getZoneStatus(2);
            return new DecimalType(zoneState2.toInt());
        case ZONE3:
            ZoneStatus zoneState3 = remoteController.getZoneStatus(3);
            return new DecimalType(zoneState3.toInt());
        case ZONE10:
            ZoneStatus zoneState10 = remoteController.getZoneStatus(10);
            return new DecimalType(zoneState10.toInt());
        /*
        case USER1:
            ZoneLabel userState1 = remoteController.getUserStatus();
            return new StringType(userState1.toString());
        case USER2:
            ZoneLabel userState2 = remoteController.getUserStatus();
            return new StringType(userState2.toString());
        */
        case ERR_CODE:
            int err = remoteController.getError();
            logger.warn("Get '{}' not implemented!", commmandType.toString());
            return new DecimalType(err);
        case ERR_MSG:
            String errString = remoteController.getErrorString();
            logger.warn("Get '{}' not implemented!", commmandType.toString());
            return new StringType(errString);
        case POWER_STATE:
            int pwr = remoteController.getPowerState();
            logger.warn("Get '{}' not implemented!", commmandType.toString());
            return new DecimalType(pwr);
        default:
            logger.warn("Unknown '{}' command!", commmandType);
            return null;
        }
    } catch (ParadoxException e) {
        logger.warn("Couldn't execute command '{}', {}", commmandType.toString(), e);
    } catch (Exception e) {
        logger.warn("Couldn't create state of type '{}'", itemType.toString());
        return null;
    }

    return null;
}
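In this binding, toString() is called on an enum constant and on the Item class only to feed SLF4J log placeholders. A short sketch of that logging pattern follows; the CommandType enum and class names are illustrative, not openHAB APIs. Note that SLF4J calls toString() on placeholder arguments itself, so passing the object directly is equivalent.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CommandLogger {
    private static final Logger logger = LoggerFactory.getLogger(CommandLogger.class);

    enum CommandType { ERR_CODE, ERR_MSG, POWER_STATE }

    static void report(CommandType command, Class<?> itemType) {
        // The {} placeholders are filled with the arguments' toString() output,
        // so an explicit .toString(), as in the binding above, is optional.
        logger.warn("Get '{}' not implemented!", command);
        logger.warn("Couldn't create state of type '{}'", itemType);
    }

    public static void main(String[] args) {
        report(CommandType.POWER_STATE, Number.class);
    }
}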
From source file:cern.c2mon.shared.common.datatag.address.impl.HardwareAddressImpl.java
/**
 * Create a HardwareAddress object from its XML representation.
 *
 * @param pElement DOM element containing the XML representation of a HardwareAddress object, as created by the
 *        toConfigXML() method.
 * @throws RuntimeException if unable to instantiate the Hardware address
 * @see cern.c2mon.shared.common.datatag.address.HardwareAddress#toConfigXML()
 */
public final synchronized HardwareAddress fromConfigXML(Element pElement) {
    Class hwAddressClass = null;
    HardwareAddressImpl hwAddress = null;

    try {
        hwAddressClass = Class.forName(pElement.getAttribute("class"));
        hwAddress = (HardwareAddressImpl) hwAddressClass.newInstance();
    } catch (ClassNotFoundException cnfe) {
        cnfe.printStackTrace();
        throw new RuntimeException("Exception caught when instantiating a hardware address from XML", cnfe);
    } catch (IllegalAccessException iae) {
        iae.printStackTrace();
        throw new RuntimeException("Exception caught when instantiating a hardware address from XML", iae);
    } catch (InstantiationException ie) {
        ie.printStackTrace();
        throw new RuntimeException("Exception caught when instantiating a hardware address from XML", ie);
    }

    NodeList fields = pElement.getChildNodes();
    Node fieldNode = null;
    int fieldsCount = fields.getLength();
    String fieldName;
    String fieldValueString;
    String fieldTypeName = "";

    for (int i = 0; i < fieldsCount; i++) {
        fieldNode = fields.item(i);
        if (fieldNode.getNodeType() == Node.ELEMENT_NODE) {
            fieldName = fieldNode.getNodeName();
            if (fieldNode.getFirstChild() != null) {
                fieldValueString = fieldNode.getFirstChild().getNodeValue();
            } else {
                fieldValueString = "";
            }
            try {
                Field field = hwAddressClass.getDeclaredField(decodeFieldName(fieldName));
                fieldTypeName = field.getType().getName();

                if (fieldTypeName.equals("short")) {
                    field.setShort(hwAddress, Short.parseShort(fieldValueString));
                } else if (fieldTypeName.equals("java.lang.Short")) {
                    // must be a Short (setting an Integer here would fail with IllegalArgumentException)
                    field.set(hwAddress, new Short(Short.parseShort(fieldValueString)));
                } else if (fieldTypeName.equals("int")) {
                    field.setInt(hwAddress, Integer.parseInt(fieldValueString));
                } else if (fieldTypeName.equals("java.lang.Integer")) {
                    field.set(hwAddress, new Integer(Integer.parseInt(fieldValueString)));
                } else if (fieldTypeName.equals("float")) {
                    field.setFloat(hwAddress, Float.parseFloat(fieldValueString));
                } else if (fieldTypeName.equals("java.lang.Float")) {
                    field.set(hwAddress, new Float(Float.parseFloat(fieldValueString)));
                } else if (fieldTypeName.equals("double")) {
                    field.setDouble(hwAddress, Double.parseDouble(fieldValueString));
                } else if (fieldTypeName.equals("java.lang.Double")) {
                    field.set(hwAddress, new Double(Double.parseDouble(fieldValueString)));
                } else if (fieldTypeName.equals("long")) {
                    field.setLong(hwAddress, Long.parseLong(fieldValueString));
                } else if (fieldTypeName.equals("java.lang.Long")) {
                    field.set(hwAddress, new Long(Long.parseLong(fieldValueString)));
                } else if (fieldTypeName.equals("byte")) {
                    field.setByte(hwAddress, Byte.parseByte(fieldValueString));
                } else if (fieldTypeName.equals("java.lang.Byte")) {
                    field.set(hwAddress, new Byte(Byte.parseByte(fieldValueString)));
                } else if (fieldTypeName.equals("char")) {
                    field.setChar(hwAddress, fieldValueString.charAt(0));
                } else if (fieldTypeName.equals("java.lang.Character")) {
                    field.set(hwAddress, new Character(fieldValueString.charAt(0)));
                } else if (fieldTypeName.equals("boolean")) {
                    field.setBoolean(hwAddress, Boolean.getBoolean(fieldValueString));
                } else if (fieldTypeName.equals("java.lang.Boolean")) {
                    field.set(hwAddress, new Boolean(Boolean.getBoolean(fieldValueString)));
                } else if (fieldTypeName.equals("java.util.HashMap")) {
                    field.set(hwAddress, SimpleXMLParser.domNodeToMap(fieldNode));
                } else if (field.getType().isEnum()) {
                    Object[] enumConstants = field.getType().getEnumConstants();
                    for (Object enumConstant : enumConstants) {
                        if (enumConstant.toString().equals(fieldValueString)) {
                            field.set(hwAddress, enumConstant);
                        }
                    }
                } else {
                    field.set(hwAddress, fieldValueString);
                }
            } catch (NoSuchFieldException nsfe) {
                String errorMsg = "fromConfigXML(...) - Error occured while parsing XML <HardwareAddress> tag. "
                        + "The following variable does not exist in " + hwAddressClass.toString() + ": \""
                        + decodeFieldName(fieldName) + "\"";
                log.error(errorMsg);
                throw new IllegalArgumentException(errorMsg);
            } catch (IllegalAccessException iae) {
                iae.printStackTrace();
                throw new RuntimeException(iae);
            } catch (NumberFormatException npe) {
                String errorMsg = "fromConfigXML(...) - Error occured while parsing XML <HardwareAddress> tag. Field \""
                        + fieldName + "\" shall not be empty since we expect a \"" + fieldTypeName
                        + "\" value. Please correct the XML configuration for " + hwAddressClass.toString();
                log.error(errorMsg);
                throw new IllegalArgumentException(errorMsg);
            }
        }
    }
    return hwAddress;
}
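The method above populates fields reflectively from XML and uses hwAddressClass.toString() to name the offending class in its error messages. A compact sketch of that reflective pattern, reduced to two field types, is shown below; the Target class and field names are hypothetical.

import java.lang.reflect.Field;

public class ReflectiveSetter {
    static class Target {
        private int port;
        private String host;
    }

    /** Set a field from its string value, dispatching on the declared type and
     *  reporting the owning class via Class.toString() when the field is missing. */
    static void setField(Object bean, String fieldName, String value) {
        Class<?> clazz = bean.getClass();
        try {
            Field field = clazz.getDeclaredField(fieldName);
            field.setAccessible(true);
            Class<?> type = field.getType();
            if (type == int.class) {
                field.setInt(bean, Integer.parseInt(value));
            } else if (type == String.class) {
                field.set(bean, value);
            } else {
                throw new IllegalArgumentException("Unsupported field type " + type.toString());
            }
        } catch (NoSuchFieldException e) {
            // clazz.toString() names the class that lacks the field, as fromConfigXML does above.
            throw new IllegalArgumentException("No field \"" + fieldName + "\" in " + clazz.toString(), e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) {
        Target t = new Target();
        setField(t, "port", "8080");
        setField(t, "host", "localhost");
        System.out.println(t.host + ":" + t.port); // localhost:8080
    }
}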
From source file:org.apache.axis2.jaxws.utility.ConvertUtils.java
/** * Utility function to convert an Object to some desired Class. * <p/>// ww w .j av a 2 s. c o m * Normally this is used for T[] to List<T> processing. Other conversions are also done (i.e. * HashMap <->Hashtable, etc.) * <p/> * Use the isConvertable() method to determine if conversion is possible. Note that any changes * to convert() must also be accompanied by similar changes to isConvertable() * * @param arg the array to convert * @param destClass the actual class we want * @return object of destClass if conversion possible, otherwise returns arg */ public static Object convert(Object arg, Class destClass) throws WebServiceException { if (destClass == null) { return arg; } if (arg != null && destClass.isAssignableFrom(arg.getClass())) { return arg; } if (log.isDebugEnabled()) { String clsName = "null"; if (arg != null) clsName = arg.getClass().getName(); log.debug("Converting an object of type " + clsName + " to an object of type " + destClass.getName()); } // Convert between Calendar and Date if (arg instanceof Calendar && destClass == Date.class) { return ((Calendar) arg).getTime(); } // Convert between HashMap and Hashtable if (arg instanceof HashMap && destClass == Hashtable.class) { return new Hashtable((HashMap) arg); } if (arg instanceof InputStream && destClass == byte[].class) { try { InputStream is = (InputStream) arg; return getBytesFromStream(is); } catch (IOException e) { throw ExceptionFactory.makeWebServiceException(e); } } if (arg instanceof Source && destClass == byte[].class) { try { if (arg instanceof StreamSource) { InputStream is = ((StreamSource) arg).getInputStream(); if (is != null) { return getBytesFromStream(is); } } ByteArrayOutputStream out = new ByteArrayOutputStream(); Result result = new StreamResult(out); Transformer transformer = TransformerFactory.newInstance().newTransformer(); transformer.transform((Source) arg, result); byte[] bytes = out.toByteArray(); return bytes; } catch (Exception e) { throw ExceptionFactory.makeWebServiceException(e); } } if (arg instanceof DataHandler) { try { InputStream is = ((DataHandler) arg).getInputStream(); if (destClass == Image.class) { return ImageIO.read(is); } else if (destClass == Source.class) { return new StreamSource(is); } byte[] bytes = getBytesFromStream(is); return convert(bytes, destClass); } catch (Exception e) { throw ExceptionFactory.makeWebServiceException(e); } } if (arg instanceof byte[] && destClass == String.class) { return new String((byte[]) arg); } // If the destination is an array and the source // is a suitable component, return an array with // the single item. /* REVIEW do we need to support atomic to array conversion ? 
if (arg != null && destClass.isArray() && !destClass.getComponentType().equals(Object.class) && destClass.getComponentType().isAssignableFrom(arg.getClass())) { Object array = Array.newInstance(destClass.getComponentType(), 1); Array.set(array, 0, arg); return array; } */ // Return if no conversion is available if (!(arg instanceof Collection || (arg != null && arg.getClass().isArray()))) { return arg; } if (arg == null) { return null; } // The arg may be an array or List Object destValue = null; int length = 0; if (arg.getClass().isArray()) { length = Array.getLength(arg); } else { length = ((Collection) arg).size(); } try { if (destClass.isArray()) { if (destClass.getComponentType().isPrimitive()) { Object array = Array.newInstance(destClass.getComponentType(), length); // Assign array elements if (arg.getClass().isArray()) { for (int i = 0; i < length; i++) { Array.set(array, i, Array.get(arg, i)); } } else { int idx = 0; for (Iterator i = ((Collection) arg).iterator(); i.hasNext();) { Array.set(array, idx++, i.next()); } } destValue = array; } else { Object[] array; try { array = (Object[]) Array.newInstance(destClass.getComponentType(), length); } catch (Exception e) { return arg; } // Use convert to assign array elements. if (arg.getClass().isArray()) { for (int i = 0; i < length; i++) { array[i] = convert(Array.get(arg, i), destClass.getComponentType()); } } else { int idx = 0; for (Iterator i = ((Collection) arg).iterator(); i.hasNext();) { array[idx++] = convert(i.next(), destClass.getComponentType()); } } destValue = array; } } else if (Collection.class.isAssignableFrom(destClass)) { Collection newList = null; try { // if we are trying to create an interface, build something // that implements the interface if (destClass == Collection.class || destClass == List.class) { newList = new ArrayList(); } else if (destClass == Set.class) { newList = new HashSet(); } else { newList = (Collection) destClass.newInstance(); } } catch (Exception e) { // No FFDC code needed // Couldn't build one for some reason... so forget it. return arg; } if (arg.getClass().isArray()) { for (int j = 0; j < length; j++) { newList.add(Array.get(arg, j)); } } else { for (Iterator j = ((Collection) arg).iterator(); j.hasNext();) { newList.add(j.next()); } } destValue = newList; } else { destValue = arg; } } catch (Throwable t) { throw ExceptionFactory.makeWebServiceException( Messages.getMessage("convertUtils", arg.getClass().toString(), destClass.toString()), t); } return destValue; }
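When the collection/array conversion at the end of convert() fails, the error message names both the source and the requested type via Class.toString(). A tiny sketch of that error-reporting pattern follows; it uses a plain RuntimeException rather than the Axis2 ExceptionFactory, and the method name is illustrative.

import java.util.List;

public class ConversionError {
    static RuntimeException conversionFailure(Object arg, Class<?> destClass, Throwable cause) {
        // Both classes are rendered with toString(), e.g. "class [I" and "interface java.util.List".
        String message = "Could not convert " + arg.getClass().toString()
                + " to " + destClass.toString();
        return new RuntimeException(message, cause);
    }

    public static void main(String[] args) {
        RuntimeException e = conversionFailure(new int[0], List.class, new ClassCastException());
        System.out.println(e.getMessage());
        // Could not convert class [I to interface java.util.List
    }
}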
From source file:org.opendatakit.services.database.utilities.ODKDatabaseImplUtils.java
private void insertValueIntoContentValues(Map<String, Object> cv, Class<?> theClass, String name, Object obj) {

    if (obj == null) {
        cv.put(name, null);
        return;
    }

    // Couldn't use the CursorUtils.getIndexAsType
    // because assigning the result to Object v
    // would not work for the currValues.put function
    if (theClass == Long.class) {
        cv.put(name, (Long) obj);
    } else if (theClass == Integer.class) {
        cv.put(name, (Integer) obj);
    } else if (theClass == Double.class) {
        cv.put(name, (Double) obj);
    } else if (theClass == String.class) {
        cv.put(name, (String) obj);
    } else if (theClass == Boolean.class) {
        // stored as integers
        Integer v = (Integer) obj;
        cv.put(name, Boolean.valueOf(v != 0));
    } else if (theClass == ArrayList.class) {
        cv.put(name, (String) obj);
    } else if (theClass == HashMap.class) {
        // json deserialization of an object
        cv.put(name, (String) obj);
    } else {
        throw new IllegalStateException("Unexpected data type in SQLite table " + theClass.toString());
    }
}
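This method dispatches on Class tokens with == (safe because each class has a single Class object per class loader) and falls back to theClass.toString() for the "unexpected type" error. A minimal standalone sketch of that dispatch follows; the names are illustrative, not the ODK database API.

import java.util.HashMap;
import java.util.Map;

public class TypedPut {
    static void put(Map<String, Object> row, Class<?> declaredType, String column, Object value) {
        if (declaredType == Long.class || declaredType == Double.class || declaredType == String.class) {
            row.put(column, value);
        } else if (declaredType == Boolean.class) {
            // stored as an integer flag, as in the SQLite-backed code above
            row.put(column, ((Integer) value) != 0);
        } else {
            // Class.toString() (e.g. "class java.util.Date") identifies the unsupported type.
            throw new IllegalStateException("Unexpected data type " + declaredType.toString());
        }
    }

    public static void main(String[] args) {
        Map<String, Object> row = new HashMap<>();
        put(row, Boolean.class, "active", 1);
        System.out.println(row); // {active=true}
    }
}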
From source file:com.jhh.hdb.sqlparser.MySemanticAnalyzer.java
@SuppressWarnings("nls") public void getMetaData(QB qb, ReadEntity parentInput) throws SemanticException { try {/*from w w w . j a va 2 s. c o m*/ LOG.info("Get metadata for source tables"); // Go over the tables and populate the related structures. // We have to materialize the table alias list since we might // modify it in the middle for view rewrite. List<String> tabAliases = new ArrayList<String>(qb.getTabAliases()); // Keep track of view alias to view name and read entity // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T // keeps track of full view name and read entity corresponding to alias V3, V3:V2, V3:V2:V1. // This is needed for tracking the dependencies for inputs, along with their parents. Map<String, ObjectPair<String, ReadEntity>> aliasToViewInfo = new HashMap<String, ObjectPair<String, ReadEntity>>(); /* * used to capture view to SQ conversions. This is used to check for * recursive CTE invocations. */ Map<String, String> sqAliasToCTEName = new HashMap<String, String>(); for (String alias : tabAliases) { String tab_name = qb.getTabNameForAlias(alias); Table tab = db.getTable(tab_name, false); if (tab == null) { /* * if this s a CTE reference: * Add its AST as a SubQuery to this QB. */ ASTNode cteNode = findCTEFromName(qb, tab_name.toLowerCase()); if (cteNode != null) { String cte_name = tab_name.toLowerCase(); if (ctesExpanded.contains(cte_name)) { throw new SemanticException("Recursive cte " + tab_name + " detected (cycle: " + StringUtils.join(ctesExpanded, " -> ") + " -> " + tab_name + ")."); } addCTEAsSubQuery(qb, cte_name, alias); sqAliasToCTEName.put(alias, cte_name); continue; } ASTNode src = qb.getParseInfo().getSrcForAlias(alias); if (null != src) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(src)); } else { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(alias)); } } // Disallow INSERT INTO on bucketized tables boolean isAcid = isAcidTable(tab); boolean isTableWrittenTo = qb.getParseInfo().isInsertIntoTable(tab.getDbName(), tab.getTableName()); if (isTableWrittenTo && tab.getNumBuckets() > 0 && !isAcid) { throw new SemanticException(ErrorMsg.INSERT_INTO_BUCKETIZED_TABLE.getMsg("Table: " + tab_name)); } // Disallow update and delete on non-acid tables if ((updating() || deleting()) && !isAcid && isTableWrittenTo) { //isTableWrittenTo: delete from acidTbl where a in (select id from nonAcidTable) //so only assert this if we are actually writing to this table // isAcidTable above also checks for whether we are using an acid compliant // transaction manager. But that has already been caught in // UpdateDeleteSemanticAnalyzer, so if we are updating or deleting and getting nonAcid // here, it means the table itself doesn't support it. throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, tab_name); } // We check offline of the table, as if people only select from an // non-existing partition of an offline table, the partition won't // be added to inputs and validate() won't have the information to // check the table's offline status. // TODO: Modify the code to remove the checking here and consolidate // it in validate() // if (tab.isOffline()) { throw new SemanticException(ErrorMsg.OFFLINE_TABLE_OR_PARTITION .getMsg("Table " + getUnescapedName(qb.getParseInfo().getSrcForAlias(alias)))); } if (tab.isView()) { if (qb.getParseInfo().isAnalyzeCommand()) { throw new SemanticException(ErrorMsg.ANALYZE_VIEW.getMsg()); } String fullViewName = tab.getDbName() + "." 
+ tab.getTableName(); // Prevent view cycles if (viewsExpanded.contains(fullViewName)) { throw new SemanticException("Recursive view " + fullViewName + " detected (cycle: " + StringUtils.join(viewsExpanded, " -> ") + " -> " + fullViewName + ")."); } replaceViewReferenceWithDefinition(qb, tab, tab_name, alias); // This is the last time we'll see the Table objects for views, so add it to the inputs // now ReadEntity viewInput = new ReadEntity(tab, parentInput); viewInput = PlanUtils.addInput(inputs, viewInput); aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput)); viewAliasToInput.put(getAliasId(alias, qb), viewInput); continue; } if (!InputFormat.class.isAssignableFrom(tab.getInputFormatClass())) { throw new SemanticException(generateErrorMessage(qb.getParseInfo().getSrcForAlias(alias), ErrorMsg.INVALID_INPUT_FORMAT_TYPE.getMsg())); } qb.getMetaData().setSrcForAlias(alias, tab); if (qb.getParseInfo().isAnalyzeCommand()) { // allow partial partition specification for nonscan since noscan is fast. TableSpec ts = new TableSpec(db, conf, (ASTNode) ast.getChild(0), true, this.noscan); if (ts.specType == SpecType.DYNAMIC_PARTITION) { // dynamic partitions try { ts.partitions = db.getPartitionsByNames(ts.tableHandle, ts.partSpec); } catch (HiveException e) { throw new SemanticException( generateErrorMessage(qb.getParseInfo().getSrcForAlias(alias), "Cannot get partitions for " + ts.partSpec), e); } } // validate partial scan command QBParseInfo qbpi = qb.getParseInfo(); if (qbpi.isPartialScanAnalyzeCommand()) { Class<? extends InputFormat> inputFormatClass = null; switch (ts.specType) { case TABLE_ONLY: case DYNAMIC_PARTITION: inputFormatClass = ts.tableHandle.getInputFormatClass(); break; case STATIC_PARTITION: inputFormatClass = ts.partHandle.getInputFormatClass(); break; default: assert false; } // throw a HiveException for formats other than rcfile or orcfile. 
if (!(inputFormatClass.equals(RCFileInputFormat.class) || inputFormatClass.equals(OrcInputFormat.class))) { throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_NON_RCFILE.getMsg()); } } tab.setTableSpec(ts); qb.getParseInfo().addTableSpec(alias, ts); } ReadEntity parentViewInfo = PlanUtils.getParentViewInfo(getAliasId(alias, qb), viewAliasToInput); PlanUtils.addInput(inputs, new ReadEntity(tab, parentViewInfo, parentViewInfo == null)); } LOG.info("Get metadata for subqueries"); // Go over the subqueries and getMetaData for these for (String alias : qb.getSubqAliases()) { boolean wasView = aliasToViewInfo.containsKey(alias); boolean wasCTE = sqAliasToCTEName.containsKey(alias); ReadEntity newParentInput = null; if (wasView) { viewsExpanded.add(aliasToViewInfo.get(alias).getFirst()); newParentInput = aliasToViewInfo.get(alias).getSecond(); } else if (wasCTE) { ctesExpanded.add(sqAliasToCTEName.get(alias)); } QBExpr qbexpr = qb.getSubqForAlias(alias); getMetaData(qbexpr, newParentInput); if (wasView) { viewsExpanded.remove(viewsExpanded.size() - 1); } else if (wasCTE) { ctesExpanded.remove(ctesExpanded.size() - 1); } } RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); LOG.info("Get metadata for destination tables"); // Go over all the destination structures and populate the related // metadata QBParseInfo qbp = qb.getParseInfo(); for (String name : qbp.getClauseNamesForDest()) { ASTNode ast = qbp.getDestForClause(name); switch (ast.getToken().getType()) { case HiveParser.TOK_TAB: { TableSpec ts = new TableSpec(db, conf, ast); if (ts.tableHandle.isView()) { throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg()); } Class<?> outputFormatClass = ts.tableHandle.getOutputFormatClass(); if (!ts.tableHandle.isNonNative() && !HiveOutputFormat.class.isAssignableFrom(outputFormatClass)) { throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg(ast, "The class is " + outputFormatClass.toString())); } // TableSpec ts is got from the query (user specified), // which means the user didn't specify partitions in their query, // but whether the table itself is partitioned is not know. if (ts.specType != SpecType.STATIC_PARTITION) { // This is a table or dynamic partition qb.getMetaData().setDestForAlias(name, ts.tableHandle); // has dynamic as well as static partitions if (ts.partSpec != null && ts.partSpec.size() > 0) { qb.getMetaData().setPartSpecForAlias(name, ts.partSpec); } } else { // This is a partition qb.getMetaData().setDestForAlias(name, ts.partHandle); } if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { // Set that variable to automatically collect stats during the MapReduce job qb.getParseInfo().setIsInsertToTable(true); // Add the table spec for the destination table. 
qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts); } break; } case HiveParser.TOK_DIR: { // This is a dfs file String fname = stripQuotes(ast.getChild(0).getText()); if ((!qb.getParseInfo().getIsSubQ()) && (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) { if (qb.isCTAS()) { qb.setIsQuery(false); ctx.setResDir(null); ctx.setResFile(null); // allocate a temporary output dir on the location of the table String tableName = getUnescapedName((ASTNode) ast.getChild(0)); String[] names = Utilities.getDbTableName(tableName); Path location; try { Warehouse wh = new Warehouse(conf); location = wh.getDatabasePath(db.getDatabase(names[0])); } catch (MetaException e) { throw new SemanticException(e); } try { fname = ctx.getExtTmpPathRelTo(FileUtils.makeQualified(location, conf)).toString(); } catch (Exception e) { throw new SemanticException(generateErrorMessage(ast, "Error creating temporary folder on: " + location.toString()), e); } if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { TableSpec ts = new TableSpec(db, conf, this.ast); // Set that variable to automatically collect stats during the MapReduce job qb.getParseInfo().setIsInsertToTable(true); // Add the table spec for the destination table. qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts); } } else { // This is the only place where isQuery is set to true; it defaults to false. qb.setIsQuery(true); Path stagingPath = getStagingDirectoryPathname(qb); fname = stagingPath.toString(); ctx.setResDir(stagingPath); } } boolean isDfsFile = true; if (ast.getChildCount() >= 2 && ast.getChild(1).getText().toLowerCase().equals("local")) { isDfsFile = false; } qb.getMetaData().setDestForAlias(name, fname, isDfsFile); CreateTableDesc directoryDesc = new CreateTableDesc(); boolean directoryDescIsSet = false; int numCh = ast.getChildCount(); for (int num = 1; num < numCh; num++) { ASTNode child = (ASTNode) ast.getChild(num); if (child != null) { if (storageFormat.fillStorageFormat(child)) { directoryDesc.setOutputFormat(storageFormat.getOutputFormat()); directoryDesc.setSerName(storageFormat.getSerde()); directoryDescIsSet = true; continue; } switch (child.getToken().getType()) { case HiveParser.TOK_TABLEROWFORMAT: rowFormatParams.analyzeRowFormat(child); directoryDesc.setFieldDelim(rowFormatParams.fieldDelim); directoryDesc.setLineDelim(rowFormatParams.lineDelim); directoryDesc.setCollItemDelim(rowFormatParams.collItemDelim); directoryDesc.setMapKeyDelim(rowFormatParams.mapKeyDelim); directoryDesc.setFieldEscape(rowFormatParams.fieldEscape); directoryDesc.setNullFormat(rowFormatParams.nullFormat); directoryDescIsSet = true; break; case HiveParser.TOK_TABLESERIALIZER: ASTNode serdeChild = (ASTNode) child.getChild(0); storageFormat.setSerde(unescapeSQLString(serdeChild.getChild(0).getText())); directoryDesc.setSerName(storageFormat.getSerde()); directoryDescIsSet = true; break; } } } if (directoryDescIsSet) { qb.setDirectoryDesc(directoryDesc); } break; } default: throw new SemanticException( generateErrorMessage(ast, "Unknown Token Type " + ast.getToken().getType())); } } } catch (HiveException e) { // Has to use full name to make sure it does not conflict with // org.apache.commons.lang.StringUtils LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); throw new SemanticException(e.getMessage(), e); } }
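Among its many checks, the analyzer above verifies that the destination table's output format class is assignable to HiveOutputFormat and reports the offender with outputFormatClass.toString(). The sketch below mirrors just that validation step; RequiredFormat and the sample classes are hypothetical stand-ins, not Hive APIs.

public class FormatCheck {
    interface RequiredFormat { }
    static class TextFormat implements RequiredFormat { }
    static class BrokenFormat { }

    /** Verify that a configured class implements the required interface,
     *  naming the offender with Class.toString() on failure. */
    static void validate(Class<?> configured) {
        if (!RequiredFormat.class.isAssignableFrom(configured)) {
            throw new IllegalArgumentException("Invalid output format. The class is " + configured.toString());
        }
    }

    public static void main(String[] args) {
        validate(TextFormat.class);          // passes
        try {
            validate(BrokenFormat.class);    // fails
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}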
From source file:org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.java
@SuppressWarnings("nls") private void getMetaData(QB qb, ReadEntity parentInput) throws HiveException { LOG.info("Get metadata for source tables"); // Go over the tables and populate the related structures. // We have to materialize the table alias list since we might // modify it in the middle for view rewrite. List<String> tabAliases = new ArrayList<String>(qb.getTabAliases()); // Keep track of view alias to view name and read entity // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T // keeps track of full view name and read entity corresponding to alias V3, V3:V2, V3:V2:V1. // This is needed for tracking the dependencies for inputs, along with their parents. Map<String, ObjectPair<String, ReadEntity>> aliasToViewInfo = new HashMap<String, ObjectPair<String, ReadEntity>>(); /*/*from w w w .j a va 2 s . c o m*/ * used to capture view to SQ conversions. This is used to check for * recursive CTE invocations. */ Map<String, String> sqAliasToCTEName = new HashMap<String, String>(); for (String alias : tabAliases) { String tabName = qb.getTabNameForAlias(alias); String cteName = tabName.toLowerCase(); // Get table details from tabNameToTabObject cache Table tab = getTableObjectByName(tabName, false); if (tab != null) { // do a deep copy, in case downstream changes it. tab = new Table(tab.getTTable().deepCopy()); } if (tab == null || tab.getDbName().equals(SessionState.get().getCurrentDatabase())) { Table materializedTab = ctx.getMaterializedTable(cteName); if (materializedTab == null) { // we first look for this alias from CTE, and then from catalog. CTEClause cte = findCTEFromName(qb, cteName); if (cte != null) { if (!cte.materialize) { addCTEAsSubQuery(qb, cteName, alias); sqAliasToCTEName.put(alias, cteName); continue; } tab = materializeCTE(cteName, cte); } } else { tab = materializedTab; } } if (tab == null) { ASTNode src = qb.getParseInfo().getSrcForAlias(alias); if (null != src) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(src)); } else { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(alias)); } } if (tab.isView()) { if (qb.getParseInfo().isAnalyzeCommand()) { throw new SemanticException(ErrorMsg.ANALYZE_VIEW.getMsg()); } String fullViewName = tab.getDbName() + "." + tab.getTableName(); // Prevent view cycles if (viewsExpanded.contains(fullViewName)) { throw new SemanticException("Recursive view " + fullViewName + " detected (cycle: " + StringUtils.join(viewsExpanded, " -> ") + " -> " + fullViewName + ")."); } replaceViewReferenceWithDefinition(qb, tab, tabName, alias); // This is the last time we'll see the Table objects for views, so add it to the inputs // now. isInsideView will tell if this view is embedded in another view. 
// If the view is Inside another view, it should have at least one parent if (qb.isInsideView() && parentInput == null) { parentInput = PlanUtils.getParentViewInfo(getAliasId(alias, qb), viewAliasToInput); } ReadEntity viewInput = new ReadEntity(tab, parentInput, !qb.isInsideView()); viewInput = PlanUtils.addInput(inputs, viewInput); aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput)); String aliasId = getAliasId(alias, qb); if (aliasId != null) { aliasId = aliasId.replace(SemanticAnalyzer.SUBQUERY_TAG_1, "") .replace(SemanticAnalyzer.SUBQUERY_TAG_2, ""); } viewAliasToInput.put(aliasId, viewInput); continue; } if (!InputFormat.class.isAssignableFrom(tab.getInputFormatClass())) { throw new SemanticException(generateErrorMessage(qb.getParseInfo().getSrcForAlias(alias), ErrorMsg.INVALID_INPUT_FORMAT_TYPE.getMsg())); } qb.getMetaData().setSrcForAlias(alias, tab); if (qb.getParseInfo().isAnalyzeCommand()) { // allow partial partition specification for nonscan since noscan is fast. TableSpec ts = new TableSpec(db, conf, (ASTNode) ast.getChild(0), true, this.noscan); if (ts.specType == SpecType.DYNAMIC_PARTITION) { // dynamic partitions try { ts.partitions = db.getPartitionsByNames(ts.tableHandle, ts.partSpec); } catch (HiveException e) { throw new SemanticException(generateErrorMessage(qb.getParseInfo().getSrcForAlias(alias), "Cannot get partitions for " + ts.partSpec), e); } } // validate partial scan command QBParseInfo qbpi = qb.getParseInfo(); if (qbpi.isPartialScanAnalyzeCommand()) { Class<? extends InputFormat> inputFormatClass = null; switch (ts.specType) { case TABLE_ONLY: case DYNAMIC_PARTITION: inputFormatClass = ts.tableHandle.getInputFormatClass(); break; case STATIC_PARTITION: inputFormatClass = ts.partHandle.getInputFormatClass(); break; default: assert false; } // throw a HiveException for formats other than rcfile or orcfile. 
if (!(inputFormatClass.equals(RCFileInputFormat.class) || inputFormatClass.equals(OrcInputFormat.class))) { throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_NON_RCFILE.getMsg()); } } tab.setTableSpec(ts); qb.getParseInfo().addTableSpec(alias, ts); } ReadEntity parentViewInfo = PlanUtils.getParentViewInfo(getAliasId(alias, qb), viewAliasToInput); // Temporary tables created during the execution are not the input sources if (!PlanUtils.isValuesTempTable(alias)) { PlanUtils.addInput(inputs, new ReadEntity(tab, parentViewInfo, parentViewInfo == null), mergeIsDirect); } } LOG.info("Get metadata for subqueries"); // Go over the subqueries and getMetaData for these for (String alias : qb.getSubqAliases()) { boolean wasView = aliasToViewInfo.containsKey(alias); boolean wasCTE = sqAliasToCTEName.containsKey(alias); ReadEntity newParentInput = null; if (wasView) { viewsExpanded.add(aliasToViewInfo.get(alias).getFirst()); newParentInput = aliasToViewInfo.get(alias).getSecond(); } else if (wasCTE) { ctesExpanded.add(sqAliasToCTEName.get(alias)); } QBExpr qbexpr = qb.getSubqForAlias(alias); getMetaData(qbexpr, newParentInput); if (wasView) { viewsExpanded.remove(viewsExpanded.size() - 1); } else if (wasCTE) { ctesExpanded.remove(ctesExpanded.size() - 1); } } RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); LOG.info("Get metadata for destination tables"); // Go over all the destination structures and populate the related // metadata QBParseInfo qbp = qb.getParseInfo(); for (String name : qbp.getClauseNamesForDest()) { ASTNode ast = qbp.getDestForClause(name); switch (ast.getToken().getType()) { case HiveParser.TOK_TAB: { TableSpec ts = new TableSpec(db, conf, ast); if (ts.tableHandle.isView() || ts.tableHandle.isMaterializedView()) { throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg()); } Class<?> outputFormatClass = ts.tableHandle.getOutputFormatClass(); if (!ts.tableHandle.isNonNative() && !HiveOutputFormat.class.isAssignableFrom(outputFormatClass)) { throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg(ast, "The class is " + outputFormatClass.toString())); } boolean isTableWrittenTo = qb.getParseInfo().isInsertIntoTable(ts.tableHandle.getDbName(), ts.tableHandle.getTableName()); isTableWrittenTo |= (qb.getParseInfo().getInsertOverwriteTables() .get(getUnescapedName((ASTNode) ast.getChild(0), ts.tableHandle.getDbName()) .toLowerCase()) != null); assert isTableWrittenTo : "Inconsistent data structure detected: we are writing to " + ts.tableHandle + " in " + name + " but it's not in isInsertIntoTable() or getInsertOverwriteTables()"; // Disallow update and delete on non-acid tables boolean isAcid = AcidUtils.isAcidTable(ts.tableHandle); if ((updating(name) || deleting(name)) && !isAcid) { // Whether we are using an acid compliant transaction manager has already been caught in // UpdateDeleteSemanticAnalyzer, so if we are updating or deleting and getting nonAcid // here, it means the table itself doesn't support it. throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.tableName); } // TableSpec ts is got from the query (user specified), // which means the user didn't specify partitions in their query, // but whether the table itself is partitioned is not know. 
if (ts.specType != SpecType.STATIC_PARTITION) { // This is a table or dynamic partition qb.getMetaData().setDestForAlias(name, ts.tableHandle); // has dynamic as well as static partitions if (ts.partSpec != null && ts.partSpec.size() > 0) { qb.getMetaData().setPartSpecForAlias(name, ts.partSpec); } } else { // This is a partition qb.getMetaData().setDestForAlias(name, ts.partHandle); } if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { // Add the table spec for the destination table. qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts); } break; } case HiveParser.TOK_DIR: { // This is a dfs file String fname = stripQuotes(ast.getChild(0).getText()); if ((!qb.getParseInfo().getIsSubQ()) && (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) { if (qb.isCTAS() || qb.isMaterializedView()) { qb.setIsQuery(false); ctx.setResDir(null); ctx.setResFile(null); Path location; // If the CTAS query does specify a location, use the table location, else use the db location if (qb.getTableDesc() != null && qb.getTableDesc().getLocation() != null) { location = new Path(qb.getTableDesc().getLocation()); } else { // allocate a temporary output dir on the location of the table String tableName = getUnescapedName((ASTNode) ast.getChild(0)); String[] names = Utilities.getDbTableName(tableName); try { Warehouse wh = new Warehouse(conf); //Use destination table's db location. String destTableDb = qb.getTableDesc() != null ? qb.getTableDesc().getDatabaseName() : null; if (destTableDb == null) { destTableDb = names[0]; } location = wh.getDatabasePath(db.getDatabase(destTableDb)); } catch (MetaException e) { throw new SemanticException(e); } } try { fname = ctx.getExtTmpPathRelTo(FileUtils.makeQualified(location, conf)).toString(); } catch (Exception e) { throw new SemanticException(generateErrorMessage(ast, "Error creating temporary folder on: " + location.toString()), e); } if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { TableSpec ts = new TableSpec(db, conf, this.ast); // Add the table spec for the destination table. qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts); } } else { // This is the only place where isQuery is set to true; it defaults to false. 
qb.setIsQuery(true); Path stagingPath = getStagingDirectoryPathname(qb); fname = stagingPath.toString(); ctx.setResDir(stagingPath); } } boolean isDfsFile = true; if (ast.getChildCount() >= 2 && ast.getChild(1).getText().toLowerCase().equals("local")) { isDfsFile = false; } // Set the destination for the SELECT query inside the CTAS qb.getMetaData().setDestForAlias(name, fname, isDfsFile); CreateTableDesc directoryDesc = new CreateTableDesc(); boolean directoryDescIsSet = false; int numCh = ast.getChildCount(); for (int num = 1; num < numCh; num++) { ASTNode child = (ASTNode) ast.getChild(num); if (child != null) { if (storageFormat.fillStorageFormat(child)) { directoryDesc.setOutputFormat(storageFormat.getOutputFormat()); directoryDesc.setSerName(storageFormat.getSerde()); directoryDescIsSet = true; continue; } switch (child.getToken().getType()) { case HiveParser.TOK_TABLEROWFORMAT: rowFormatParams.analyzeRowFormat(child); directoryDesc.setFieldDelim(rowFormatParams.fieldDelim); directoryDesc.setLineDelim(rowFormatParams.lineDelim); directoryDesc.setCollItemDelim(rowFormatParams.collItemDelim); directoryDesc.setMapKeyDelim(rowFormatParams.mapKeyDelim); directoryDesc.setFieldEscape(rowFormatParams.fieldEscape); directoryDesc.setNullFormat(rowFormatParams.nullFormat); directoryDescIsSet = true; break; case HiveParser.TOK_TABLESERIALIZER: ASTNode serdeChild = (ASTNode) child.getChild(0); storageFormat.setSerde(unescapeSQLString(serdeChild.getChild(0).getText())); directoryDesc.setSerName(storageFormat.getSerde()); if (serdeChild.getChildCount() > 1) { directoryDesc.setSerdeProps(new HashMap<String, String>()); readProps((ASTNode) serdeChild.getChild(1).getChild(0), directoryDesc.getSerdeProps()); } directoryDescIsSet = true; break; } } } if (directoryDescIsSet) { qb.setDirectoryDesc(directoryDesc); } break; } default: throw new SemanticException( generateErrorMessage(ast, "Unknown Token Type " + ast.getToken().getType())); } } }
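The partial-scan branch above whitelists specific input format classes (inputFormatClass.equals(RCFileInputFormat.class) || ...) before allowing the operation. The sketch below shows the same whitelist idea with a Set of Class tokens and toString() in the rejection message; the format classes are stand-ins, not Hive's RCFile or ORC input formats.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class FormatWhitelist {
    static class ColumnarFormatA { }
    static class ColumnarFormatB { }
    static class RowFormat { }

    private static final Set<Class<?>> ALLOWED =
            new HashSet<>(Arrays.<Class<?>>asList(ColumnarFormatA.class, ColumnarFormatB.class));

    static void requireAllowed(Class<?> inputFormatClass) {
        if (!ALLOWED.contains(inputFormatClass)) {
            // Set.toString() and Class.toString() together name the accepted and rejected formats.
            throw new IllegalArgumentException(
                    "Operation only supported for " + ALLOWED + ", not " + inputFormatClass.toString());
        }
    }

    public static void main(String[] args) {
        requireAllowed(ColumnarFormatA.class);
        try {
            requireAllowed(RowFormat.class);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}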