List of usage examples for java.util Properties size
@Override public int size()
From source file: org.apache.gobblin.data.management.conversion.hive.query.HiveAvroORCQueryGenerator.java
/*** * Generate DDL query to create a different format (default: ORC) Hive table for a given Avro Schema * @param schema Avro schema to use to generate the DDL for new Hive table * @param tblName New Hive table name/*from www. j a va 2 s. c o m*/ * @param tblLocation New hive table location * @param optionalDbName Optional DB name, if not specified it defaults to 'default' * @param optionalPartitionDDLInfo Optional partition info in form of map of partition key, partition type pair * If not specified, the table is assumed to be un-partitioned ie of type snapshot * @param optionalClusterInfo Optional cluster info * @param optionalSortOrderInfo Optional sort order * @param optionalNumOfBuckets Optional number of buckets * @param optionalRowFormatSerde Optional row format serde, default is ORC * @param optionalInputFormat Optional input format serde, default is ORC * @param optionalOutputFormat Optional output format serde, default is ORC * @param tableProperties Optional table properties * @param isEvolutionEnabled If schema evolution is turned on * @param destinationTableMeta Optional destination table metadata @return Generated DDL query to create new Hive table */ public static String generateCreateTableDDL(Schema schema, String tblName, String tblLocation, Optional<String> optionalDbName, Optional<Map<String, String>> optionalPartitionDDLInfo, Optional<List<String>> optionalClusterInfo, Optional<Map<String, COLUMN_SORT_ORDER>> optionalSortOrderInfo, Optional<Integer> optionalNumOfBuckets, Optional<String> optionalRowFormatSerde, Optional<String> optionalInputFormat, Optional<String> optionalOutputFormat, Properties tableProperties, boolean isEvolutionEnabled, Optional<Table> destinationTableMeta, Map<String, String> hiveColumns) { Preconditions.checkNotNull(schema); Preconditions.checkArgument(StringUtils.isNotBlank(tblName)); Preconditions.checkArgument(StringUtils.isNotBlank(tblLocation)); String dbName = optionalDbName.isPresent() ? 
optionalDbName.get() : DEFAULT_DB_NAME; String rowFormatSerde = optionalRowFormatSerde.isPresent() ? optionalRowFormatSerde.get() : DEFAULT_ROW_FORMAT_SERDE; String inputFormat = optionalInputFormat.isPresent() ? optionalInputFormat.get() : DEFAULT_ORC_INPUT_FORMAT; String outputFormat = optionalOutputFormat.isPresent() ? optionalOutputFormat.get() : DEFAULT_ORC_OUTPUT_FORMAT; tableProperties = getTableProperties(tableProperties); // Start building Hive DDL // Refer to Hive DDL manual for explanation of clauses: // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-Create/Drop/TruncateTable StringBuilder ddl = new StringBuilder(); // Create statement ddl.append(String.format("CREATE EXTERNAL TABLE IF NOT EXISTS `%s`.`%s` ", dbName, tblName)); // .. open bracket for CREATE ddl.append("( \n"); // 1. If evolution is enabled, and destination table does not exists // .. use columns from new schema // (evolution does not matter if its new destination table) // 2. If evolution is enabled, and destination table does exists // .. use columns from new schema // (alter table will be used before moving data from staging to final table) // 3. If evolution is disabled, and destination table does not exists // .. use columns from new schema // (evolution does not matter if its new destination table) // 4. If evolution is disabled, and destination table does exists // .. use columns from destination schema if (isEvolutionEnabled || !destinationTableMeta.isPresent()) { log.info("Generating DDL using source schema"); ddl.append(generateAvroToHiveColumnMapping(schema, Optional.of(hiveColumns), true, dbName + "." + tblName)); } else { log.info("Generating DDL using destination schema"); ddl.append( generateDestinationToHiveColumnMapping(Optional.of(hiveColumns), destinationTableMeta.get())); } // .. 
close bracket for CREATE ddl.append(") \n"); // Partition info if (optionalPartitionDDLInfo.isPresent() && optionalPartitionDDLInfo.get().size() > 0) { ddl.append("PARTITIONED BY ( "); boolean isFirst = true; Map<String, String> partitionInfoMap = optionalPartitionDDLInfo.get(); for (Map.Entry<String, String> partitionInfo : partitionInfoMap.entrySet()) { if (isFirst) { isFirst = false; } else { ddl.append(", "); } ddl.append(String.format("`%s` %s", partitionInfo.getKey(), partitionInfo.getValue())); } ddl.append(" ) \n"); } if (optionalClusterInfo.isPresent()) { if (!optionalNumOfBuckets.isPresent()) { throw new IllegalArgumentException((String.format( "CLUSTERED BY requested, but no NUM_BUCKETS specified for table %s.%s", dbName, tblName))); } ddl.append("CLUSTERED BY ( "); boolean isFirst = true; for (String clusterByCol : optionalClusterInfo.get()) { if (!hiveColumns.containsKey(clusterByCol)) { throw new IllegalArgumentException(String.format( "Requested CLUSTERED BY column: %s " + "is not present in schema for table %s.%s", clusterByCol, dbName, tblName)); } if (isFirst) { isFirst = false; } else { ddl.append(", "); } ddl.append(String.format("`%s`", clusterByCol)); } ddl.append(" ) "); if (optionalSortOrderInfo.isPresent() && optionalSortOrderInfo.get().size() > 0) { Map<String, COLUMN_SORT_ORDER> sortOrderInfoMap = optionalSortOrderInfo.get(); ddl.append("SORTED BY ( "); isFirst = true; for (Map.Entry<String, COLUMN_SORT_ORDER> sortOrderInfo : sortOrderInfoMap.entrySet()) { if (!hiveColumns.containsKey(sortOrderInfo.getKey())) { throw new IllegalArgumentException(String.format( "Requested SORTED BY column: %s " + "is not present in schema for table %s.%s", sortOrderInfo.getKey(), dbName, tblName)); } if (isFirst) { isFirst = false; } else { ddl.append(", "); } ddl.append(String.format("`%s` %s", sortOrderInfo.getKey(), sortOrderInfo.getValue())); } ddl.append(" ) "); } ddl.append(String.format(" INTO %s BUCKETS %n", optionalNumOfBuckets.get())); } else { 
if (optionalSortOrderInfo.isPresent()) { throw new IllegalArgumentException(String.format( "SORTED BY requested, but no CLUSTERED BY specified for table %s.%s", dbName, tblName)); } } // Field Terminal ddl.append("ROW FORMAT SERDE \n"); ddl.append(String.format(" '%s' %n", rowFormatSerde)); // Stored as ORC ddl.append("STORED AS INPUTFORMAT \n"); ddl.append(String.format(" '%s' %n", inputFormat)); ddl.append("OUTPUTFORMAT \n"); ddl.append(String.format(" '%s' %n", outputFormat)); // Location ddl.append("LOCATION \n"); ddl.append(String.format(" '%s' %n", tblLocation)); // Table properties if (null != tableProperties && tableProperties.size() > 0) { ddl.append("TBLPROPERTIES ( \n"); boolean isFirst = true; for (String property : tableProperties.stringPropertyNames()) { if (isFirst) { isFirst = false; } else { ddl.append(", \n"); } ddl.append(String.format(" '%s'='%s'", property, tableProperties.getProperty(property))); } ddl.append(") \n"); } return ddl.toString(); }
From source file: edu.ku.brc.specify.utilapps.BuildSampleDatabase.java
/**
 * Creates the dialog to find out what database and what database driver to use.
 * <p>
 * Side effects (order matters): configures embedded-DB and working paths from the
 * command-line args, initializes the icon manager and progress frame, sets the
 * system properties that wire up the app-prefs/data-provider/security factories,
 * loads local prefs, then resolves connection credentials from the initializer
 * prefs (falling back to backstop prefs) and shows the setup dialog.
 *
 * @param args command-line arguments of the form {@code -Dkey=value}; recognized keys:
 *             {@code -Dappdir}, {@code -Dappdatadir}, {@code -Dembeddeddbdir}, {@code -Dmobile}
 */
public void buildSetup(final String[] args) {
    // NOTE(review): hard-coded false, so the "empty build" branch below is dead code — confirm intent.
    boolean doEmptyBuild = false;

    setEmbeddedDBPath(getDefaultEmbeddedDBPath()); // on the local machine

    // Parse "-Dkey=value" style args; anything not splitting into exactly two parts is ignored.
    if (args != null && args.length > 0) {
        for (String arg : args) {
            String[] pair = StringUtils.split(arg, "=");
            if (pair.length == 2) {
                String option = pair[0];
                String value = pair[1];
                if (option.equals("-Dappdir")) {
                    setDefaultWorkingPath(value);
                } else if (option.equals("-Dappdatadir")) {
                    setBaseAppDataDir(value);
                } else if (option.equals("-Dembeddeddbdir")) {
                    setEmbeddedDBPath(value);
                } else if (option.equals("-Dmobile")) {
                    // -Dmobile takes no value of its own; it switches to the mobile embedded-DB path.
                    setEmbeddedDBPath(getDefaultMobileEmbeddedDBPath());
                }
            }
        }
    }

    if (StringUtils.isEmpty(getAppName())) {
        setAppName("Specify");
    }

    if (hideFrame) {
        System.out.println("Embedded DB Path [ " + getEmbeddedDBPath() + " ]");
    }

    // Then set this
    IconManager.setApplicationClass(Specify.class);
    IconManager.aliasImages("SpBuilder", // Source //$NON-NLS-1$
            "AppIcon"); // Dest //$NON-NLS-1$

    createProgressFrame("Building Specify Database");

    // These system properties must be set before the prefs/form machinery is touched below.
    System.setProperty(AppPreferences.factoryName, "edu.ku.brc.specify.config.AppPrefsDBIOIImpl"); // Needed by AppReferences
    System.setProperty("edu.ku.brc.dbsupport.DataProvider",
            "edu.ku.brc.specify.dbsupport.HibernateDataProvider"); // Needed By the Form System and any Data Get/Set
    System.setProperty(SecurityMgr.factoryName, "edu.ku.brc.af.auth.specify.SpecifySecurityMgr"); // Needed for Tree Field Names //$NON-NLS-1$

    AppPrefsCache.setUseLocalOnly(true);
    AppPreferences localPrefs = AppPreferences.getLocalPrefs();
    localPrefs.setDirPath(getAppDataDir());
    localPrefs.load();

    // Backstop prefs supply driver/database defaults; the database-specific prefs win when non-empty.
    backstopPrefs = getInitializePrefs(null);

    String driverName = backstopPrefs.getProperty("initializer.drivername", "MySQL");
    String databaseName = backstopPrefs.getProperty("initializer.databasename", "testfish");

    Properties props = getInitializePrefs(databaseName);
    if (props.size() > 0) {
        initPrefs = props;
    } else {
        initPrefs = backstopPrefs;
    }

    // Credential pairs: (username, password) for the DB user, the master (SA) user, and the agent user.
    Pair<String, String> dbUser = new Pair<String, String>(
            initPrefs.getProperty("initializer.dbUserName", "Specify"),
            initPrefs.getProperty("initializer.dbPassword", "Specify"));

    Pair<String, String> saUser = new Pair<String, String>(
            initPrefs.getProperty("initializer.saUserName", "Master"),
            initPrefs.getProperty("initializer.saPassword", "Master"));

    Pair<String, String> cmUser = new Pair<String, String>(
            initPrefs.getProperty("useragent.username", "testuser"),
            initPrefs.getProperty("useragent.password", "testuser"));

    if (doEmptyBuild) {
        // Dead branch while doEmptyBuild is hard-coded false (see above).
        /*ensureDerbyDirectory(driverName);
        DisciplineType disciplineType = DisciplineType.getDiscipline("fish");
        DatabaseDriverInfo driverInfo = DatabaseDriverInfo.getDriver(driverName);
        DBConfigInfo config = new DBConfigInfo(driverInfo, "localhost", "WorkBench", "guest", "guest",
                "guest", "guest", "guest@ku.edu", disciplineType, "Institution", "Division");
        buildEmptyDatabase(config); */
    } else {
        setupDlg = new SetUpBuildDlg(databaseName, driverName, dbUser, saUser, cmUser, this);
        UIHelper.centerAndShow(setupDlg);
    }
}
From source file: org.openTwoFactor.client.util.TwoFactorClientCommonUtils.java
/** * read properties from a resource, dont modify the properties returned since they are cached * @param resourceName/* w ww. j a va 2 s .c o m*/ * @param useCache * @param exceptionIfNotExist * @param classInJar if not null, then look for the jar where this file is, and look in the same dir * @param callingLog * @return the properties or null if not exist */ public synchronized static Properties propertiesFromResourceName(String resourceName, boolean useCache, boolean exceptionIfNotExist, Class<?> classInJar, StringBuilder callingLog) { Properties properties = resourcePropertiesCache.get(resourceName); if (!useCache || !resourcePropertiesCache.containsKey(resourceName)) { properties = new Properties(); boolean success = false; URL url = computeUrl(resourceName, true); InputStream inputStream = null; try { inputStream = url.openStream(); properties.load(inputStream); success = true; String theLog = "Reading resource: " + resourceName + ", from: " + url.toURI(); if (LOG != null) { LOG.debug(theLog); } if (callingLog != null) { callingLog.append(theLog); } } catch (Exception e) { //clear out just in case properties.clear(); //lets look next to jar File jarFile = classInJar == null ? null : jarFile(classInJar); File parentDir = jarFile == null ? null : jarFile.getParentFile(); String fileName = parentDir == null ? null : (stripLastSlashIfExists(fileCanonicalPath(parentDir)) + File.separator + resourceName); File configFile = fileName == null ? 
null : new File(fileName); try { //looks like we have a match if (configFile != null && configFile.exists() && configFile.isFile()) { inputStream = new FileInputStream(configFile); properties.load(inputStream); success = true; String theLog = "Reading resource: " + resourceName + ", from: " + fileCanonicalPath(configFile); if (LOG != null) { LOG.debug(theLog); } if (callingLog != null) { callingLog.append(theLog); } } } catch (Exception e2) { if (LOG != null) { LOG.debug("Error reading from file for resource: " + resourceName + ", file: " + fileName, e2); } } if (!success) { properties = null; if (exceptionIfNotExist) { throw new RuntimeException("Problem with resource: '" + resourceName + "'", e); } } } finally { closeQuietly(inputStream); if (useCache && properties != null && properties.size() > 0) { resourcePropertiesCache.put(resourceName, properties); } } } return properties; }