List of usage examples for org.apache.hadoop.conf Configuration getInt
public int getInt(String name, int defaultValue)

Get the value of the name property as an int. If no such property exists, the provided default value is returned; if the property is set but cannot be parsed as an int, a NumberFormatException is thrown.
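Every entry below is a real call site that reads a numeric setting with a fallback default. As a minimal, self-contained sketch of the same pattern (the property keys and defaults here are hypothetical and chosen only for illustration, not taken from any of the projects below):

import org.apache.hadoop.conf.Configuration;

public class GetIntDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Hypothetical property key, set only for this demo.
        conf.setInt("demo.retry.count", 5);

        // Returns 5 because the property was set above.
        int retries = conf.getInt("demo.retry.count", 3);

        // Returns the default (10000) because the key is absent.
        int timeoutMs = conf.getInt("demo.timeout.ms", 10000);

        System.out.println(retries + " " + timeoutMs);
    }
}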
. From source file:com.qubole.rubix.spi.CacheConfig.java
License:Apache License
public static int getClientTimeout(Configuration conf) {
    return conf.getInt(clientTimeoutConf, 60000); // ms
}
From source file:com.qubole.rubix.spi.CacheConfig.java
License:Apache License
public static int getMaxRetries(Configuration conf) {
    return conf.getInt(maxRetriesConf, 3);
}
From source file:com.qubole.rubix.spi.CacheConfig.java
License:Apache License
static int getCacheDataChosenColumns(Configuration c) {
    return c.getInt(DATA_CACHE_TABLE_COLS_CHOSEN, 0);
}
From source file:com.qubole.rubix.spi.CacheConfig.java
License:Apache License
static int getCacheDataMinColumns(Configuration c) {
    return c.getInt(DATA_CACHE_TABLE_MIN_COLS, 0);
}
From source file:com.qubole.rubix.spi.ClusterManager.java
License:Apache License
public void initialize(Configuration conf) {
    splitSize = conf.getLong(splitSizeConf, splitSize);
    nodeRefreshTime = conf.getInt(nodeRefreshTimeConf, nodeRefreshTime);
}
From source file:com.quest.orahive.HiveJdbcClient.java
License:Apache License
private static OraHiveOptions getOraHiveOptions(Configuration conf) {
    OraHiveOptions result = new OraHiveOptions();

    result.hiveJdbcUrl = conf.get(Constants.CONF_HIVE_JDBC_URL);
    result.hiveUserName = conf.get(Constants.CONF_HIVE_JDBC_USER);

    result.oracleJdbcUrl = conf.get(Constants.CONF_ORACLE_JDBC_URL);
    result.oracleUserName = conf.get(Constants.CONF_ORACLE_JDBC_USER);
    result.oracleSchema = conf.get(Constants.CONF_ORACLE_SCHEMA, "");
    result.oracleTable = conf.get(Constants.CONF_ORACLE_TABLENAME, "");
    result.oracleTablespace = conf.get(Constants.CONF_ORACLE_TABLESPACE, "");

    result.insertBatchSize = conf.getInt(Constants.CONF_ORACLE_INSERT_BATCH_SIZE,
            Constants.DEFAULT_ORACLE_INSERT_BATCH_SIZE);
    result.commitBatchCount = conf.getInt(Constants.CONF_ORACLE_INSERT_COMMIT_BATCH_COUNT,
            Constants.DEFAULT_ORACLE_INSERT_COMMIT_BATCH_COUNT);

    String exportModeStr = conf.get(Constants.CONF_EXPORT_MODE);
    if (exportModeStr != null) {
        try {
            result.exportMode = Constants.ExportMode.valueOf(exportModeStr.trim().toUpperCase());
        } catch (IllegalArgumentException e) {
            LOG.error("Invalid export mode specified.", e);
            throw new IllegalArgumentException("Invalid export mode specified.", e);
        }
    }

    result.hql = conf.get(Constants.CONF_HIVE_QUERY);
    if (result.hql == null || result.hql.trim().isEmpty()) {
        LOG.debug(String.format("No HQL was provided via the \"%s\" argument.", Constants.CONF_HIVE_QUERY));

        String hqlFileName = conf.get(Constants.CONF_HIVE_QUERY_FILENAME);
        if (hqlFileName == null || hqlFileName.isEmpty()) {
            LOG.error(String.format("A Hive HQL statement must be provided.\n"
                    + "Please specify one of the following command-line arguments:\n"
                    + "\t" + Constants.CONF_HIVE_QUERY + "\n"
                    + "\t" + Constants.CONF_HIVE_QUERY_FILENAME));
            System.exit(1);
        }
        try {
            result.hql = getHql(hqlFileName);
        } catch (FileNotFoundException ex) {
            LOG.error(String.format("Unable to load the HQL file named \"%s\".", hqlFileName), ex);
            System.exit(1);
        } catch (IOException ex) {
            LOG.error(String.format("Unable to load the HQL file named \"%s\".", hqlFileName), ex);
            System.exit(1);
        }
    }

    result.hivePassword = getPassword(conf, Constants.CONF_HIVE_JDBC_PASSWORD, String
            .format("Enter the password for the Hive Service at %s :", conf.get(Constants.CONF_HIVE_JDBC_URL)));
    result.oraclePassword = getPassword(conf, Constants.CONF_ORACLE_JDBC_PASSWORD, String.format(
            "Enter the password for the Oracle database at %s :", conf.get(Constants.CONF_ORACLE_JDBC_URL)));

    return result;
}
From source file:com.quixey.hadoop.fs.oss.MultiPartUploader.java
License:Apache License
MultiPartUploader(OSSClient client, String bucket, Configuration conf) {
    this.client = checkNotNull(client);
    this.bucket = checkNotNull(bucket);
    checkNotNull(conf);

    multipartEnabled = conf.getBoolean(OSSFileSystemConfigKeys.OSS_MULTIPART_UPLOADS_ENABLED, true);
    partSize = conf.getLong(OSS_MULTIPART_UPLOADS_BLOCK_SIZE_PROPERTY, DEFAULT_MULTIPART_BLOCK_SIZE);
    maxThreads = conf.getInt(OSS_MULTIPART_UPLOADS_MAX_THREADS_PROPERTY, Integer.MAX_VALUE);

    checkArgument(partSize <= MAX_PART_SIZE, "%s must be at most %s",
            OSS_MULTIPART_UPLOADS_BLOCK_SIZE_PROPERTY, MAX_PART_SIZE);
}
From source file:com.quixey.hadoop.fs.oss.OSSFileSystem.java
License:Apache License
private static FileSystemStore createDefaultStore(Configuration conf) {
    FileSystemStore store = new CloudOSSFileSystemStore();

    RetryPolicy basePolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
            conf.getInt(OSS_MAX_RETRIES_PROPERTY, 4),
            conf.getLong(OSS_SLEEP_TIME_SECONDS_PROPERTY, 10), TimeUnit.SECONDS);

    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap = new HashMap<>();
    exceptionToPolicyMap.put(IOException.class, basePolicy);

    RetryPolicy methodPolicy = RetryPolicies.retryByException(RetryPolicies.TRY_ONCE_THEN_FAIL,
            exceptionToPolicyMap);

    Map<String, RetryPolicy> methodNameToPolicyMap = new HashMap<>();
    methodNameToPolicyMap.put("storeFile", methodPolicy);
    methodNameToPolicyMap.put("rename", methodPolicy);

    return (FileSystemStore) RetryProxy.create(FileSystemStore.class, store, methodNameToPolicyMap);
}
From source file:com.quixey.hadoop.fs.oss.OSSFileSystem.java
License:Apache License
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
    super.initialize(checkNotNull(uri), checkNotNull(conf));

    if (null == store)
        store = createDefaultStore(conf);
    store.initialize(uri, conf);
    setConf(conf);

    this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
    this.workingDir = makeQualified(new Path("/user", System.getProperty("user.name")));
    this.maxListingLength = conf.getInt(OSS_MAX_LISTING_LENGTH_PROPERTY, 1000);
}
From source file:com.ricemap.spateDB.mapred.RandomShapeGenerator.java
License:Apache License
/**
 * Initialize from a FileSplit
 *
 * @param job
 * @param split
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public RandomShapeGenerator(Configuration job, RandomInputFormat.GeneratedSplit split) throws IOException {
    this(split.length, SpatialSite.getPrism(job, GenerationMBR),
            DistributionType.valueOf(job.get(GenerationType)), job.getInt(GenerationRectSize, 100),
            split.index + job.getLong(GenerationSeed, System.currentTimeMillis()));
    setShape((S) SpatialSite.createStockShape(job));
}