Example usage for java.util Properties stringPropertyNames

Introduction

On this page you can find example usages of java.util Properties stringPropertyNames.

Prototype

public Set<String> stringPropertyNames() 

Document

Returns an unmodifiable set of keys from this property list where the key and its corresponding value are strings, including distinct keys in the default property list if a key of the same name has not already been found from the main properties list.
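
The following minimal, self-contained sketch (not taken from any of the projects listed below) illustrates that behavior: keys present only in the default property list are included, while entries whose key or value is not a String are skipped.

import java.util.Properties;
import java.util.Set;

public class StringPropertyNamesDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("host", "localhost"); // present only in the defaults
        defaults.setProperty("port", "8080");

        Properties props = new Properties(defaults); // backed by the defaults above
        props.setProperty("port", "9090");           // shadows the default value
        props.put("timeout", 30);                    // non-String value, not reported

        Set<String> names = props.stringPropertyNames(); // contains "host" and "port"
        for (String name : names) {
            System.out.println(name + " = " + props.getProperty(name));
        }
    }
}

Running this prints port = 9090 and host = localhost (in unspecified order); timeout is omitted because its value is not a String.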

Usage

From source file:org.apache.hive.beeline.BeeLine.java

private String getDefaultConnectionUrl() throws BeelineHS2ConnectionFileParseException {
    HS2ConnectionFileParser userHS2ConnFileParser = getUserHS2ConnFileParser();
    if (!userHS2ConnFileParser.configExists()) {
        // nothing to do if there is no user HS2 connection configuration file
        return null;
    }
    // get the connection properties from user specific config file
    Properties userConnectionProperties = userHS2ConnFileParser.getConnectionProperties();
    // load the HS2 connection url properties from hive-site.xml if it is present in the classpath
    HS2ConnectionFileParser hiveSiteParser = getHiveSiteHS2ConnectionFileParser();
    Properties hiveSiteConnectionProperties = hiveSiteParser.getConnectionProperties();
    // add/override properties found from hive-site with user-specific properties
    for (String key : userConnectionProperties.stringPropertyNames()) {
        if (hiveSiteConnectionProperties.containsKey(key)) {
            debug("Overriding connection url property " + key + " from user connection configuration file");
        }
        hiveSiteConnectionProperties.setProperty(key, userConnectionProperties.getProperty(key));
    }
    // return the url based on the aggregated connection properties
    return HS2ConnectionFileUtils.getUrl(hiveSiteConnectionProperties);
}

From source file:org.apache.hadoop.hive.ql.exec.Utilities.java

/**
 * Copies the storage handler properties configured for a table descriptor to a runtime job
 * configuration.  This differs from {@link #copyTableJobPropertiesToConf(org.apache.hadoop.hive.ql.plan.TableDesc, org.apache.hadoop.mapred.JobConf)}
 * in that it does not allow parameters already set in the job to override the values from the
 * table.  This is important for setting the config up for reading,
 * as the job may already have values in it from another table.
 * @param tbl
 * @param job
 */
public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) throws HiveException {
    Properties tblProperties = tbl.getProperties();
    for (String name : tblProperties.stringPropertyNames()) {
        String val = (String) tblProperties.get(name);
        if (val != null) {
            job.set(name, StringEscapeUtils.escapeJava(val));
        }
    }
    Map<String, String> jobProperties = tbl.getJobProperties();
    if (jobProperties != null) {
        for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
            job.set(entry.getKey(), entry.getValue());
        }
    }

    try {
        Map<String, String> jobSecrets = tbl.getJobSecrets();
        if (jobSecrets != null) {
            for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
                job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
                UserGroupInformation.getCurrentUser().getCredentials().addSecretKey(new Text(entry.getKey()),
                        entry.getValue().getBytes());
            }
        }
    } catch (IOException e) {
        throw new HiveException(e);
    }
}

From source file:org.apache.hadoop.hive.ql.exec.Utilities.java

/**
 * Copies the storage handler properties configured for a table descriptor to a runtime job
 * configuration.
 *
 * @param tbl
 *          table descriptor from which to read
 *
 * @param job
 *          configuration which receives configured properties
 */
public static void copyTableJobPropertiesToConf(TableDesc tbl, JobConf job) throws HiveException {
    Properties tblProperties = tbl.getProperties();
    for (String name : tblProperties.stringPropertyNames()) {
        if (job.get(name) == null) {
            String val = (String) tblProperties.get(name);
            if (val != null) {
                job.set(name, StringEscapeUtils.escapeJava(val));
            }
        }
    }
    Map<String, String> jobProperties = tbl.getJobProperties();
    if (jobProperties != null) {
        for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
            job.set(entry.getKey(), entry.getValue());
        }
    }

    try {
        Map<String, String> jobSecrets = tbl.getJobSecrets();
        if (jobSecrets != null) {
            for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
                job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
                UserGroupInformation.getCurrentUser().getCredentials().addSecretKey(new Text(entry.getKey()),
                        entry.getValue().getBytes());
            }
        }
    } catch (IOException e) {
        throw new HiveException(e);
    }
}

From source file:com.streamsets.pipeline.stage.origin.jdbc.JdbcSource.java

@Override
protected List<ConfigIssue> init() {
    if (disableValidation) {
        LOG.warn("JDBC Origin initialized with Validation Disabled.");
    }

    List<ConfigIssue> issues = new ArrayList<>();
    Source.Context context = getContext();

    errorRecordHandler = new DefaultErrorRecordHandler(context);
    issues = hikariConfigBean.validateConfigs(context, issues);

    if (queryIntervalMillis < 0) {
        issues.add(getContext().createConfigIssue(Groups.JDBC.name(), QUERY_INTERVAL_EL, JdbcErrors.JDBC_27));
    }

    issues = commonSourceConfigBean.validateConfigs(context, issues);

    // Incremental mode has special requirements for the query form
    if (isIncrementalMode) {
        if (StringUtils.isEmpty(offsetColumn)) {
            issues.add(context.createConfigIssue(Groups.JDBC.name(), OFFSET_COLUMN, JdbcErrors.JDBC_51,
                    "Can't be empty"));
        }
        if (StringUtils.isEmpty(initialOffset)) {
            issues.add(context.createConfigIssue(Groups.JDBC.name(), INITIAL_OFFSET, JdbcErrors.JDBC_51,
                    "Can't be empty"));
        }

        final String formattedOffsetColumn = Pattern.quote(offsetColumn.toUpperCase());
        Pattern offsetColumnInWhereAndOrderByClause = Pattern
                .compile(String.format("(?s).*\\bWHERE\\b.*(\\b%s\\b).*\\bORDER BY\\b.*\\b%s\\b.*",
                        formattedOffsetColumn, formattedOffsetColumn));

        if (!disableValidation) {
            String upperCaseQuery = query.toUpperCase();
            boolean checkOffsetColumnInWhereOrder = true;
            if (!upperCaseQuery.contains("WHERE")) {
                issues.add(context.createConfigIssue(Groups.JDBC.name(), QUERY, JdbcErrors.JDBC_38, "WHERE"));
                checkOffsetColumnInWhereOrder = false;
            }
            if (!upperCaseQuery.contains("ORDER BY")) {
                issues.add(
                        context.createConfigIssue(Groups.JDBC.name(), QUERY, JdbcErrors.JDBC_38, "ORDER BY"));
                checkOffsetColumnInWhereOrder = false;
            }
            if (checkOffsetColumnInWhereOrder
                    && !offsetColumnInWhereAndOrderByClause.matcher(upperCaseQuery).matches()) {
                issues.add(
                        context.createConfigIssue(Groups.JDBC.name(), QUERY, JdbcErrors.JDBC_29, offsetColumn));
            }
        }
    }

    if (txnMaxSize < 0) {
        issues.add(context.createConfigIssue(Groups.ADVANCED.name(), TXN_MAX_SIZE, JdbcErrors.JDBC_10,
                txnMaxSize, 0));
    }

    if (createJDBCNsHeaders && !jdbcNsHeaderPrefix.endsWith(".")) {
        issues.add(
                context.createConfigIssue(Groups.ADVANCED.name(), JDBC_NS_HEADER_PREFIX, JdbcErrors.JDBC_15));
    }

    Properties driverProps = new Properties();
    try {
        driverProps = hikariConfigBean.getDriverProperties();
        if (null == dataSource) {
            dataSource = jdbcUtil.createDataSourceForRead(hikariConfigBean);
        }
    } catch (StageException e) {
        LOG.error(JdbcErrors.JDBC_00.getMessage(), e.toString(), e);
        issues.add(context.createConfigIssue(Groups.JDBC.name(), CONNECTION_STRING, JdbcErrors.JDBC_00,
                e.toString()));
    }

    // Don't proceed with validation query if there are issues or if validation is disabled
    if (!issues.isEmpty() || disableValidation) {
        return issues;
    }

    try (Connection validationConnection = dataSource.getConnection()) { // NOSONAR
        DatabaseMetaData dbMetadata = validationConnection.getMetaData();
        // If CDC is enabled, scrollable cursors must be supported by JDBC driver.
        supportsScrollableCursor(issues, context, dbMetadata);
        try (Statement statement = validationConnection.createStatement()) {
            statement.setFetchSize(1);
            statement.setMaxRows(1);
            final String preparedQuery = prepareQuery(query, initialOffset);
            executeValidationQuery(issues, context, statement, preparedQuery);
        }
    } catch (SQLException e) {
        String formattedError = jdbcUtil.formatSqlException(e);
        LOG.error(formattedError);
        LOG.debug(formattedError, e);
        issues.add(context.createConfigIssue(Groups.JDBC.name(), CONNECTION_STRING, JdbcErrors.JDBC_00,
                formattedError));
    }

    LineageEvent event = getContext().createLineageEvent(LineageEventType.ENTITY_READ);
    // TODO: add the per-event specific details here.
    event.setSpecificAttribute(LineageSpecificAttribute.DESCRIPTION, query);
    event.setSpecificAttribute(LineageSpecificAttribute.ENDPOINT_TYPE, EndPointType.JDBC.name());
    Map<String, String> props = new HashMap<>();
    props.put("Connection String", hikariConfigBean.getConnectionString());
    props.put("Offset Column", offsetColumn);
    props.put("Is Incremental Mode", isIncrementalMode ? "true" : "false");
    if (!StringUtils.isEmpty(tableNames)) {
        event.setSpecificAttribute(LineageSpecificAttribute.ENTITY_NAME,
                hikariConfigBean.getConnectionString() + " " + tableNames);
        props.put("Table Names", tableNames);

    } else {
        event.setSpecificAttribute(LineageSpecificAttribute.ENTITY_NAME,
                hikariConfigBean.getConnectionString());

    }

    for (final String n : driverProps.stringPropertyNames()) {
        props.put(n, driverProps.getProperty(n));
    }
    event.setProperties(props);
    getContext().publishLineageEvent(event);
    shouldFire = true;
    firstTime = true;

    return issues;
}

From source file:org.apache.geode.distributed.internal.DistributionConfigImpl.java

/**
 * Creates a new <code>DistributionConfigImpl</code> with the given non-default configuration
 * properties. See {@link org.apache.geode.distributed.DistributedSystem#connect} for a list of
 * exceptions that may be thrown.
 * 
 * @param nonDefault The configuration properties specified by the caller
 * @param ignoreGemFirePropsFile whether to skip loading distributed system properties from
 *        gemfire.properties file
 * @param isConnected whether to skip Validation for SSL properties and copy of ssl properties to
 *        other ssl properties. This parameter will be used till we provide support for ssl-*
 *        properties.
 *
 * @since GemFire 8.0
 */
public DistributionConfigImpl(Properties nonDefault, boolean ignoreGemFirePropsFile, boolean isConnected) {
    HashMap props = new HashMap();
    if (!ignoreGemFirePropsFile) {// For admin bug #40434
        props.putAll(loadPropertiesFromURL(DistributedSystem.getPropertyFileURL(), false));
    }
    props.putAll(loadPropertiesFromURL(DistributedSystem.getSecurityPropertiesFileURL(), true));

    // Now override values picked up from the file with values passed
    // in from the caller's code
    if (nonDefault != null) {
        props.putAll(nonDefault);
        setSource(nonDefault, ConfigSource.api());
    }
    // Now remove all user defined properties from props.
    for (Object entry : props.entrySet()) {
        Map.Entry<String, String> ent = (Map.Entry<String, String>) entry;
        if (((String) ent.getKey()).startsWith(USERDEFINED_PREFIX_NAME)) {
            userDefinedProps.put(ent.getKey(), ent.getValue());
        }
    }
    // Now override values picked up from the file or code with values
    // from the system properties.
    String[] attNames = getAttributeNames();

    // For gemfire.security-* properties, we will need to look at
    // all the system properties instead of looping through attNames
    Set attNameSet = new HashSet();
    for (int index = 0; index < attNames.length; ++index) {
        attNameSet.add(GEMFIRE_PREFIX + attNames[index]);
    }

    /* clone() is a synchronized method for Properties (actually in Hashtable) */
    Properties sysProps = (Properties) System.getProperties().clone();
    Iterator<?> sysPropsIter = sysProps.entrySet().iterator();
    while (sysPropsIter.hasNext()) {
        Map.Entry sysEntry = (Map.Entry) sysPropsIter.next();
        String sysName = (String) sysEntry.getKey();
        if (attNameSet.contains(sysName) || sysName.startsWith(GEMFIRE_PREFIX + SECURITY_PREFIX_NAME)
                || sysName.startsWith(GEMFIRE_PREFIX + SSL_SYSTEM_PROPS_NAME)) {
            String sysValue = (String) sysEntry.getValue();
            if (sysValue != null) {
                String attName = sysName.substring(GEMFIRE_PREFIX.length());
                props.put(attName, sysValue);
                this.sourceMap.put(attName, ConfigSource.sysprop());
            }
        }
    }
    sysProps.clear(); // clearing cloned SysProps

    final Properties overriddenDefaults = ProcessLauncherContext.getOverriddenDefaults();
    if (!overriddenDefaults.isEmpty()) {
        for (String key : overriddenDefaults.stringPropertyNames()) {
            // only apply the overridden default if it's not already specified in props
            final String property = key.substring(ProcessLauncherContext.OVERRIDDEN_DEFAULTS_PREFIX.length());
            if (!props.containsKey((property))) {
                props.put(property, overriddenDefaults.getProperty(key));
                this.sourceMap.put(property, ConfigSource.launcher());
            }
        }
    }

    initialize(props);

    if (securityPeerAuthInit != null && securityPeerAuthInit.length() > 0) {
        System.setProperty(SECURITY_SYSTEM_PREFIX + SECURITY_PEER_AUTH_INIT, securityPeerAuthInit);
    }
    if (securityPeerAuthenticator != null && securityPeerAuthenticator.length() > 0) {
        System.setProperty(SECURITY_SYSTEM_PREFIX + SECURITY_PEER_AUTHENTICATOR, securityPeerAuthenticator);
    }

    Iterator iter = security.entrySet().iterator();
    while (iter.hasNext()) {
        Map.Entry entry = (Map.Entry) iter.next();
        System.setProperty(SECURITY_SYSTEM_PREFIX + (String) entry.getKey(), (String) entry.getValue());
    }
    if (!isConnected) {
        copySSLPropsToServerSSLProps();
        copySSLPropsToJMXSSLProps();
        copyClusterSSLPropsToGatewaySSLProps();
        copySSLPropsToHTTPSSLProps();
    }

    // Make attributes writeable only
    this.modifiable = true;
    validateConfigurationProperties(props);
    validateSSLEnabledComponentsConfiguration();
    // Make attributes read only
    this.modifiable = false;

}

From source file:dinistiq.Dinistiq.java

/**
 * Injects all available dependencies into a given bean and records all dependencies.
 *
 * @param key key / name / id of the bean
 * @param bean bean instance
 * @param dependencies dependencies map where the dependencies of the bean are recorded with the given key
 * @throws Exception
 */
private void injectDependencies(Map<String, Set<Object>> dependencies, String key, Object bean)
        throws Exception {
    // Prepare values from properties files
    Properties beanProperties = getProperties(key);
    LOG.debug("injectDependencies({}) bean properties {}", key, beanProperties.keySet());

    // fill injected fields
    Class<? extends Object> beanClass = bean.getClass();
    String beanClassName = beanClass.getName();
    while (beanClass != Object.class) {
        if (bean instanceof Map) {
            fillMap(bean, getProperties(key));
            LOG.info("injectDependencies() filled map '{}' {}", key, bean);
            return; // If it's a map we don't need to inject anything beyond some map properties files.
        } // if
        for (Field field : beanClass.getDeclaredFields()) {
            LOG.debug("injectDependencies({}) field {}", key, field.getName());
            if (field.getAnnotation(Inject.class) != null) {
                Named named = field.getAnnotation(Named.class);
                String name = (named == null) ? null
                        : (StringUtils.isBlank(named.value()) ? field.getName() : named.value());
                LOG.info("injectDependencies({}) {} :{} needs injection with name {}", key, field.getName(),
                        field.getGenericType(), name);
                Object b = getValue(beanProperties, dependencies, key, field.getType(), field.getGenericType(),
                        name);
                final boolean accessible = field.isAccessible();
                try {
                    field.setAccessible(true);
                    field.set(bean, b);
                } catch (SecurityException | IllegalArgumentException | IllegalAccessException e) {
                    LOG.error("injectDependencies() error setting field " + field.getName() + " :"
                            + field.getType().getName() + " at '" + key + "' :" + beanClassName, e);
                } finally {
                    field.setAccessible(accessible);
                } // try/catch
            } // if
        } // for
        beanClass = beanClass.getSuperclass();
    } // while

    // call methods with annotated injections
    for (Method m : bean.getClass().getMethods()) {
        if (m.getAnnotation(Inject.class) != null) {
            LOG.debug("injectDependencies({}) inject parameters on method {}", key, m.getName());
            Class<? extends Object>[] parameterTypes = m.getParameterTypes();
            Type[] genericParameterTypes = m.getGenericParameterTypes();
            Annotation[][] parameterAnnotations = m.getParameterAnnotations();
            Object[] parameters = getParameters(beanProperties, dependencies, key, parameterTypes,
                    genericParameterTypes, parameterAnnotations);
            try {
                m.invoke(bean, parameters);
            } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
                LOG.error("injectDependencies() error injecting for method " + m.getName() + " at '" + key
                        + "' :" + beanClassName, ex);
            } // try/catch
        } // if
    } // for

    // Fill in manually set values from properties file
    for (String property : beanProperties.stringPropertyNames()) {
        String methodName = "set" + property.substring(0, 1).toUpperCase() + property.substring(1);
        LOG.debug("injectDependencies({}) {} -> {}", key, property, methodName);
        Method m = null;
        // Have to find it just by name
        for (Method me : bean.getClass().getMethods()) {
            if (me.getName().equals(methodName) && (me.getParameterTypes().length > 0)) {
                m = me;
            } // if
        } // for
        if (m == null) {
            LOG.warn("injectDependencies({}) no setter method found for property {}", key, property);
        } else {
            String propertyName = Introspector.decapitalize(m.getName().substring(3));
            Class<?> parameterType = m.getParameterTypes()[0];
            Type genericType = m.getGenericParameterTypes()[0];
            LOG.debug("injectDependencies({}) writable property found {} :{} {}", key, propertyName,
                    parameterType, genericType);
            String propertyValue = beanProperties.getProperty(propertyName); // Must definitely be there without additional check
            boolean isBoolean = (parameterType == Boolean.class) || (m.getParameterTypes()[0] == Boolean.TYPE);
            boolean isCollection = Collection.class.isAssignableFrom(parameterType);
            Object[] parameters = new Object[1];
            LOG.debug("injectDependencies({}) trying to set value {} (bool {}) (collection {}) '{}'", key,
                    propertyName, isBoolean, isCollection, propertyValue);
            try {
                parameters[0] = getReferenceValue(propertyValue);
                if (isBoolean && (parameters[0] instanceof String)) {
                    parameters[0] = Boolean.valueOf(propertyValue);
                } // if
                if ("long".equals(parameterType.getName())) {
                    parameters[0] = new Long(propertyValue);
                } // if
                if ("int".equals(parameterType.getName())) {
                    parameters[0] = new Integer(propertyValue);
                } // if
                if ("float".equals(parameterType.getName())) {
                    parameters[0] = new Float(propertyValue);
                } // if
                if ("double".equals(parameterType.getName())) {
                    parameters[0] = new Double(propertyValue);
                } // if
                if (isCollection) {
                    if (!Collection.class.isAssignableFrom(parameters[0].getClass())) {
                        Collection<Object> values = List.class.isAssignableFrom(parameterType)
                                ? new ArrayList<>()
                                : new HashSet<>();
                        for (String value : propertyValue.split(",")) {
                            values.add(getReferenceValue(value));
                        } // for
                        parameters[0] = values;
                    } // if
                    if (dependencies != null) {
                        for (Object d : (Collection<?>) parameters[0]) {
                            if (beans.containsValue(d)) {
                                dependencies.get(key).add(d);
                            } // if
                        } // if
                    } // if
                } else {
                    if ((dependencies != null) && (beans.containsValue(parameters[0]))) {
                        dependencies.get(key).add(parameters[0]);
                    } // if
                } // if
                LOG.debug("injectDependencies({}) setting value {} '{}' :{}", key, propertyName, parameters[0],
                        parameters[0].getClass());
                m.invoke(bean, parameters);
            } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
                LOG.error("injectDependencies() error setting property " + propertyName + " to '"
                        + propertyValue + "' at " + key + " :" + beanClassName, ex);
            } // try/catch
        } // if
    } // for
}

From source file:org.regenstrief.util.Util.java

private final static Properties loadProperties1(Properties prop, final String location) {
    InputStream is = null;

    try {
        /*
        CORE-1439
        Properties in general aren't required.
        DataSource might be specified with a setter instead of a property.
        Could be injected by Spring.
        Most other properties have defaults or aren't required.
        */
        is = getStream(location);
        if (prop == null) {
            prop = new Properties();
        }
        if (is == null) {
            log.warn(getNotFoundMessage(location));
            return prop;
        }

        if (log.isDebugEnabled()) {
            final Properties sourceProperties = new Properties();
            sourceProperties.load(is);
            for (final String key : sourceProperties.stringPropertyNames()) {
                if (prop.get(key) != null) {
                    log.debug("Overriding property with key: " + key);
                }
                prop.setProperty(key, sourceProperties.getProperty(key));
            }
        } else {
            prop.load(is);
        }

        return prop;
    } catch (final Exception e) {
        throw toRuntimeException(e);
    } finally {
        IoUtil.close(is);
    }
}

From source file:gobblin.data.management.conversion.hive.query.HiveAvroORCQueryGenerator.java

/***
 * Generate DDL query to create a different format (default: ORC) Hive table for a given Avro Schema
 * @param schema Avro schema to use to generate the DDL for new Hive table
 * @param tblName New Hive table name
 * @param tblLocation New hive table location
 * @param optionalDbName Optional DB name, if not specified it defaults to 'default'
 * @param optionalPartitionDDLInfo Optional partition info in form of map of partition key, partition type pair
 *                                 If not specified, the table is assumed to be un-partitioned ie of type snapshot
 * @param optionalClusterInfo Optional cluster info
 * @param optionalSortOrderInfo Optional sort order
 * @param optionalNumOfBuckets Optional number of buckets
 * @param optionalRowFormatSerde Optional row format serde, default is ORC
 * @param optionalInputFormat Optional input format serde, default is ORC
 * @param optionalOutputFormat Optional output format serde, default is ORC
 * @param tableProperties Optional table properties
 * @param isEvolutionEnabled If schema evolution is turned on
 * @param destinationTableMeta Optional destination table metadata
 * @return Generated DDL query to create new Hive table
 */
public static String generateCreateTableDDL(Schema schema, String tblName, String tblLocation,
        Optional<String> optionalDbName, Optional<Map<String, String>> optionalPartitionDDLInfo,
        Optional<List<String>> optionalClusterInfo,
        Optional<Map<String, COLUMN_SORT_ORDER>> optionalSortOrderInfo, Optional<Integer> optionalNumOfBuckets,
        Optional<String> optionalRowFormatSerde, Optional<String> optionalInputFormat,
        Optional<String> optionalOutputFormat, Properties tableProperties, boolean isEvolutionEnabled,
        Optional<Table> destinationTableMeta, Map<String, String> hiveColumns) {

    Preconditions.checkNotNull(schema);
    Preconditions.checkArgument(StringUtils.isNotBlank(tblName));
    Preconditions.checkArgument(StringUtils.isNotBlank(tblLocation));

    String dbName = optionalDbName.isPresent() ? optionalDbName.get() : DEFAULT_DB_NAME;
    String rowFormatSerde = optionalRowFormatSerde.isPresent() ? optionalRowFormatSerde.get()
            : DEFAULT_ROW_FORMAT_SERDE;
    String inputFormat = optionalInputFormat.isPresent() ? optionalInputFormat.get() : DEFAULT_ORC_INPUT_FORMAT;
    String outputFormat = optionalOutputFormat.isPresent() ? optionalOutputFormat.get()
            : DEFAULT_ORC_OUTPUT_FORMAT;
    tableProperties = getTableProperties(tableProperties);

    // Start building Hive DDL
    // Refer to Hive DDL manual for explanation of clauses:
    // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-Create/Drop/TruncateTable
    StringBuilder ddl = new StringBuilder();

    // Create statement
    ddl.append(String.format("CREATE EXTERNAL TABLE IF NOT EXISTS `%s`.`%s` ", dbName, tblName));
    // .. open bracket for CREATE
    ddl.append("( \n");

    // 1. If evolution is enabled, and destination table does not exist
    //    .. use columns from new schema
    //    (evolution does not matter if it's a new destination table)
    // 2. If evolution is enabled, and destination table does exist
    //    .. use columns from new schema
    //    (alter table will be used before moving data from staging to final table)
    // 3. If evolution is disabled, and destination table does not exist
    //    .. use columns from new schema
    //    (evolution does not matter if it's a new destination table)
    // 4. If evolution is disabled, and destination table does exist
    //    .. use columns from destination schema
    if (isEvolutionEnabled || !destinationTableMeta.isPresent()) {
        log.info("Generating DDL using source schema");
        ddl.append(generateAvroToHiveColumnMapping(schema, Optional.of(hiveColumns), true));
    } else {
        log.info("Generating DDL using destination schema");
        ddl.append(
                generateDestinationToHiveColumnMapping(Optional.of(hiveColumns), destinationTableMeta.get()));
    }

    // .. close bracket for CREATE
    ddl.append(") \n");

    // Partition info
    if (optionalPartitionDDLInfo.isPresent() && optionalPartitionDDLInfo.get().size() > 0) {
        ddl.append("PARTITIONED BY ( ");
        boolean isFirst = true;
        Map<String, String> partitionInfoMap = optionalPartitionDDLInfo.get();
        for (Map.Entry<String, String> partitionInfo : partitionInfoMap.entrySet()) {
            if (isFirst) {
                isFirst = false;
            } else {
                ddl.append(", ");
            }
            ddl.append(String.format("`%s` %s", partitionInfo.getKey(), partitionInfo.getValue()));
        }
        ddl.append(" ) \n");
    }

    if (optionalClusterInfo.isPresent()) {
        if (!optionalNumOfBuckets.isPresent()) {
            throw new IllegalArgumentException(("CLUSTERED BY requested, but no NUM_BUCKETS specified"));
        }
        ddl.append("CLUSTERED BY ( ");
        boolean isFirst = true;
        for (String clusterByCol : optionalClusterInfo.get()) {
            if (!hiveColumns.containsKey(clusterByCol)) {
                throw new IllegalArgumentException(String.format(
                        "Requested CLUSTERED BY column: %s " + "is not present in schema", clusterByCol));
            }
            if (isFirst) {
                isFirst = false;
            } else {
                ddl.append(", ");
            }
            ddl.append(String.format("`%s`", clusterByCol));
        }
        ddl.append(" ) ");

        if (optionalSortOrderInfo.isPresent() && optionalSortOrderInfo.get().size() > 0) {
            Map<String, COLUMN_SORT_ORDER> sortOrderInfoMap = optionalSortOrderInfo.get();
            ddl.append("SORTED BY ( ");
            isFirst = true;
            for (Map.Entry<String, COLUMN_SORT_ORDER> sortOrderInfo : sortOrderInfoMap.entrySet()) {
                if (!hiveColumns.containsKey(sortOrderInfo.getKey())) {
                    throw new IllegalArgumentException(
                            String.format("Requested SORTED BY column: %s " + "is not present in schema",
                                    sortOrderInfo.getKey()));
                }
                if (isFirst) {
                    isFirst = false;
                } else {
                    ddl.append(", ");
                }
                ddl.append(String.format("`%s` %s", sortOrderInfo.getKey(), sortOrderInfo.getValue()));
            }
            ddl.append(" ) ");
        }
        ddl.append(String.format(" INTO %s BUCKETS %n", optionalNumOfBuckets.get()));
    } else {
        if (optionalSortOrderInfo.isPresent()) {
            throw new IllegalArgumentException("SORTED BY requested, but no CLUSTERED BY specified");
        }
    }

    // Field Terminal
    ddl.append("ROW FORMAT SERDE \n");
    ddl.append(String.format("  '%s' %n", rowFormatSerde));

    // Stored as ORC
    ddl.append("STORED AS INPUTFORMAT \n");
    ddl.append(String.format("  '%s' %n", inputFormat));
    ddl.append("OUTPUTFORMAT \n");
    ddl.append(String.format("  '%s' %n", outputFormat));

    // Location
    ddl.append("LOCATION \n");
    ddl.append(String.format("  '%s' %n", tblLocation));

    // Table properties
    if (null != tableProperties && tableProperties.size() > 0) {
        ddl.append("TBLPROPERTIES ( \n");
        boolean isFirst = true;
        for (String property : tableProperties.stringPropertyNames()) {
            if (isFirst) {
                isFirst = false;
            } else {
                ddl.append(", \n");
            }
            ddl.append(String.format("  '%s'='%s'", property, tableProperties.getProperty(property)));
        }
        ddl.append(") \n");
    }

    return ddl.toString();
}

From source file:org.apache.gobblin.data.management.conversion.hive.query.HiveAvroORCQueryGenerator.java

/***
 * Generate DDL query to create a different format (default: ORC) Hive table for a given Avro Schema
 * @param schema Avro schema to use to generate the DDL for new Hive table
 * @param tblName New Hive table name
 * @param tblLocation New hive table location
 * @param optionalDbName Optional DB name, if not specified it defaults to 'default'
 * @param optionalPartitionDDLInfo Optional partition info in form of map of partition key, partition type pair
 *                                 If not specified, the table is assumed to be un-partitioned ie of type snapshot
 * @param optionalClusterInfo Optional cluster info
 * @param optionalSortOrderInfo Optional sort order
 * @param optionalNumOfBuckets Optional number of buckets
 * @param optionalRowFormatSerde Optional row format serde, default is ORC
 * @param optionalInputFormat Optional input format serde, default is ORC
 * @param optionalOutputFormat Optional output format serde, default is ORC
 * @param tableProperties Optional table properties
 * @param isEvolutionEnabled If schema evolution is turned on
 * @param destinationTableMeta Optional destination table metadata
 * @return Generated DDL query to create new Hive table
 */
public static String generateCreateTableDDL(Schema schema, String tblName, String tblLocation,
        Optional<String> optionalDbName, Optional<Map<String, String>> optionalPartitionDDLInfo,
        Optional<List<String>> optionalClusterInfo,
        Optional<Map<String, COLUMN_SORT_ORDER>> optionalSortOrderInfo, Optional<Integer> optionalNumOfBuckets,
        Optional<String> optionalRowFormatSerde, Optional<String> optionalInputFormat,
        Optional<String> optionalOutputFormat, Properties tableProperties, boolean isEvolutionEnabled,
        Optional<Table> destinationTableMeta, Map<String, String> hiveColumns) {

    Preconditions.checkNotNull(schema);
    Preconditions.checkArgument(StringUtils.isNotBlank(tblName));
    Preconditions.checkArgument(StringUtils.isNotBlank(tblLocation));

    String dbName = optionalDbName.isPresent() ? optionalDbName.get() : DEFAULT_DB_NAME;
    String rowFormatSerde = optionalRowFormatSerde.isPresent() ? optionalRowFormatSerde.get()
            : DEFAULT_ROW_FORMAT_SERDE;
    String inputFormat = optionalInputFormat.isPresent() ? optionalInputFormat.get() : DEFAULT_ORC_INPUT_FORMAT;
    String outputFormat = optionalOutputFormat.isPresent() ? optionalOutputFormat.get()
            : DEFAULT_ORC_OUTPUT_FORMAT;
    tableProperties = getTableProperties(tableProperties);

    // Start building Hive DDL
    // Refer to Hive DDL manual for explanation of clauses:
    // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-Create/Drop/TruncateTable
    StringBuilder ddl = new StringBuilder();

    // Create statement
    ddl.append(String.format("CREATE EXTERNAL TABLE IF NOT EXISTS `%s`.`%s` ", dbName, tblName));
    // .. open bracket for CREATE
    ddl.append("( \n");

    // 1. If evolution is enabled, and destination table does not exist
    //    .. use columns from new schema
    //    (evolution does not matter if it's a new destination table)
    // 2. If evolution is enabled, and destination table does exist
    //    .. use columns from new schema
    //    (alter table will be used before moving data from staging to final table)
    // 3. If evolution is disabled, and destination table does not exist
    //    .. use columns from new schema
    //    (evolution does not matter if it's a new destination table)
    // 4. If evolution is disabled, and destination table does exist
    //    .. use columns from destination schema
    if (isEvolutionEnabled || !destinationTableMeta.isPresent()) {
        log.info("Generating DDL using source schema");
        ddl.append(generateAvroToHiveColumnMapping(schema, Optional.of(hiveColumns), true,
                dbName + "." + tblName));
    } else {
        log.info("Generating DDL using destination schema");
        ddl.append(
                generateDestinationToHiveColumnMapping(Optional.of(hiveColumns), destinationTableMeta.get()));
    }

    // .. close bracket for CREATE
    ddl.append(") \n");

    // Partition info
    if (optionalPartitionDDLInfo.isPresent() && optionalPartitionDDLInfo.get().size() > 0) {
        ddl.append("PARTITIONED BY ( ");
        boolean isFirst = true;
        Map<String, String> partitionInfoMap = optionalPartitionDDLInfo.get();
        for (Map.Entry<String, String> partitionInfo : partitionInfoMap.entrySet()) {
            if (isFirst) {
                isFirst = false;
            } else {
                ddl.append(", ");
            }
            ddl.append(String.format("`%s` %s", partitionInfo.getKey(), partitionInfo.getValue()));
        }
        ddl.append(" ) \n");
    }

    if (optionalClusterInfo.isPresent()) {
        if (!optionalNumOfBuckets.isPresent()) {
            throw new IllegalArgumentException((String.format(
                    "CLUSTERED BY requested, but no NUM_BUCKETS specified for table %s.%s", dbName, tblName)));
        }
        ddl.append("CLUSTERED BY ( ");
        boolean isFirst = true;
        for (String clusterByCol : optionalClusterInfo.get()) {
            if (!hiveColumns.containsKey(clusterByCol)) {
                throw new IllegalArgumentException(String.format(
                        "Requested CLUSTERED BY column: %s " + "is not present in schema for table %s.%s",
                        clusterByCol, dbName, tblName));
            }
            if (isFirst) {
                isFirst = false;
            } else {
                ddl.append(", ");
            }
            ddl.append(String.format("`%s`", clusterByCol));
        }
        ddl.append(" ) ");

        if (optionalSortOrderInfo.isPresent() && optionalSortOrderInfo.get().size() > 0) {
            Map<String, COLUMN_SORT_ORDER> sortOrderInfoMap = optionalSortOrderInfo.get();
            ddl.append("SORTED BY ( ");
            isFirst = true;
            for (Map.Entry<String, COLUMN_SORT_ORDER> sortOrderInfo : sortOrderInfoMap.entrySet()) {
                if (!hiveColumns.containsKey(sortOrderInfo.getKey())) {
                    throw new IllegalArgumentException(String.format(
                            "Requested SORTED BY column: %s " + "is not present in schema for table %s.%s",
                            sortOrderInfo.getKey(), dbName, tblName));
                }
                if (isFirst) {
                    isFirst = false;
                } else {
                    ddl.append(", ");
                }
                ddl.append(String.format("`%s` %s", sortOrderInfo.getKey(), sortOrderInfo.getValue()));
            }
            ddl.append(" ) ");
        }
        ddl.append(String.format(" INTO %s BUCKETS %n", optionalNumOfBuckets.get()));
    } else {
        if (optionalSortOrderInfo.isPresent()) {
            throw new IllegalArgumentException(String.format(
                    "SORTED BY requested, but no CLUSTERED BY specified for table %s.%s", dbName, tblName));
        }
    }

    // Field Terminal
    ddl.append("ROW FORMAT SERDE \n");
    ddl.append(String.format("  '%s' %n", rowFormatSerde));

    // Stored as ORC
    ddl.append("STORED AS INPUTFORMAT \n");
    ddl.append(String.format("  '%s' %n", inputFormat));
    ddl.append("OUTPUTFORMAT \n");
    ddl.append(String.format("  '%s' %n", outputFormat));

    // Location
    ddl.append("LOCATION \n");
    ddl.append(String.format("  '%s' %n", tblLocation));

    // Table properties
    if (null != tableProperties && tableProperties.size() > 0) {
        ddl.append("TBLPROPERTIES ( \n");
        boolean isFirst = true;
        for (String property : tableProperties.stringPropertyNames()) {
            if (isFirst) {
                isFirst = false;
            } else {
                ddl.append(", \n");
            }
            ddl.append(String.format("  '%s'='%s'", property, tableProperties.getProperty(property)));
        }
        ddl.append(") \n");
    }

    return ddl.toString();
}

From source file:tv.phantombot.PhantomBot.java

public static void main(String[] args) throws IOException {
    // Move user files.
    moveUserConfig();

    /* List of properties that must exist. */
    String requiredProperties[] = new String[] { "oauth", "channel", "owner", "user" };
    String requiredPropertiesErrorMessage = "";

    if (Float.valueOf(System.getProperty("java.specification.version")) < (float) 1.8
            || Float.valueOf(System.getProperty("java.specification.version")) >= (float) 1.9) {
        System.out.println("Detected Java " + System.getProperty("java.version") + ". "
                + "PhantomBot requires Java 8. Java 9 and above will NOT work.");
        System.exit(1);
    }

    /* Properties configuration */
    Properties startProperties = new Properties();

    /* Indicates that the botlogin.txt file should be overwritten/created. */
    Boolean changed = false;

    /* Print the user dir */
    com.gmt2001.Console.out.println("The working directory is: " + System.getProperty("user.dir"));

    com.gmt2001.Console.out.println("Detected Java " + System.getProperty("java.version") + " running on "
            + System.getProperty("os.name") + " " + System.getProperty("os.version") + " ("
            + System.getProperty("os.arch") + ")");

    /* If prompted, now that the version has been reported, exit. */
    if (args.length > 0) {
        if (args[0].equals("--version") || args[0].equals("-v")) {
            com.gmt2001.Console.out.println("PhantomBot Version: " + RepoVersion.getPhantomBotVersion() + " ("
                    + RepoVersion.getRepoVersion() + ")");
            System.exit(1);
        }
    }

    /* Load up the bot info from the bot login file */
    try {
        if (new File("./config/botlogin.txt").exists()) {
            FileInputStream inputStream = new FileInputStream("./config/botlogin.txt");
            startProperties.load(inputStream);
            inputStream.close();
        } else {
            /* Fill in the Properties object with some default values. Note that some values are left
             * unset to be caught in the upcoming logic to enforce settings.
             */
            startProperties.setProperty("baseport", "25000");
            startProperties.setProperty("usehttps", "false");
            startProperties.setProperty("webenable", "true");
            startProperties.setProperty("msglimit30", "19.0");
            startProperties.setProperty("musicenable", "true");
            startProperties.setProperty("whisperlimit60", "60.0");
        }
    } catch (IOException ex) {
        com.gmt2001.Console.err.printStackTrace(ex);
    } catch (Exception ex) {
        com.gmt2001.Console.err.printStackTrace(ex);
    }
    /* Load up the bot info from the environment */
    for (Entry<String, String> v : System.getenv().entrySet()) {
        String Prefix = "PHANTOMBOT_";
        String Key = v.getKey().toUpperCase();
        String Value = v.getValue();
        if (Key.startsWith(Prefix) && Prefix.length() < Key.length()) {
            Key = Key.substring(Prefix.length()).toLowerCase();
            startProperties.setProperty(Key, Value);
        }
    }
    /* Check to enable debug mode */
    if (startProperties.getProperty("debugon", "false").equals("true")) {
        com.gmt2001.Console.out.println("Debug Mode Enabled");
        PhantomBot.enableDebugging = true;
    }
    /* Check to enable debug to File */
    if (startProperties.getProperty("debuglog", "false").equals("true")) {
        com.gmt2001.Console.out.println("Debug Log Only Mode Enabled");
        PhantomBot.enableDebugging = true;
        PhantomBot.enableDebuggingLogOnly = true;
    }
    /* Check to enable Script Reloading */
    if (startProperties.getProperty("reloadscripts", "false").equals("true")) {
        com.gmt2001.Console.out.println("Enabling Script Reloading");
        PhantomBot.reloadScripts = true;
    }
    /* Check to enable Rhino Debugger */
    if (startProperties.getProperty("rhinodebugger", "false").equals("true")) {
        com.gmt2001.Console.out.println("Rhino Debugger will be launched if system supports it.");
        PhantomBot.enableRhinoDebugger = true;
    }
    /* Check to see if there's a webOauth set */
    if (startProperties.getProperty("webauth") == null) {
        startProperties.setProperty("webauth", generateWebAuth());
        com.gmt2001.Console.debug.println("New webauth key has been generated for ./config/botlogin.txt");
        changed = true;
    }
    /* Check to see if there's a webOAuthRO set */
    if (startProperties.getProperty("webauthro") == null) {
        startProperties.setProperty("webauthro", generateWebAuth());
        com.gmt2001.Console.debug
                .println("New webauth read-only key has been generated for ./config/botlogin.txt");
        changed = true;
    }
    /* Check to see if there's a panelUsername set */
    if (startProperties.getProperty("paneluser") == null) {
        com.gmt2001.Console.debug.println(
                "No Panel Username, using default value of 'panel' for Control Panel and YouTube Player");
        startProperties.setProperty("paneluser", "panel");
        changed = true;
    }
    /* Check to see if there's a panelPassword set */
    if (startProperties.getProperty("panelpassword") == null) {
        com.gmt2001.Console.debug.println(
                "No Panel Password, using default value of 'panel' for Control Panel and YouTube Player");
        startProperties.setProperty("panelpassword", "panel");
        changed = true;
    }
    /* Check to see if there's a youtubeOAuth set */
    if (startProperties.getProperty("ytauth") == null) {
        startProperties.setProperty("ytauth", generateWebAuth());
        com.gmt2001.Console.debug
                .println("New YouTube websocket key has been generated for ./config/botlogin.txt");
        changed = true;
    }
    /* Check to see if there's a youtubeOAuthThro set */
    if (startProperties.getProperty("ytauthro") == null) {
        startProperties.setProperty("ytauthro", generateWebAuth());
        com.gmt2001.Console.debug
                .println("New YouTube read-only websocket key has been generated for ./config/botlogin.txt");
        changed = true;
    }

    /* Make a new botlogin with the botName, oauth or channel is not found */
    if (startProperties.getProperty("user") == null || startProperties.getProperty("oauth") == null
            || startProperties.getProperty("channel") == null) {
        try {

            com.gmt2001.Console.out.print("\r\n");
            com.gmt2001.Console.out.print("Welcome to the PhantomBot setup process!\r\n");
            com.gmt2001.Console.out.print(
                    "If you have any issues please report them on our forum, Tweet at us, or join our Discord!\r\n");
            com.gmt2001.Console.out.print("Forum: https://community.phantombot.tv/\r\n");
            com.gmt2001.Console.out.print("Documentation: https://docs.phantombot.tv/\r\n");
            com.gmt2001.Console.out.print("Twitter: https://twitter.com/PhantomBot/\r\n");
            com.gmt2001.Console.out.print("Discord: https://discord.gg/rkPqDuK/\r\n");
            com.gmt2001.Console.out.print("Support PhantomBot on Patreon: https://phantombot.tv/support/\r\n");
            com.gmt2001.Console.out.print("\r\n");

            final String os = System.getProperty("os.name").toLowerCase();

            // Detect Windows, MacOS, Linux or any other operating system.
            if (os.startsWith("win")) {
                com.gmt2001.Console.out
                        .print("PhantomBot has detected that your device is running Windows.\r\n");
                com.gmt2001.Console.out.print(
                        "Here's the setup guide for Windows: https://community.phantombot.tv/t/windows-setup-guide/");
            } else if (os.startsWith("mac")) {
                com.gmt2001.Console.out.print("PhantomBot has detected that your device is running macOS.\r\n");
                com.gmt2001.Console.out.print(
                        "Here's the setup guide for macOS: https://community.phantombot.tv/t/macos-setup-guide/");
            } else {
                com.gmt2001.Console.out.print("PhantomBot has detected that your device is running Linux.\r\n");
                com.gmt2001.Console.out.print(
                        "Here's the setup guide for Ubuntu: https://community.phantombot.tv/t/ubuntu-16-04-lts-setup-guide/\r\n");
                com.gmt2001.Console.out.print(
                        "Here's the setup guide for CentOS: https://community.phantombot.tv/t/centos-7-setup-guide/");
            }

            com.gmt2001.Console.out.print("\r\n\r\n\r\n");

            // Bot name.
            do {
                com.gmt2001.Console.out.print("1. Please enter the bot's Twitch username: ");

                startProperties.setProperty("user", System.console().readLine().trim().toLowerCase());
            } while (startProperties.getProperty("user", "").length() <= 0);

            // Twitch oauth.
            do {
                com.gmt2001.Console.out.print("\r\n");
                com.gmt2001.Console.out
                        .print("2. You will now need a OAuth token for the bot to be able to chat.\r\n");
                com.gmt2001.Console.out.print(
                        "Please note, this OAuth token needs to be generated while you're logged in into the bot's Twitch account.\r\n");
                com.gmt2001.Console.out.print(
                        "If you're not logged in as the bot, please go to https://twitch.tv/ and login as the bot.\r\n");
                com.gmt2001.Console.out
                        .print("Get the bot's OAuth token here: https://twitchapps.com/tmi/\r\n");
                com.gmt2001.Console.out.print("Please enter the bot's OAuth token: ");

                startProperties.setProperty("oauth", System.console().readLine().trim());
            } while (startProperties.getProperty("oauth", "").length() <= 0);

            // api oauth.
            do {
                com.gmt2001.Console.out.print("\r\n");
                com.gmt2001.Console.out.print(
                        "3. You will now need your channel OAuth token for the bot to be able to change your title and game.\r\n");
                com.gmt2001.Console.out.print(
                        "Please note, this OAuth token needs to be generated while you're logged in into your caster account.\r\n");
                com.gmt2001.Console.out.print(
                        "If you're not logged in as the caster, please go to https://twitch.tv/ and login as the caster.\r\n");
                com.gmt2001.Console.out
                        .print("Get the your OAuth token here: https://phantombot.tv/oauth/\r\n");
                com.gmt2001.Console.out.print("Please enter your OAuth token: ");

                startProperties.setProperty("apioauth", System.console().readLine().trim());
            } while (startProperties.getProperty("apioauth", "").length() <= 0);

            // Channel name.
            do {
                com.gmt2001.Console.out.print("\r\n");
                com.gmt2001.Console.out
                        .print("4. Please enter the name of the Twitch channel the bot should join: ");

                startProperties.setProperty("channel", System.console().readLine().trim());
            } while (startProperties.getProperty("channel", "").length() <= 0);

            // Panel username.
            do {
                com.gmt2001.Console.out.print("\r\n");
                com.gmt2001.Console.out.print("5. Please enter a custom username for the web panel: ");

                startProperties.setProperty("paneluser", System.console().readLine().trim());
            } while (startProperties.getProperty("paneluser", "").length() <= 0);

            // Panel password.
            do {
                com.gmt2001.Console.out.print("\r\n");
                com.gmt2001.Console.out.print("6. Please enter a custom password for the web panel: ");

                startProperties.setProperty("panelpassword", System.console().readLine().trim());
            } while (startProperties.getProperty("panelpassword", "").length() <= 0);

            com.gmt2001.Console.out.print("\r\n");
            com.gmt2001.Console.out.print("PhantomBot will launch in 10 seconds.\r\n");
            com.gmt2001.Console.out.print(
                    "If you're hosting the bot locally you can access the control panel here: http://localhost:25000/panel \r\n");
            com.gmt2001.Console.out.print(
                    "If you're running the bot on a server, make sure to open the following ports: \r\n");
            com.gmt2001.Console.out.print(
                    "25000, 25003, and 25004. You have to change 'localhost' to your server ip to access the panel. \r\n");

            try {
                Thread.sleep(10000);
            } catch (InterruptedException ex) {
                com.gmt2001.Console.debug.println("Failed to sleep in setup: " + ex.getMessage());
            }

            changed = true;
            newSetup = true;
        } catch (NullPointerException ex) {
            com.gmt2001.Console.err.printStackTrace(ex);
            com.gmt2001.Console.out.println("[ERROR] Failed to setup PhantomBot. Now exiting...");
            System.exit(0);
        }
    }

    /* Make sure the oauth has been set correctly */
    if (startProperties.getProperty("oauth") != null) {
        if (!startProperties.getProperty("oauth").startsWith("oauth")
                && !startProperties.getProperty("oauth").isEmpty()) {
            startProperties.setProperty("oauth", "oauth:" + startProperties.getProperty("oauth"));
            changed = true;
        }
    }

    /* Make sure the apiOAuth has been set correctly */
    if (startProperties.getProperty("apioauth") != null) {
        if (!startProperties.getProperty("apioauth").startsWith("oauth")
                && !startProperties.getProperty("apioauth").isEmpty()) {
            startProperties.setProperty("apioauth", "oauth:" + startProperties.getProperty("apioauth"));
            changed = true;
        }
    }

    /* Make sure the channelName does not have a # */
    if (startProperties.getProperty("channel").startsWith("#")) {
        startProperties.setProperty("channel", startProperties.getProperty("channel").substring(1));
        changed = true;
    } else if (startProperties.getProperty("channel").contains(".tv")) {
        startProperties.setProperty("channel", startProperties.getProperty("channel")
                .substring(startProperties.getProperty("channel").indexOf(".tv/") + 4).replaceAll("/", ""));
        changed = true;
    }

    /* Check for the owner after the channel check is done. */
    if (startProperties.getProperty("owner") == null) {
        if (startProperties.getProperty("channel") != null) {
            if (!startProperties.getProperty("channel").isEmpty()) {
                startProperties.setProperty("owner", startProperties.getProperty("channel"));
                changed = true;
            }
        }
    }

    /* Iterate the properties and delete entries for anything that does not have a
     * value.
     */
    for (String propertyKey : startProperties.stringPropertyNames()) {
        if (startProperties.getProperty(propertyKey).isEmpty()) {
            changed = true;
            startProperties.remove(propertyKey);
        }
    }

    /*
     * Check for required settings.
     */
    for (String requiredProperty : requiredProperties) {
        if (startProperties.getProperty(requiredProperty) == null) {
            requiredPropertiesErrorMessage += requiredProperty + " ";
        }
    }

    if (!requiredPropertiesErrorMessage.isEmpty()) {
        com.gmt2001.Console.err.println();
        com.gmt2001.Console.err.println("Missing Required Properties: " + requiredPropertiesErrorMessage);
        com.gmt2001.Console.err.println("Exiting PhantomBot");
        System.exit(0);
    }

    /* Check to see if anything changed */
    if (changed) {
        Properties outputProperties = new Properties() {
            @Override
            public synchronized Enumeration<Object> keys() {
                return Collections.enumeration(new TreeSet<>(super.keySet()));
            }
        };

        try {
            try (FileOutputStream outputStream = new FileOutputStream("./config/botlogin.txt")) {
                outputProperties.putAll(startProperties);
                outputProperties.store(outputStream, "PhantomBot Configuration File");
            }
        } catch (IOException ex) {
            com.gmt2001.Console.err.printStackTrace(ex);
        }
    }

    /* Start PhantomBot */
    PhantomBot.instance = new PhantomBot(startProperties);
}