List of usage examples for java.util.Properties.entrySet()
@Override
public Set<Map.Entry<Object, Object>> entrySet()
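Before the real-world examples, here is a minimal, self-contained sketch of the pattern they all share: entrySet() is inherited from Hashtable, so each element is a Map.Entry<Object, Object> and keys/values typically need a cast or toString() call. The class name DemoApp and the property keys are placeholders chosen for illustration.

import java.util.Map;
import java.util.Properties;

public class DemoApp {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("db.host", "localhost");
        props.setProperty("db.port", "5432");

        // entrySet() exposes the raw Hashtable entries, typed as Object.
        for (Map.Entry<Object, Object> entry : props.entrySet()) {
            String key = (String) entry.getKey();
            String value = (String) entry.getValue();
            System.out.println(key + " = " + value);
        }
    }
}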
From source file:com.cloudera.beeswax.BeeswaxServiceImpl.java
@Override
public List<ConfigVariable> get_default_configuration(boolean includeHadoop) throws TException {
    HiveConf conf = new HiveConf(BeeswaxServiceImpl.class);
    Properties p;
    if (includeHadoop) {
        p = conf.getAllProperties();
    } else {
        p = conf.getChangedProperties();
    }
    List<ConfigVariable> ret = new ArrayList<ConfigVariable>();
    for (Entry<Object, Object> e : p.entrySet()) {
        String key = (String) e.getKey();
        String value = (String) e.getValue();
        ConfigVariable cv = new ConfigVariable();
        cv.setKey(key);
        cv.setValue(value);
        cv.setDescription(configDescriptions.lookup(key));
        ret.add(cv);
    }
    return ret;
}
From source file:org.apache.nifi.registry.bootstrap.RunNiFiRegistry.java
public void env() {
    final Logger logger = cmdLogger;
    final Status status = getStatus(logger);
    if (status.getPid() == null) {
        logger.info("Apache NiFi Registry is not running");
        return;
    }
    final Class<?> virtualMachineClass;
    try {
        virtualMachineClass = Class.forName("com.sun.tools.attach.VirtualMachine");
    } catch (final ClassNotFoundException cnfe) {
        logger.error("Seems tools.jar (Linux / Windows JDK) or classes.jar (Mac OS) is not available in classpath");
        return;
    }
    final Method attachMethod;
    final Method detachMethod;
    try {
        attachMethod = virtualMachineClass.getMethod("attach", String.class);
        detachMethod = virtualMachineClass.getDeclaredMethod("detach");
    } catch (final Exception e) {
        logger.error("Methods required for getting environment not available", e);
        return;
    }
    final Object virtualMachine;
    try {
        virtualMachine = attachMethod.invoke(null, status.getPid());
    } catch (final Throwable t) {
        logger.error("Problem attaching to NiFi", t);
        return;
    }
    try {
        final Method getSystemPropertiesMethod = virtualMachine.getClass().getMethod("getSystemProperties");
        final Properties sysProps = (Properties) getSystemPropertiesMethod.invoke(virtualMachine);
        for (Entry<Object, Object> syspropEntry : sysProps.entrySet()) {
            logger.info(syspropEntry.getKey().toString() + " = " + syspropEntry.getValue().toString());
        }
    } catch (Throwable t) {
        throw new RuntimeException(t);
    } finally {
        try {
            detachMethod.invoke(virtualMachine);
        } catch (final Exception e) {
            logger.warn("Caught exception detaching from process", e);
        }
    }
}
From source file:gobblin.metrics.event.sla.SlaEventSubmitter.java
/**
 * Construct an {@link SlaEventSubmitter} by extracting Sla event metadata from the properties. See
 * {@link SlaEventKeys} for keys to set in properties.
 * <p>
 * The <code>props</code> MUST have required property {@link SlaEventKeys#DATASET_URN_KEY} set.<br>
 * All properties with prefix {@link SlaEventKeys#EVENT_ADDITIONAL_METADATA_PREFIX} will be automatically added as
 * event metadata.
 * </p>
 * <p>
 * Use {@link SlaEventSubmitter#builder()} to build an {@link SlaEventSubmitter} directly with event metadata.
 * </p>
 *
 * @param submitter used to submit the event
 * @param name of the event
 * @param props reference that contains event metadata
 */
public SlaEventSubmitter(EventSubmitter submitter, String name, Properties props) {
    this.eventName = name;
    this.eventSubmitter = submitter;
    if (props.containsKey(SlaEventKeys.DATASET_URN_KEY)) {
        this.datasetUrn = props.getProperty(SlaEventKeys.DATASET_URN_KEY);
    } else {
        this.datasetUrn = props.getProperty(ConfigurationKeys.DATASET_URN_KEY);
    }
    this.partition = props.getProperty(SlaEventKeys.PARTITION_KEY);
    this.originTimestamp = props.getProperty(SlaEventKeys.ORIGIN_TS_IN_MILLI_SECS_KEY);
    this.upstreamTimestamp = props.getProperty(SlaEventKeys.UPSTREAM_TS_IN_MILLI_SECS_KEY);
    this.completenessPercentage = props.getProperty(SlaEventKeys.COMPLETENESS_PERCENTAGE_KEY);
    this.recordCount = props.getProperty(SlaEventKeys.RECORD_COUNT_KEY);
    this.previousPublishTimestamp = props.getProperty(SlaEventKeys.PREVIOUS_PUBLISH_TS_IN_MILLI_SECS_KEY);
    this.dedupeStatus = props.getProperty(SlaEventKeys.DEDUPE_STATUS_KEY);
    this.isFirstPublish = props.getProperty(SlaEventKeys.IS_FIRST_PUBLISH);
    this.additionalMetadata = Maps.newHashMap();
    for (Entry<Object, Object> entry : props.entrySet()) {
        if (StringUtils.startsWith(entry.getKey().toString(), SlaEventKeys.EVENT_ADDITIONAL_METADATA_PREFIX)) {
            this.additionalMetadata.put(
                StringUtils.removeStart(entry.getKey().toString(), SlaEventKeys.EVENT_ADDITIONAL_METADATA_PREFIX),
                entry.getValue().toString());
        }
    }
}
From source file:org.apache.ivory.workflow.engine.OozieWorkflowEngine.java
@Override
public void reRun(String cluster, String jobId, Properties props) throws IvoryException {
    OozieClient client = OozieClientFactory.get(cluster);
    try {
        WorkflowJob jobInfo = client.getJobInfo(jobId);
        Properties jobprops = OozieUtils.toProperties(jobInfo.getConf());
        if (props == null || props.isEmpty()) {
            jobprops.put(OozieClient.RERUN_FAIL_NODES, "false");
        } else {
            for (Entry<Object, Object> entry : props.entrySet()) {
                jobprops.put(entry.getKey(), entry.getValue());
            }
        }
        jobprops.remove(OozieClient.COORDINATOR_APP_PATH);
        jobprops.remove(OozieClient.BUNDLE_APP_PATH);
        client.reRun(jobId, jobprops);
        assertStatus(cluster, jobId, WorkflowJob.Status.RUNNING);
        LOG.info("Rerun job " + jobId + " on cluster " + cluster);
    } catch (Exception e) {
        LOG.error("Unable to rerun workflows", e);
        throw new IvoryException(e);
    }
}
From source file:com.idtmatter.insta4j.client.config.DefaultInstaClientConfig.java
private void init() {
    final Properties PROPERTIES = new Properties();
    FileInputStream st = null;
    try {
        //TODO: Avoid using ResourceUtils.
        st = new FileInputStream(ResourceUtils.getFile("classpath:" + PROPERTY_JINSTAPAPER_PROPERTY_NAME));
        PROPERTIES.load(st);
    } catch (IOException e) {
        // ignore
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (IOException ex) {
                // ignore
            }
        }
    }
    for (final String name : new String[] { PROPERTY_CONSUMER_KEY, PROPERTY_CONSUMER_SECRET }) {
        final String value = System.getProperty(name);
        if (value != null) {
            PROPERTIES.setProperty(name, value);
        }
    }
    if (PROPERTIES.getProperty(PROPERTY_CONSUMER_KEY) == null
            || PROPERTIES.getProperty(PROPERTY_CONSUMER_SECRET) == null) {
        throw new IllegalArgumentException(String.format(
                "No consumerKey and/or consumerSecret found in %s file. "
                        + "You have to provide these as system properties.",
                PROPERTY_JINSTAPAPER_PROPERTY_NAME));
    }
    for (final Map.Entry<Object, Object> entry : PROPERTIES.entrySet()) {
        properties.put((String) entry.getKey(), entry.getValue());
    }
}
From source file:org.apache.hadoop.conf.Configuration.java
public void write(DataOutput out) throws IOException {
    Properties props = getProps();
    WritableUtils.writeVInt(out, props.size());
    for (Map.Entry<Object, Object> item : props.entrySet()) {
        org.apache.hadoop.io.Text.writeString(out, (String) item.getKey());
        org.apache.hadoop.io.Text.writeString(out, (String) item.getValue());
    }
}
From source file:org.apache.hadoop.hive.ql.exec.FileSinkOperator.java
private void logOutputFormatError(Configuration hconf, HiveException ex) {
    StringWriter errorWriter = new StringWriter();
    errorWriter.append("Failed to create output format; configuration: ");
    try {
        Configuration.dumpConfiguration(hconf, errorWriter);
    } catch (IOException ex2) {
        errorWriter.append("{ failed to dump configuration: " + ex2.getMessage() + " }");
    }
    Properties tdp = null;
    if (this.conf.getTableInfo() != null && (tdp = this.conf.getTableInfo().getProperties()) != null) {
        errorWriter.append(";\n table properties: { ");
        for (Map.Entry<Object, Object> e : tdp.entrySet()) {
            errorWriter.append(e.getKey() + ": " + e.getValue() + ", ");
        }
        errorWriter.append('}');
    }
    LOG.error(errorWriter.toString(), ex);
}
From source file:org.apache.nifi.bootstrap.RunNiFi.java
public void env() {
    final Logger logger = cmdLogger;
    final Status status = getStatus(logger);
    if (status.getPid() == null) {
        logger.info("Apache NiFi is not running");
        return;
    }
    final Class<?> virtualMachineClass;
    try {
        virtualMachineClass = Class.forName("com.sun.tools.attach.VirtualMachine");
    } catch (final ClassNotFoundException cnfe) {
        logger.error("Seems tools.jar (Linux / Windows JDK) or classes.jar (Mac OS) is not available in classpath");
        return;
    }
    final Method attachMethod;
    final Method detachMethod;
    try {
        attachMethod = virtualMachineClass.getMethod("attach", String.class);
        detachMethod = virtualMachineClass.getDeclaredMethod("detach");
    } catch (final Exception e) {
        logger.error("Methods required for getting environment not available", e);
        return;
    }
    final Object virtualMachine;
    try {
        virtualMachine = attachMethod.invoke(null, status.getPid());
    } catch (final Throwable t) {
        logger.error("Problem attaching to NiFi", t);
        return;
    }
    try {
        final Method getSystemPropertiesMethod = virtualMachine.getClass().getMethod("getSystemProperties");
        final Properties sysProps = (Properties) getSystemPropertiesMethod.invoke(virtualMachine);
        for (Entry<Object, Object> syspropEntry : sysProps.entrySet()) {
            logger.info(syspropEntry.getKey().toString() + " = " + syspropEntry.getValue().toString());
        }
    } catch (Throwable t) {
        throw new RuntimeException(t);
    } finally {
        try {
            detachMethod.invoke(virtualMachine);
        } catch (final Exception e) {
            logger.warn("Caught exception detaching from process", e);
        }
    }
}
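A caveat worth keeping in mind when adapting the examples above: entrySet() only reflects entries stored directly in the Properties object. Defaults supplied through the Properties(Properties defaults) constructor are consulted by getProperty() but never appear in the entry set; stringPropertyNames() does include them. The sketch below (class and key names are illustrative) demonstrates the difference.

import java.util.Properties;

public class DefaultsDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("timeout", "30");

        Properties props = new Properties(defaults);
        props.setProperty("host", "example.org");

        System.out.println(props.getProperty("timeout"));  // 30 (resolved via defaults)
        System.out.println(props.entrySet().size());       // 1  (defaults are not in entrySet)
        System.out.println(props.stringPropertyNames());   // [host, timeout]
    }
}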