List of usage examples for java.net.InetAddress.getHostName()
public String getHostName()
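getHostName() returns the host name for this address. When the InetAddress was created from a host name, that name is returned directly; when it was created from a literal IP address, the method may perform a reverse name lookup and falls back to the textual IP if the lookup fails. A minimal, self-contained sketch (the printed values depend on local DNS configuration and are illustrative only):

import java.net.InetAddress;
import java.net.UnknownHostException;

public class GetHostNameExample {
    public static void main(String[] args) {
        try {
            // Host name of the local machine, as reported by getLocalHost().
            InetAddress local = InetAddress.getLocalHost();
            System.out.println("Local host name: " + local.getHostName());

            // Created from a literal IP: getHostName() may trigger a reverse
            // lookup and returns the textual IP address if that lookup fails.
            InetAddress byIp = InetAddress.getByName("127.0.0.1");
            System.out.println("Resolved name: " + byIp.getHostName());
        } catch (UnknownHostException e) {
            System.err.println("Could not resolve host: " + e.getMessage());
        }
    }
}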
From source file:org.apache.hadoop.mapred.TestEmptyJob.java
/**
 * Simple method running a MapReduce job with no input data. Used to test that
 * such a job is successful.
 *
 * @param fileSys
 * @param numMaps
 * @param numReduces
 * @return true if the MR job is successful, otherwise false
 * @throws IOException
 */
private boolean launchEmptyJob(URI fileSys, int numMaps, int numReduces) throws IOException {
    // create an empty input dir
    final Path inDir = new Path(TEST_ROOT_DIR, "testing/empty/input");
    final Path outDir = new Path(TEST_ROOT_DIR, "testing/empty/output");
    final Path inDir2 = new Path(TEST_ROOT_DIR, "testing/dummy/input");
    final Path outDir2 = new Path(TEST_ROOT_DIR, "testing/dummy/output");
    final Path share = new Path(TEST_ROOT_DIR, "share");

    JobConf conf = mr.createJobConf();
    FileSystem fs = FileSystem.get(fileSys, conf);
    fs.delete(new Path(TEST_ROOT_DIR), true);
    fs.delete(outDir, true);
    if (!fs.mkdirs(inDir)) {
        LOG.warn("Can't create " + inDir);
        return false;
    }

    // use WordCount example
    FileSystem.setDefaultUri(conf, fileSys);
    conf.setJobName("empty");
    // use an InputFormat which returns no split
    conf.setInputFormat(EmptyInputFormat.class);
    conf.setOutputCommitter(CommitterWithDelayCleanup.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);
    conf.setMapperClass(IdentityMapper.class);
    conf.setReducerClass(IdentityReducer.class);
    FileInputFormat.setInputPaths(conf, inDir);
    FileOutputFormat.setOutputPath(conf, outDir);
    conf.setNumMapTasks(numMaps);
    conf.setNumReduceTasks(numReduces);
    conf.set("share", share.toString());

    // run job and wait for completion
    JobClient jc = new JobClient(conf);
    RunningJob runningJob = jc.submitJob(conf);
    JobInProgress job = mr.getJobTrackerRunner().getJobTracker().getJob(runningJob.getID());

    InetAddress ip = InetAddress.getLocalHost();
    if (ip != null) {
        assertTrue(job.getJobSubmitHostAddress().equalsIgnoreCase(ip.getHostAddress()));
        assertTrue(job.getJobSubmitHostName().equalsIgnoreCase(ip.getHostName()));
    }

    while (true) {
        if (job.isCleanupLaunched()) {
            LOG.info("Waiting for cleanup to be launched for job " + runningJob.getID());
            break;
        }
        UtilsForTests.waitFor(100);
    }

    // submit another job so that the map load increases and scheduling happens
    LOG.info("Launching dummy job ");
    RunningJob dJob = null;
    try {
        JobConf dConf = new JobConf(conf);
        dConf.setOutputCommitter(FileOutputCommitter.class);
        dJob = UtilsForTests.runJob(dConf, inDir2, outDir2, 2, 0);
    } catch (Exception e) {
        LOG.info("Exception ", e);
        throw new IOException(e);
    }

    while (true) {
        LOG.info("Waiting for job " + dJob.getID() + " to complete");
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
        }
        if (dJob.isComplete()) {
            break;
        }
    }

    // check if the second job is successful
    assertTrue(dJob.isSuccessful());

    // signal the cleanup
    fs.create(share).close();

    while (true) {
        LOG.info("Waiting for job " + runningJob.getID() + " to complete");
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
        }
        if (runningJob.isComplete()) {
            break;
        }
    }

    assertTrue(runningJob.isComplete());
    assertTrue(runningJob.isSuccessful());

    JobID jobID = runningJob.getID();

    TaskReport[] jobSetupTasks = jc.getSetupTaskReports(jobID);
    assertTrue("Number of job-setup tips is not 2!", jobSetupTasks.length == 2);
    assertTrue("Setup progress is " + runningJob.setupProgress() + " and not 1.0",
            runningJob.setupProgress() == 1.0);
    assertTrue("Setup task is not finished!",
            mr.getJobTrackerRunner().getJobTracker().getJob(jobID).isSetupFinished());

    assertTrue("Number of maps is not zero!", jc.getMapTaskReports(runningJob.getID()).length == 0);
    assertTrue("Map progress is " + runningJob.mapProgress() + " and not 1.0!",
            runningJob.mapProgress() == 1.0);
    assertTrue("Reduce progress is " + runningJob.reduceProgress() + " and not 1.0!",
            runningJob.reduceProgress() == 1.0);
    assertTrue("Number of reduces is not " + numReduces,
            jc.getReduceTaskReports(runningJob.getID()).length == numReduces);

    TaskReport[] jobCleanupTasks = jc.getCleanupTaskReports(jobID);
    assertTrue("Number of job-cleanup tips is not 2!", jobCleanupTasks.length == 2);
    assertTrue("Cleanup progress is " + runningJob.cleanupProgress() + " and not 1.0",
            runningJob.cleanupProgress() == 1.0);

    assertTrue("Job output directory doesn't exist!", fs.exists(outDir));
    FileStatus[] list = fs.listStatus(outDir, new Utils.OutputFileUtils.OutputFilesFilter());
    assertTrue("Number of part-files is " + list.length + " and not " + numReduces,
            list.length == numReduces);

    // cleanup
    fs.delete(outDir, true);

    // return job result
    LOG.info("job is complete: " + runningJob.isSuccessful());
    return (runningJob.isSuccessful());
}
From source file:org.elasticsearch.river.kafka.KafkaRiver.java
@SuppressWarnings({ "unchecked" }) @Inject/*from w ww .jav a 2 s. c o m*/ public KafkaRiver(RiverName riverName, RiverSettings settings, Client client, ScriptService scriptService) { super(riverName, settings); this.client = client; if (settings.settings().containsKey("kafka")) { Map<String, Object> kafkaSettings = (Map<String, Object>) settings.settings().get("kafka"); zkAddress = XContentMapValues.nodeStringValue(kafkaSettings.get("zkaddress"), "localhost:2181"); kafkaTopic = XContentMapValues.nodeStringValue(kafkaSettings.get("topic"), "info"); kafkaGroupId = XContentMapValues.nodeStringValue(kafkaSettings.get("groupid"), "default"); zkSessionTimeout = XContentMapValues.nodeIntegerValue(kafkaSettings.get("zk_session_timeout"), 400); zkSyncTime = XContentMapValues.nodeIntegerValue(kafkaSettings.get("zk_sync_time"), 200); zkAutocommitInterval = XContentMapValues.nodeIntegerValue(kafkaSettings.get("zk_autocommit_interval"), 1000); fetchSize = XContentMapValues.nodeLongValue(kafkaSettings.get("fetch_size"), 307200); kafkaMsgPack = XContentMapValues.nodeBooleanValue(kafkaSettings.get("msgpack"), false); } else { logger.warn("using localhost zookeeper"); zkAddress = new String("localhost:2181"); kafkaTopic = "helloworld"; kafkaGroupId = "helloworld"; zkSessionTimeout = 5000; zkSyncTime = 200; zkAutocommitInterval = 1000; fetchSize = 307200; kafkaMsgPack = false; } if (settings.settings().containsKey("index")) { Map<String, Object> indexSettings = (Map<String, Object>) settings.settings().get("index"); indexName = XContentMapValues.nodeStringValue(indexSettings.get("name"), kafkaTopic); bulkSize = XContentMapValues.nodeIntegerValue(indexSettings.get("bulk_size"), 100); if (indexSettings.containsKey("bulk_timeout")) { bulkTimeout = TimeValue.parseTimeValue( XContentMapValues.nodeStringValue(indexSettings.get("bulk_timeout"), "1000ms"), TimeValue.timeValueMillis(1000)); } else { bulkTimeout = TimeValue.timeValueMillis(1000); } ordered = XContentMapValues.nodeBooleanValue(indexSettings.get("ordered"), false); if (indexSettings.containsKey("ttl")) { ttl = TimeValue.parseTimeValue(XContentMapValues.nodeStringValue(indexSettings.get("ttl"), "0ms"), TimeValue.timeValueMillis(0)); } else { ttl = TimeValue.timeValueMillis(0); } } else { indexName = kafkaTopic; bulkSize = 100; bulkTimeout = TimeValue.timeValueMillis(1000); ordered = false; ttl = TimeValue.timeValueMillis(0); } if (settings.settings().containsKey("custom")) { Map<String, Object> customSettings = (Map<String, Object>) settings.settings().get("custom"); String[] statsdString = XContentMapValues.nodeStringValue(customSettings.get("statsd"), null) .split(":"); if (statsdString.length > 1) { statsdServer = statsdString[0]; statsdPort = Integer.parseInt(statsdString[1]); } else { statsdServer = null; statsdPort = 0; } typeField = XContentMapValues.nodeStringValue(customSettings.get("type_field"), null); uidField = XContentMapValues.nodeStringValue(customSettings.get("uid_field"), null); dailyIndex = XContentMapValues.nodeBooleanValue(customSettings.get("daily_index"), false); if (dailyIndex) { dailyIndexField = XContentMapValues.nodeStringValue(customSettings.get("daily_index_field"), null); } else { dailyIndexField = null; } ttlField = XContentMapValues.nodeStringValue(customSettings.get("ttl_field"), null); } else { typeField = null; uidField = null; ttlField = null; statsdServer = null; statsdPort = 0; dailyIndex = false; dailyIndexField = null; } String hostname; try { InetAddress addr = InetAddress.getLocalHost(); hostname = 
addr.getHostName().replace('.', '-'); logger.info("Hostname: " + hostname); } catch (java.net.UnknownHostException e) { logger.info("Exception: " + e); hostname = new String("unknown-host"); } statsdPrefix = new String("system.elastic-river." + hostname + ".river." + kafkaTopic); }
From source file:hudson.plugins.dimensionsscm.ArtifactUploader.java
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
        throws IOException, InterruptedException {
    long key = -1;
    Logger.Debug("Invoking perform callout " + this.getClass().getName());
    FilePath workspace = build.getWorkspace();
    boolean bRet = false;
    boolean isStream = false;

    try {
        if (!(build.getProject().getScm() instanceof DimensionsSCM)) {
            listener.fatalError("[DIMENSIONS] This plugin only works with the Dimensions SCM engine.");
            build.setResult(Result.FAILURE);
            throw new IOException("[DIMENSIONS] This plugin only works with a Dimensions SCM engine");
        }
        if (build.getResult() == Result.SUCCESS) {
            DimensionsSCM scm = (DimensionsSCM) build.getProject().getScm();
            DimensionsAPI dmSCM = new DimensionsAPI();
            Logger.Debug("Calculating version of Dimensions...");
            int version = 2009;
            key = dmSCM.login(scm.getJobUserName(), scm.getJobPasswd(), scm.getJobDatabase(), scm.getJobServer());
            if (key > 0) {
                // Get the server version
                Logger.Debug("Login worked.");
                version = dmSCM.getDmVersion();
                if (version == 0) {
                    version = 2009;
                }
                if (version != 10) {
                    isStream = dmSCM.isStream(key, scm.getProject());
                }
                dmSCM.logout(key);
            }

            // Get the details of the master
            InetAddress netAddr = InetAddress.getLocalHost();
            byte[] ipAddr = netAddr.getAddress();
            String hostname = netAddr.getHostName();

            String projectName = build.getProject().getName();
            int buildNo = build.getNumber();

            Logger.Debug("Checking if master or slave...");
            boolean master = false;
            GetHostDetailsTask buildHost = new GetHostDetailsTask(hostname);
            master = workspace.act(buildHost);

            if (master) {
                // Running on master...
                listener.getLogger().println("[DIMENSIONS] Running checkin on master...");
                listener.getLogger().flush();
                // Using Java API because this allows the plugin to work on platforms
                // where Dimensions has not been ported, e.g. MAC OS, which is what
                // I use
                CheckInAPITask task = new CheckInAPITask(build, scm, buildNo, projectName, version, this,
                        workspace, listener);
                bRet = workspace.act(task);
            } else {
                // Running on slave... Have to use the command line as Java API will not
                // work on remote hosts. Cannot serialise it...
                {
                    // VariableResolver does not appear to be serialisable either, so...
                    VariableResolver<String> myResolver = build.getBuildVariableResolver();
                    String requests = myResolver.resolve("DM_TARGET_REQUEST");

                    listener.getLogger().println("[DIMENSIONS] Running checkin on slave...");
                    listener.getLogger().flush();

                    CheckInCmdTask task = new CheckInCmdTask(scm.getJobUserName(), scm.getJobPasswd(),
                            scm.getJobDatabase(), scm.getJobServer(), scm.getProject(), requests,
                            isForceCheckIn(), isForceTip(), getPatterns(), version, isStream, buildNo,
                            projectName, getOwningPart(), workspace, listener);
                    bRet = workspace.act(task);
                }
            }
        } else {
            bRet = true;
        }
        if (!bRet) {
            build.setResult(Result.FAILURE);
        }
    } catch (Exception e) {
        listener.fatalError("Unable to load build artifacts into Dimensions - " + e.getMessage());
        build.setResult(Result.FAILURE);
        return false;
    } finally {
    }
    return bRet;
}
From source file:edu.vt.middleware.gator.log4j.SocketServer.java
/**
 * Gets the first project to which the host possessing the given IP address is a
 * member.
 *
 * @param addr IP address.
 *
 * @return First project to which the client at the given IP address is a
 *         member, or null if the client does not belong to any project.
 */
public ProjectConfig getProject(final InetAddress addr) {
    ProjectConfig project = null;
    List<ProjectConfig> projects = configManager.findProjectsByClientName(addr.getHostName());
    if (projects.size() > 0) {
        project = projects.get(0);
    } else {
        projects = configManager.findProjectsByClientName(addr.getHostAddress());
        if (projects.size() > 0) {
            project = projects.get(0);
        }
    }
    return project;
}
From source file:com.sshtools.daemon.SshServer.java
/**
 * @param socket
 *
 * @return
 *
 * @throws IOException
 */
protected TransportProtocolServer createSession(Socket socket) throws IOException {
    log.debug("Initializing connection");

    InetAddress address = socket.getInetAddress();
    /*( (InetSocketAddress) socket.getRemoteSocketAddress()).getAddress();*/
    log.debug("Remote Hostname: " + address.getHostName());
    log.debug("Remote IP: " + address.getHostAddress());

    TransportProtocolServer transport = new TransportProtocolServer();

    // Create the Authentication Protocol
    AuthenticationProtocolServer authentication = new AuthenticationProtocolServer();

    // Create the Connection Protocol
    ConnectionProtocol connection = new ConnectionProtocol();

    // Configure the connections services
    configureServices(connection);

    // Allow the Connection Protocol to be accepted by the Authentication Protocol
    authentication.acceptService(connection);

    // Allow the Authentication Protocol to be accepted by the Transport Protocol
    transport.acceptService(authentication);
    transport.startTransportProtocol(new ConnectedSocketTransportProvider(socket),
            new SshConnectionProperties());

    return transport;
}
From source file:org.jboss.dashboard.profiler.Profiler.java
protected String getSubjectPrefix() {
    try {
        InetAddress localhost = InetAddress.getLocalHost();
        return "[" + localhost.getHostName() + "] ";
    } catch (UnknownHostException e) {
        return "";
    }
}
From source file:de.sjka.logstash.osgi.internal.LogstashSender.java
@SuppressWarnings("unchecked") private void addIps(JSONObject values) { List<String> ip4s = new ArrayList<>(); List<String> ip6s = new ArrayList<>(); String ip = "unknown"; try {//from w w w . jav a2s. c om Enumeration<NetworkInterface> networkInterfaces = NetworkInterface.getNetworkInterfaces(); while (networkInterfaces.hasMoreElements()) { NetworkInterface networkInterface = networkInterfaces.nextElement(); Enumeration<InetAddress> inetAddresses = networkInterface.getInetAddresses(); while (inetAddresses.hasMoreElements()) { InetAddress address = inetAddresses.nextElement(); if (address instanceof Inet4Address) { ip4s.add(address.getHostAddress() + "_" + address.getHostName()); if (!address.isLinkLocalAddress() && !address.isAnyLocalAddress() && !address.isLoopbackAddress()) { ip = address.getHostAddress(); } } if (address instanceof Inet6Address) { ip6s.add(address.getHostAddress() + "_" + address.getHostName()); } } } ip4s.add("LOC_" + InetAddress.getLocalHost().getHostAddress() + "_" + InetAddress.getLocalHost().getHostName()); if (!ip4s.isEmpty()) { values.put("ip", ip); values.put("ip4s", ip4s); } if (!ip6s.isEmpty()) { values.put("ip6s", ip6s); } } catch (UnknownHostException | SocketException e) { values.put("ip", "offline_" + e.getMessage()); } }
From source file:org.rdv.rbnb.MetadataManager.java
/**
 * Create the class using the RBNBController for connection information.
 *
 * @param rbnbController the RBNBController to use
 */
public MetadataManager(RBNBController rbnbController) {
    this.rbnbController = rbnbController;

    try {
        InetAddress addr = InetAddress.getLocalHost();
        String hostname = addr.getHostName();
        rbnbSinkName += "@" + hostname;
    } catch (UnknownHostException e) {
    }

    metadataListeners = new ArrayList<MetadataListener>();

    channels = new HashMap<String, Channel>();
    ctree = null;

    update = false;
    sleeping = false;
    updateThread = null;

    markerListeners = new ArrayList<DataListener>();
}
From source file:com.ms.commons.log.MsSyslogAppender.java
/**
 * Get the host name used to identify this appender.
 *
 * @return local host name
 * @since 1.2.15
 */
private String getLocalHostname() {
    if (localHostname == null) {
        try {
            InetAddress addr = InetAddress.getLocalHost();
            localHostname = addr.getHostName();
        } catch (UnknownHostException uhe) {
            localHostname = "UNKNOWN_HOST";
        }
    }
    return localHostname;
}
From source file:org.eredlab.g4.ccl.net.smtp.SMTPClient.java
/***
 * Login to the SMTP server by sending the HELO command with the
 * client hostname as an argument. Before performing any mail commands,
 * you must first login.
 * <p>
 * @return True if successfully completed, false if not.
 * @exception SMTPConnectionClosedException
 *      If the SMTP server prematurely closes the connection as a result
 *      of the client being idle or some other reason causing the server
 *      to send SMTP reply code 421. This exception may be caught either
 *      as an IOException or independently as itself.
 * @exception IOException If an I/O error occurs while either sending a
 *      command to the server or receiving a reply from the server.
 ***/
public boolean login() throws IOException {
    String name;
    InetAddress host;

    host = getLocalAddress();
    name = host.getHostName();

    if (name == null) {
        return false;
    }

    return SMTPReply.isPositiveCompletion(helo(name));
}
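A hedged usage sketch for the login() method above, assuming this class mirrors the Apache Commons Net SMTPClient it is modelled on (connect() and disconnect() inherited from the SocketClient base class); the mail server name is a placeholder:

// Inside a method that declares throws IOException.
SMTPClient client = new SMTPClient();
client.connect("mail.example.com");   // placeholder server name
if (client.login()) {                 // sends HELO <local host name>
    // Safe to issue further mail commands here.
}
client.disconnect();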