List of usage examples for java.io.BufferedReader.ready()
public boolean ready() throws IOException
From source file: com.netscape.cms.servlet.csadmin.ConfigurationUtils.java
public static void importLDIFS(String param, LDAPConnection conn, boolean suppressErrors) throws IOException, EPropertyNotFound, EBaseException { IConfigStore cs = CMS.getConfigStore(); logger.debug("importLDIFS: param=" + param); String v = cs.getString(param); String baseDN = cs.getString("internaldb.basedn"); String database = cs.getString("internaldb.database"); String instancePath = cs.getString("instanceRoot"); String instanceId = cs.getString("instanceId"); String cstype = cs.getString("cs.type"); String dbuser = cs.getString("preop.internaldb.dbuser", "uid=" + DBUSER + ",ou=people," + baseDN); String configDir = instancePath + File.separator + cstype.toLowerCase() + File.separator + "conf"; StringTokenizer tokenizer = new StringTokenizer(v, ","); while (tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken().trim(); int index = token.lastIndexOf("/"); String name = token;// w w w.j a v a2 s. c o m if (index != -1) { name = token.substring(index + 1); } logger.debug("importLDIFS(): ldif file = " + token); String filename = configDir + File.separator + name; logger.debug("importLDIFS(): ldif file copy to " + filename); PrintStream ps = null; BufferedReader in = null; in = new BufferedReader(new InputStreamReader(new FileInputStream(token), "UTF-8")); ps = new PrintStream(filename, "UTF-8"); while (in.ready()) { String s = in.readLine(); int n = s.indexOf("{"); if (n == -1) { ps.println(s); } else { boolean endOfline = false; while (n != -1) { ps.print(s.substring(0, n)); int n1 = s.indexOf("}"); String tok = s.substring(n + 1, n1); if (tok.equals("instanceId")) { ps.print(instanceId); } else if (tok.equals("rootSuffix")) { ps.print(baseDN); } else if (tok.equals("database")) { ps.print(database); } else if (tok.equals("dbuser")) { ps.print(dbuser); } if ((s.length() + 1) == n1) { endOfline = true; break; } s = s.substring(n1 + 1); n = s.indexOf("{"); } if (!endOfline) { ps.println(s); } } } in.close(); ps.close(); ArrayList<String> errors = new 
ArrayList<String>(); LDAPUtil.importLDIF(conn, filename, errors); if (!errors.isEmpty()) { logger.error("importLDIFS(): LDAP Errors in importing " + filename); for (String error : errors) { logger.error(error); } if (!suppressErrors) { throw new EBaseException("LDAP Errors in importing " + filename); } } } }
From source file: org.cesecore.util.CertTools.java
/** * Reads certificates in PEM-format from an InputStream. * The stream may contain other things between the different certificates. * //from w w w.ja v a2s . c o m * @param certstream the input stream containing the certificates in PEM-format * @return Ordered List of Certificates, first certificate first, or empty List * @exception CertificateParsingException if the stream contains an incorrect certificate. */ public static List<Certificate> getCertsFromPEM(InputStream certstream) throws CertificateParsingException { if (log.isTraceEnabled()) { log.trace(">getCertfromPEM"); } ArrayList<Certificate> ret = new ArrayList<Certificate>(); String beginKeyTrust = "-----BEGIN TRUSTED CERTIFICATE-----"; String endKeyTrust = "-----END TRUSTED CERTIFICATE-----"; BufferedReader bufRdr = null; ByteArrayOutputStream ostr = null; PrintStream opstr = null; try { try { bufRdr = new BufferedReader(new InputStreamReader(certstream)); while (bufRdr.ready()) { ostr = new ByteArrayOutputStream(); opstr = new PrintStream(ostr); String temp; while ((temp = bufRdr.readLine()) != null && !(temp.equals(CertTools.BEGIN_CERTIFICATE) || temp.equals(beginKeyTrust))) { continue; } if (temp == null) { if (ret.isEmpty()) { // There was no certificate in the file throw new CertificateParsingException("Error in " + certstream.toString() + ", missing " + CertTools.BEGIN_CERTIFICATE + " boundary"); } else { // There were certificates, but some blank lines or something in the end // anyhow, the file has ended so we can break here. 
break; } } while ((temp = bufRdr.readLine()) != null && !(temp.equals(CertTools.END_CERTIFICATE) || temp.equals(endKeyTrust))) { opstr.print(temp); } if (temp == null) { throw new IllegalArgumentException("Error in " + certstream.toString() + ", missing " + CertTools.END_CERTIFICATE + " boundary"); } opstr.close(); byte[] certbuf = Base64.decode(ostr.toByteArray()); ostr.close(); // Phweeew, were done, now decode the cert from file back to Certificate object Certificate cert = getCertfromByteArray(certbuf); ret.add(cert); } } finally { if (bufRdr != null) { bufRdr.close(); } if (opstr != null) { opstr.close(); } if (ostr != null) { ostr.close(); } } } catch (IOException e) { throw new IllegalStateException( "Exception caught when attempting to read stream, see underlying IOException", e); } if (log.isTraceEnabled()) { log.trace("<getcertfromPEM:" + ret.size()); } return ret; }
From source file: stainingestimation.StainingEstimation.java
/**
 * Opens a dialog to let the user chose a table which is then loaded in the
 * staining estimation parameters table view.
 *
 * The file is expected to be semicolon-separated with one header line, which
 * is skipped; each subsequent line is parsed cell-by-cell according to the
 * table model's column classes (Double, Integer, String, or raw).
 */
private void loadTable() {
    if (previewOriginal == null) {
        JOptionPane.showMessageDialog(this, "Please select first a TMA spot in TMARKER's main window.",
                "Select Image", JOptionPane.WARNING_MESSAGE);
        return;
    }
    String currentDir = manager.getCurrentDir();
    File file = FileChooser.chooseCSVFile(this, currentDir);
    if (file == null) {
        return; // user cancelled the chooser
    }
    DefaultTableModel model = (DefaultTableModel) jXTable1.getModel();
    // try-with-resources: the original never closed the reader when parsing
    // failed, leaking the file handle.
    try (BufferedReader bfr = new BufferedReader(new FileReader(file))) {
        // Clear the table only after the file opened successfully; the original
        // wiped the table even when the file could not be read.
        model.setRowCount(0);
        bfr.readLine(); // skip the header line
        // Loop on readLine() instead of ready(): ready() only reports whether a
        // read would block, and the original could hand a null or empty trailing
        // line to split().
        String line;
        int i = 0;
        while ((line = bfr.readLine()) != null) {
            String[] cells = line.split(";");
            model.setRowCount(++i);
            for (int j = 0; j < model.getColumnCount(); j++) {
                // NOTE(review): a short row (fewer cells than columns) throws
                // ArrayIndexOutOfBoundsException, which is caught below and
                // logged — same best-effort behavior as the original.
                Class<?> colClass = model.getColumnClass(j);
                if (colClass == Double.class) {
                    model.setValueAt(Double.parseDouble(cells[j]), i - 1, j);
                } else if (colClass == Integer.class) {
                    model.setValueAt(Integer.parseInt(cells[j]), i - 1, j);
                } else if (colClass == String.class) {
                    // Strip quoting added by the CSV writer.
                    model.setValueAt(cells[j].replaceAll("\"", ""), i - 1, j);
                } else {
                    model.setValueAt(cells[j], i - 1, j);
                }
            }
        }
    } catch (Exception e) {
        Logger.getLogger(StainingEstimation.class.getName()).log(Level.SEVERE, null, e);
    }
}
From source file: ffx.potential.parsers.PDBFilter.java
@Override
public boolean readNext(boolean resetPosition) {
    // Regex for PDB MODEL records: ^ anchors at line start, \s+ matches one or
    // more whitespace characters, (\d+) captures the model number.
    Pattern modelPatt = Pattern.compile("^MODEL\\s+(\\d+)");
    // Which model of the trajectory to read on this call.
    modelsRead = resetPosition ? 1 : modelsRead + 1;
    boolean eof = true;
    for (MolecularAssembly system : systems) {
        File file = system.getFile();
        currentFile = file;
        try {
            // Reuse the cached reader for this assembly if it still has data;
            // otherwise (re)open the file from the beginning.
            // NOTE(review): !ready() is used here as an end-of-file proxy; for
            // plain FileReaders this works in practice, but ready() formally only
            // reports whether a read would block — confirm this is intended.
            BufferedReader currentReader;
            if (readers.containsKey(system)) {
                currentReader = readers.get(system);
                if (!currentReader.ready()) {
                    currentReader = new BufferedReader(new FileReader(currentFile));
                    readers.put(system, currentReader);
                }
            } else {
                currentReader = new BufferedReader(new FileReader(currentFile));
                readers.put(system, currentReader);
            }
            // Skip to appropriate model.
            String line = currentReader.readLine();
            while (line != null) {
                line = line.trim();
                Matcher m = modelPatt.matcher(line);
                if (m.find()) {
                    int modelNum = Integer.parseInt(m.group(1));
                    if (modelNum == modelsRead) {
                        logger.log(Level.INFO, String.format(" Reading model %d for %s", modelNum, currentFile));
                        eof = false;
                        break;
                    }
                }
                line = currentReader.readLine();
            }
            if (eof) {
                logger.log(Level.INFO, String.format(" End of file reached for %s", file));
                currentReader.close();
                return false;
            }
            // Begin parsing the model.
            boolean modelDone = false;
            line = currentReader.readLine();
            while (line != null) {
                line = line.trim();
                // Columns 1-6 hold the record type (ATOM, HETATM, ENDMDL, END, ...).
                String recID = line.substring(0, Math.min(6, line.length())).trim();
                try {
                    Record record = Record.valueOf(recID);
                    boolean hetatm = true;
                    switch (record) {
                    // =============================================================================
                    //  7 - 11  Integer       serial      Atom serial number.
                    // 13 - 16  Atom          name        Atom name.
                    // 17       Character     altLoc      Alternate location indicator.
                    // 18 - 20  Residue name  resName     Residue name.
                    // 22       Character     chainID     Chain identifier.
                    // 23 - 26  Integer       resSeq      Residue sequence number.
                    // 27       AChar         iCode       Code for insertion of residues.
                    // 31 - 38  Real(8.3)     x           Orthogonal coordinates for X in Angstroms.
                    // 39 - 46  Real(8.3)     y           Orthogonal coordinates for Y in Angstroms.
                    // 47 - 54  Real(8.3)     z           Orthogonal coordinates for Z in Angstroms.
                    // 55 - 60  Real(6.2)     occupancy   Occupancy.
                    // 61 - 66  Real(6.2)     tempFactor  Temperature factor.
                    // 77 - 78  LString(2)    element     Element symbol, right-justified.
                    // 79 - 80  LString(2)    charge      Charge on the atom.
                    // =============================================================================
                    //          1         2         3         4         5         6         7
                    //123456789012345678901234567890123456789012345678901234567890123456789012345678
                    //ATOM      1  N   ILE A  16      60.614  71.140 -10.592  1.00  7.38           N
                    //ATOM      2  CA  ILE A  16      60.793  72.149  -9.511  1.00  6.91           C
                    case ATOM:
                        // Deliberate fall-through: ATOM is parsed like HETATM, with
                        // hetatm left false.
                        hetatm = false;
                    case HETATM:
                        // NOTE(review): water ("HOH") records skip this block and fall
                        // through to the ENDMDL/END case below, which sets modelDone —
                        // verify that terminating the model on a water record is intended.
                        if (!line.substring(17, 20).trim().equals("HOH")) {
                            //int serial = Hybrid36.decode(5, line.substring(6, 11));
                            String name = line.substring(12, 16).trim();
                            if (name.toUpperCase().contains("1H") || name.toUpperCase().contains("2H")
                                    || name.toUpperCase().contains("3H")) {
                                // VERSION3_2 is presently just a placeholder for "anything non-standard".
                                fileStandard = VERSION3_2;
                            }
                            // Only accept blank/'A' alternate locations, or the currently
                            // selected alternate location.
                            Character altLoc = line.substring(16, 17).toUpperCase().charAt(0);
                            if (!altLoc.equals(' ') && !altLoc.equals('A') && !altLoc.equals(currentAltLoc)) {
                                break;
                            }
                            String resName = line.substring(17, 20).trim();
                            Character chainID = line.substring(21, 22).charAt(0);
                            List<String> segIDList = segidMap.get(chainID);
                            if (segIDList == null) {
                                logger.log(Level.WARNING, String.format(
                                        " No " + "known segment ID corresponds to " + "chain ID %s",
                                        chainID.toString()));
                                break;
                            }
                            String segID = segIDList.get(0);
                            if (segIDList.size() > 1) {
                                // NOTE(review): message concatenation is missing a space
                                // between "to" and "chain"; left unchanged here.
                                logger.log(Level.WARNING, String.format(
                                        " " + "Multiple segment IDs correspond to" + "chain ID %s; assuming %s",
                                        chainID.toString(), segID));
                            }
                            // Residue sequence is hybrid-36 encoded (supports > 9999 residues).
                            int resSeq = Hybrid36.decode(4, line.substring(22, 26));
                            double[] d = new double[3];
                            d[0] = new Double(line.substring(30, 38).trim());
                            d[1] = new Double(line.substring(38, 46).trim());
                            d[2] = new Double(line.substring(46, 54).trim());
                            // Occupancy/B-factor are not re-read per frame; fixed defaults used.
                            double occupancy = 1.0;
                            double tempFactor = 1.0;
                            Atom newAtom = new Atom(0, name, altLoc, d, resName, resSeq, chainID, occupancy,
                                    tempFactor, segID);
                            newAtom.setHetero(hetatm);
                            // Check if this is a modified residue.
                            if (modres.containsKey(resName.toUpperCase())) {
                                newAtom.setModRes(true);
                            }
                            // Update coordinates of the matching atom in the existing assembly.
                            Atom returnedAtom = activeMolecularAssembly.findAtom(newAtom);
                            if (returnedAtom != null) {
                                returnedAtom.setXYZ(d);
                                double[] retXYZ = new double[3];
                                returnedAtom.getXYZ(retXYZ);
                            } else {
                                String message = String.format(" " + "Could not find atom %s in assembly",
                                        newAtom.toString());
                                if (dieOnMissingAtom) {
                                    logger.severe(message);
                                } else {
                                    logger.warning(message);
                                }
                            }
                            break;
                        }
                    case ENDMDL:
                    case END:
                        // Technically speaking, END should be at the end of the file, not end of the model.
                        logger.log(Level.FINE, String.format(" Model %d successfully read", modelsRead));
                        modelDone = true;
                    default:
                        break;
                    }
                } catch (Exception ex) {
                    // Do nothing; it's not an ATOM/HETATM line.
                }
                if (modelDone) {
                    break;
                }
                line = currentReader.readLine();
            }
            return true;
        } catch (IOException ex) {
            logger.info(String.format(" Exception in parsing frame %d of %s:" + " %s", modelsRead,
                    system.toString(), ex.toString()));
        }
    }
    return false;
}
From source file: org.apache.flink.yarn.Client.java
/**
 * Entry point of the Flink YARN client: parses command-line options, locates
 * the Flink jar and configuration, validates requested memory against the
 * cluster, sets up and submits the ApplicationMaster container, then polls the
 * application state until it finishes, relaying messages and user commands.
 *
 * Calls System.exit() on validation failures.
 */
public void run(String[] args) throws Exception {
    if (UserGroupInformation.isSecurityEnabled()) {
        throw new RuntimeException("Flink YARN client does not have security support right now."
                + "File a bug, we will fix it asap");
    }
    //Utils.logFilesInCurrentDirectory(LOG);
    //
    // Command Line Options
    //
    Options options = new Options();
    options.addOption(VERBOSE);
    options.addOption(FLINK_CONF_DIR);
    options.addOption(FLINK_JAR);
    options.addOption(JM_MEMORY);
    options.addOption(TM_MEMORY);
    options.addOption(TM_CORES);
    options.addOption(CONTAINER);
    options.addOption(GEN_CONF);
    options.addOption(QUEUE);
    options.addOption(QUERY);
    options.addOption(SHIP_PATH);
    options.addOption(SLOTS);
    options.addOption(DYNAMIC_PROPERTIES);
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (MissingOptionException moe) {
        System.out.println(moe.getMessage());
        printUsage();
        System.exit(1);
    }
    // Jar Path: explicit -j option, or the jar this class was loaded from.
    Path localJarPath;
    if (cmd.hasOption(FLINK_JAR.getOpt())) {
        String userPath = cmd.getOptionValue(FLINK_JAR.getOpt());
        if (!userPath.startsWith("file://")) {
            userPath = "file://" + userPath;
        }
        localJarPath = new Path(userPath);
    } else {
        localJarPath = new Path(
                "file://" + Client.class.getProtectionDomain().getCodeSource().getLocation().getPath());
    }
    if (cmd.hasOption(GEN_CONF.getOpt())) {
        LOG.info("Placing default configuration in current directory");
        File outFile = generateDefaultConf(localJarPath);
        LOG.info("File written to " + outFile.getAbsolutePath());
        System.exit(0);
    }
    // Conf Path: explicit conf dir, else search the current directory for a
    // single .yaml file, else generate a default configuration.
    Path confPath = null;
    String confDirPath = "";
    if (cmd.hasOption(FLINK_CONF_DIR.getOpt())) {
        confDirPath = cmd.getOptionValue(FLINK_CONF_DIR.getOpt()) + "/";
        File confFile = new File(confDirPath + CONFIG_FILE_NAME);
        if (!confFile.exists()) {
            LOG.error("Unable to locate configuration file in " + confFile);
            System.exit(1);
        }
        confPath = new Path(confFile.getAbsolutePath());
    } else {
        System.out.println("No configuration file has been specified");
        // no configuration path given.
        // -> see if there is one in the current directory
        File currDir = new File(".");
        File[] candidates = currDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(final File dir, final String name) {
                return name != null && name.endsWith(".yaml");
            }
        });
        if (candidates == null || candidates.length == 0) {
            System.out.println(
                    "No configuration file has been found in current directory.\n" + "Copying default.");
            File outFile = generateDefaultConf(localJarPath);
            confPath = new Path(outFile.toURI());
        } else {
            if (candidates.length > 1) {
                System.out.println("Multiple .yaml configuration files were found in the current directory\n"
                        + "Please specify one explicitly");
                System.exit(1);
            } else if (candidates.length == 1) {
                confPath = new Path(candidates[0].toURI());
            }
        }
    }
    List<File> shipFiles = new ArrayList<File>();
    // path to directory to ship
    if (cmd.hasOption(SHIP_PATH.getOpt())) {
        String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt());
        File shipDir = new File(shipPath);
        if (shipDir.isDirectory()) {
            shipFiles = new ArrayList<File>(Arrays.asList(shipDir.listFiles(new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return !(name.equals(".") || name.equals(".."));
                }
            })));
        } else {
            LOG.warn("Ship directory is not a directory!");
        }
    }
    boolean hasLogback = false;
    boolean hasLog4j = false;
    //check if there is a logback or log4j file
    if (confDirPath.length() > 0) {
        File logback = new File(confDirPath + "/logback.xml");
        if (logback.exists()) {
            shipFiles.add(logback);
            hasLogback = true;
        }
        File log4j = new File(confDirPath + "/log4j.properties");
        if (log4j.exists()) {
            shipFiles.add(log4j);
            hasLog4j = true;
        }
    }
    // queue
    String queue = "default";
    if (cmd.hasOption(QUEUE.getOpt())) {
        queue = cmd.getOptionValue(QUEUE.getOpt());
    }
    // JobManager Memory
    int jmMemory = 512;
    if (cmd.hasOption(JM_MEMORY.getOpt())) {
        jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt()));
    }
    if (jmMemory < MIN_JM_MEMORY) {
        System.out.println("The JobManager memory is below the minimum required memory amount " + "of "
                + MIN_JM_MEMORY + " MB");
        System.exit(1);
    }
    // Task Managers memory
    int tmMemory = 1024;
    if (cmd.hasOption(TM_MEMORY.getOpt())) {
        tmMemory = Integer.valueOf(cmd.getOptionValue(TM_MEMORY.getOpt()));
    }
    if (tmMemory < MIN_TM_MEMORY) {
        System.out.println("The TaskManager memory is below the minimum required memory amount " + "of "
                + MIN_TM_MEMORY + " MB");
        System.exit(1);
    }
    if (cmd.hasOption(SLOTS.getOpt())) {
        slots = Integer.valueOf(cmd.getOptionValue(SLOTS.getOpt()));
    }
    String[] dynamicProperties = null;
    if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) {
        dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt());
    }
    String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties,
            CliFrontend.YARN_DYNAMIC_PROPERTIES_SEPARATOR);
    // Task Managers vcores
    int tmCores = 1;
    if (cmd.hasOption(TM_CORES.getOpt())) {
        tmCores = Integer.valueOf(cmd.getOptionValue(TM_CORES.getOpt()));
    }
    Utils.getFlinkConfiguration(confPath.toUri().getPath());
    int jmPort = GlobalConfiguration.getInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, 0);
    if (jmPort == 0) {
        LOG.warn("Unable to find job manager port in configuration!");
        jmPort = ConfigConstants.DEFAULT_JOB_MANAGER_IPC_PORT;
    }
    conf = Utils.initializeYarnConfiguration();
    // intialize HDFS
    LOG.info("Copy App Master jar from local filesystem and add to local environment");
    // Copy the application master jar to the filesystem
    // Create a local resource to point to the destination jar path
    final FileSystem fs = FileSystem.get(conf);
    // hard coded check for the GoogleHDFS client because its not overriding the getScheme() method.
    if (!fs.getClass().getSimpleName().equals("GoogleHadoopFileSystem") && fs.getScheme().startsWith("file")) {
        LOG.warn("The file system scheme is '" + fs.getScheme() + "'. This indicates that the "
                + "specified Hadoop configuration path is wrong and the sytem is using the default Hadoop configuration values."
                + "The Flink YARN client needs to store its files in a distributed file system");
    }
    // Create yarnClient
    yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();
    // Query cluster for metrics
    if (cmd.hasOption(QUERY.getOpt())) {
        showClusterMetrics(yarnClient);
    }
    if (!cmd.hasOption(CONTAINER.getOpt())) {
        LOG.error("Missing required argument " + CONTAINER.getOpt());
        printUsage();
        yarnClient.stop();
        System.exit(1);
    }
    // TM Count
    final int taskManagerCount = Integer.valueOf(cmd.getOptionValue(CONTAINER.getOpt()));
    System.out.println("Using values:");
    System.out.println("\tContainer Count = " + taskManagerCount);
    System.out.println("\tJar Path = " + localJarPath.toUri().getPath());
    System.out.println("\tConfiguration file = " + confPath.toUri().getPath());
    System.out.println("\tJobManager memory = " + jmMemory);
    System.out.println("\tTaskManager memory = " + tmMemory);
    System.out.println("\tTaskManager cores = " + tmCores);
    // Create application via yarnClient
    YarnClientApplication app = yarnClient.createApplication();
    GetNewApplicationResponse appResponse = app.getNewApplicationResponse();
    // Validate requested resources against the cluster's maximum container size
    // and currently free memory.
    Resource maxRes = appResponse.getMaximumResourceCapability();
    if (tmMemory > maxRes.getMemory() || tmCores > maxRes.getVirtualCores()) {
        LOG.error("The cluster does not have the requested resources for the TaskManagers available!\n"
                + "Maximum Memory: " + maxRes.getMemory() + ", Maximum Cores: " + tmCores);
        yarnClient.stop();
        System.exit(1);
    }
    if (jmMemory > maxRes.getMemory()) {
        LOG.error("The cluster does not have the requested resources for the JobManager available!\n"
                + "Maximum Memory: " + maxRes.getMemory());
        yarnClient.stop();
        System.exit(1);
    }
    int totalMemoryRequired = jmMemory + tmMemory * taskManagerCount;
    ClusterResourceDescription freeClusterMem = getCurrentFreeClusterResources(yarnClient);
    if (freeClusterMem.totalFreeMemory < totalMemoryRequired) {
        LOG.error("This YARN session requires " + totalMemoryRequired + "MB of memory in the cluster. "
                + "There are currently only " + freeClusterMem.totalFreeMemory + "MB available.");
        yarnClient.stop();
        System.exit(1);
    }
    if (tmMemory > freeClusterMem.containerLimit) {
        LOG.error("The requested amount of memory for the TaskManagers (" + tmMemory + "MB) is more than "
                + "the largest possible YARN container: " + freeClusterMem.containerLimit);
        yarnClient.stop();
        System.exit(1);
    }
    if (jmMemory > freeClusterMem.containerLimit) {
        LOG.error("The requested amount of memory for the JobManager (" + jmMemory + "MB) is more than "
                + "the largest possible YARN container: " + freeClusterMem.containerLimit);
        yarnClient.stop();
        System.exit(1);
    }
    // respect custom JVM options in the YAML file
    final String javaOpts = GlobalConfiguration.getString(ConfigConstants.FLINK_JVM_OPTIONS, "");
    // Set up the container launch context for the application master
    ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);
    String amCommand = "$JAVA_HOME/bin/java" + " -Xmx" + Utils.calculateHeapSize(jmMemory) + "M " + javaOpts;
    if (hasLogback || hasLog4j) {
        amCommand += " -Dlog.file=\"" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/jobmanager-main.log\"";
    }
    if (hasLogback) {
        amCommand += " -Dlogback.configurationFile=file:logback.xml";
    }
    if (hasLog4j) {
        amCommand += " -Dlog4j.configuration=file:log4j.properties";
    }
    // NOTE(review): " " + " 1>" produces a double space in the command line;
    // harmless for the shell, left unchanged.
    amCommand += " " + ApplicationMaster.class.getName() + " " + " 1>"
            + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/jobmanager-stdout.log" + " 2>"
            + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/jobmanager-stderr.log";
    amContainer.setCommands(Collections.singletonList(amCommand));
    System.err.println("amCommand=" + amCommand);
    // Set-up ApplicationSubmissionContext for the application
    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    final ApplicationId appId = appContext.getApplicationId();
    // All network ports are offsetted by the application number to avoid
    // version port clashes when running multiple Flink sessions in parallel.
    int appNumber = appId.getId();
    jmPort = Utils.offsetPort(jmPort, appNumber);
    // Setup jar for ApplicationMaster
    LocalResource appMasterJar = Records.newRecord(LocalResource.class);
    LocalResource flinkConf = Records.newRecord(LocalResource.class);
    Path remotePathJar = Utils.setupLocalResource(conf, fs, appId.toString(), localJarPath, appMasterJar,
            fs.getHomeDirectory());
    Path remotePathConf = Utils.setupLocalResource(conf, fs, appId.toString(), confPath, flinkConf,
            fs.getHomeDirectory());
    Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(2);
    localResources.put("flink.jar", appMasterJar);
    localResources.put("flink-conf.yaml", flinkConf);
    // setup security tokens (code from apache storm)
    // paths[0..1] are jar/conf; ship files start at index 3.
    // NOTE(review): paths[2] is never assigned and stays null — confirm
    // Utils.setTokensFor tolerates that.
    final Path[] paths = new Path[3 + shipFiles.size()];
    StringBuffer envShipFileList = new StringBuffer();
    // upload ship files
    for (int i = 0; i < shipFiles.size(); i++) {
        File shipFile = shipFiles.get(i);
        LocalResource shipResources = Records.newRecord(LocalResource.class);
        Path shipLocalPath = new Path("file://" + shipFile.getAbsolutePath());
        paths[3 + i] = Utils.setupLocalResource(conf, fs, appId.toString(), shipLocalPath, shipResources,
                fs.getHomeDirectory());
        localResources.put(shipFile.getName(), shipResources);
        envShipFileList.append(paths[3 + i]);
        if (i + 1 < shipFiles.size()) {
            envShipFileList.append(',');
        }
    }
    paths[0] = remotePathJar;
    paths[1] = remotePathConf;
    sessionFilesDir = new Path(fs.getHomeDirectory(), ".flink/" + appId.toString() + "/");
    FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
    fs.setPermission(sessionFilesDir, permission); // set permission for path.
    Utils.setTokensFor(amContainer, paths, this.conf);
    amContainer.setLocalResources(localResources);
    // NOTE(review): FileSystem.get() may return a cached, shared instance;
    // closing it here could affect other users of the same FileSystem — verify.
    fs.close();
    int amRPCPort = GlobalConfiguration.getInteger(ConfigConstants.YARN_AM_PRC_PORT,
            ConfigConstants.DEFAULT_YARN_AM_RPC_PORT);
    amRPCPort = Utils.offsetPort(amRPCPort, appNumber);
    // Setup CLASSPATH for ApplicationMaster
    Map<String, String> appMasterEnv = new HashMap<String, String>();
    Utils.setupEnv(conf, appMasterEnv);
    // set configuration values: the ApplicationMaster reads these env vars.
    appMasterEnv.put(Client.ENV_TM_COUNT, String.valueOf(taskManagerCount));
    appMasterEnv.put(Client.ENV_TM_CORES, String.valueOf(tmCores));
    appMasterEnv.put(Client.ENV_TM_MEMORY, String.valueOf(tmMemory));
    appMasterEnv.put(Client.FLINK_JAR_PATH, remotePathJar.toString());
    appMasterEnv.put(Client.ENV_APP_ID, appId.toString());
    appMasterEnv.put(Client.ENV_CLIENT_HOME_DIR, fs.getHomeDirectory().toString());
    appMasterEnv.put(Client.ENV_CLIENT_SHIP_FILES, envShipFileList.toString());
    appMasterEnv.put(Client.ENV_CLIENT_USERNAME, UserGroupInformation.getCurrentUser().getShortUserName());
    appMasterEnv.put(Client.ENV_AM_PRC_PORT, String.valueOf(amRPCPort));
    appMasterEnv.put(Client.ENV_SLOTS, String.valueOf(slots));
    appMasterEnv.put(Client.ENV_APP_NUMBER, String.valueOf(appNumber));
    if (dynamicPropertiesEncoded != null) {
        appMasterEnv.put(Client.ENV_DYNAMIC_PROPERTIES, dynamicPropertiesEncoded);
    }
    amContainer.setEnvironment(appMasterEnv);
    // Set up resource type requirements for ApplicationMaster
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemory(jmMemory);
    capability.setVirtualCores(1);
    appContext.setApplicationName("Flink"); // application name
    appContext.setAMContainerSpec(amContainer);
    appContext.setResource(capability);
    appContext.setQueue(queue);
    // file that we write into the conf/ dir containing the jobManager address and the dop.
    yarnPropertiesFile = new File(confDirPath + CliFrontend.YARN_PROPERTIES_FILE);
    LOG.info("Submitting application master " + appId);
    yarnClient.submitApplication(appContext);
    // Poll the application until it reaches a terminal state, showing a spinner
    // before RUNNING and relaying TaskManager counts / AM messages afterwards.
    ApplicationReport appReport = yarnClient.getApplicationReport(appId);
    YarnApplicationState appState = appReport.getYarnApplicationState();
    boolean told = false;
    char[] el = { '/', '|', '\\', '-' };
    int i = 0;
    int numTaskmanagers = 0;
    int numMessages = 0;
    BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
    while (appState != YarnApplicationState.FINISHED && appState != YarnApplicationState.KILLED
            && appState != YarnApplicationState.FAILED) {
        if (!told && appState == YarnApplicationState.RUNNING) {
            System.err.println("Flink JobManager is now running on " + appReport.getHost() + ":" + jmPort);
            System.err.println("JobManager Web Interface: " + appReport.getTrackingUrl());
            // write jobmanager connect information
            Properties yarnProps = new Properties();
            yarnProps.setProperty(CliFrontend.YARN_PROPERTIES_JOBMANAGER_KEY,
                    appReport.getHost() + ":" + jmPort);
            if (slots != -1) {
                yarnProps.setProperty(CliFrontend.YARN_PROPERTIES_DOP,
                        Integer.toString(slots * taskManagerCount));
            }
            // add dynamic properties
            if (dynamicProperties != null) {
                yarnProps.setProperty(CliFrontend.YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING,
                        dynamicPropertiesEncoded);
            }
            OutputStream out = new FileOutputStream(yarnPropertiesFile);
            yarnProps.store(out, "Generated YARN properties file");
            out.close();
            yarnPropertiesFile.setReadable(true, false); // readable for all.
            // connect RPC service
            cmc = new ClientMasterControl(new InetSocketAddress(appReport.getHost(), amRPCPort));
            cmc.start();
            Runtime.getRuntime().addShutdownHook(new ClientShutdownHook());
            told = true;
        }
        if (!told) {
            // Spinner animation while waiting for RUNNING.
            System.err.print(el[i++] + "\r");
            if (i == el.length) {
                i = 0;
            }
            Thread.sleep(500); // wait for the application to switch to RUNNING
        } else {
            int newTmCount = cmc.getNumberOfTaskManagers();
            if (numTaskmanagers != newTmCount) {
                System.err.println("Number of connected TaskManagers changed to " + newTmCount + ". "
                        + "Slots available: " + cmc.getNumberOfAvailableSlots());
                numTaskmanagers = newTmCount;
            }
            // we also need to show new messages.
            if (cmc.getFailedStatus()) {
                System.err.println("The Application Master failed!\nMessages:\n");
                for (Message m : cmc.getMessages()) {
                    System.err.println("Message: " + m.getMessage());
                }
                System.err.println("Requesting Application Master shutdown");
                cmc.shutdownAM();
                cmc.close();
                System.err.println("Application Master closed.");
            }
            if (cmc.getMessages().size() != numMessages) {
                System.err.println("Received new message(s) from the Application Master");
                List<Message> msg = cmc.getMessages();
                while (msg.size() > numMessages) {
                    System.err.println("Message: " + msg.get(numMessages).getMessage());
                    numMessages++;
                }
            }
            // wait until CLIENT_POLLING_INTERVALL is over or the user entered something.
            // Here in.ready() is a legitimate non-blocking poll of stdin, not an
            // end-of-stream check.
            long startTime = System.currentTimeMillis();
            while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVALL * 1000 && !in.ready()) {
                Thread.sleep(200);
            }
            if (in.ready()) {
                String command = in.readLine();
                evalCommand(command);
            }
        }
        appReport = yarnClient.getApplicationReport(appId);
        appState = appReport.getYarnApplicationState();
    }
    LOG.info("Application " + appId + " finished with" + " state " + appState + " and " + "final state "
            + appReport.getFinalApplicationStatus() + " at " + appReport.getFinishTime());
    if (appState == YarnApplicationState.FAILED || appState == YarnApplicationState.KILLED) {
        LOG.warn("Application failed. Diagnostics " + appReport.getDiagnostics());
        LOG.warn("If log aggregation is activated in the Hadoop cluster, we recommend to retreive "
                + "the full application log using this command:\n" + "\tyarn logs -applicationId "
                + appReport.getApplicationId() + "\n"
                + "(It sometimes takes a few seconds until the logs are aggregated)");
    }
}