List of usage examples for java.lang.StringBuilder#delete(int start, int end)
@Override public StringBuilder delete(int start, int end)
From source file:com.flexive.shared.scripting.groovy.GroovyScriptExporterTools.java
/** * This method generates the script code for a type * * @param type the FxType which should be exported * @param defaultsOnly use defaults only, do not analyse / script options * @param addWorkflow add the type's current workflow to the code * @return String returns the script code for a type *//*from w w w . jav a 2 s. c o m*/ public static String createType(FxType type, boolean defaultsOnly, boolean addWorkflow) { StringBuilder script = new StringBuilder(500); final int tabCount = 1; script.append("builder = new GroovyTypeBuilder()."); final String typeName = keyWordNameCheck(type.getName(), true); script.append(typeName.toLowerCase()).append("( "); // opening parenthesis + 1x \s if (!defaultsOnly) { script.append("\n"); // LABEL final long[] langs = type.getLabel().getTranslatedLanguages(); final long defLang = type.getLabel().getDefaultLanguage(); script.append("\tlabel: new FxString(true, ").append(defLang).append(", \"").append(type.getLabel()) .append("\")"); if (langs.length > 1) { // we have more than one language assignment for (long id : langs) { if (id != defLang) { script.append(".setTranslation(").append(id).append(", \"") .append(type.getLabel().getTranslation(id)).append("\")"); } } } script.append(",\n"); // sopts - a map for "simple" GroovyTypeBuilder options Map<String, String> sopts = new LinkedHashMap<String, String>(); // type acl String acl = type.getACL().getName(); // only set if different from the default structure ACL if (!CacheAdmin.getEnvironment().getACL(acl) .equals(CacheAdmin.getEnvironment().getACL(ACLCategory.STRUCTURE.getDefaultId()))) { sopts.put("acl", "\"" + acl + "\""); } // type defaultInstanceACL if (type.hasDefaultInstanceACL()) { String defInstACL = type.getDefaultInstanceACL().getName(); sopts.put("defaultInstanceACL", "\"" + defInstACL + "\""); } sopts.put("languageMode", type.getLanguage() == LanguageMode.Multiple ? 
"LanguageMode.Multiple" : "LanguageMode.Single"); sopts.put("trackHistory", type.isTrackHistory() + ""); if (type.isTrackHistory()) sopts.put("historyAge", type.getHistoryAge() + "L"); sopts.put("typeMode", "TypeMode." + type.getMode().name() + ""); // sopts.put("workFlow", ",\n") /* Left out for now, also needs to be added in GroovyTypeBuilder */ sopts.put("maxVersions", type.getMaxVersions() + "L"); sopts.put("storageMode", "TypeStorageMode." + type.getStorageMode().name() + ""); // not supported in FxTypeEdit, needs to be added to groovy builder sopts.put("useInstancePermissions", type.isUseInstancePermissions() + ""); sopts.put("usePropertyPermissions", type.isUsePropertyPermissions() + ""); sopts.put("useStepPermissions", type.isUseStepPermissions() + ""); sopts.put("useTypePermissions", type.isUseTypePermissions() + ""); sopts.put("usePermissions", type.isUsePermissions() + ""); if (addWorkflow) { sopts.put("workflow", "\"" + type.getWorkflow().getName() + "\""); } if (type.isDerived()) { // take out of !defaultsOnly option? // if clause necessary since rev. #2162 (all types derived from ROOT) if (!FxType.ROOT.equals(type.getParent().getName())) sopts.put("parentTypeName", "\"" + type.getParent().getName() + "\""); } // FxStructureOptions via the GroovyOptionbuilder script.append(getStructureOptions(type, tabCount)); // append options to script for (String option : sopts.keySet()) { script.append(simpleOption(option, sopts.get(option), tabCount)); } script.trimToSize(); if (script.indexOf(",\n", script.length() - 2) != -1) script.delete(script.length() - 2, script.length()); } script.append(")\n\n"); script.trimToSize(); return script.toString(); }
From source file:org.apache.maven.plugin.javadoc.AbstractFixJavadocMojo.java
private void writeParamTag(final StringBuilder sb, final JavaMethod javaMethod, final JavaEntityTags javaEntityTags, String[] params) { params = fixQdox173(params);/* w w w . j ava 2 s .c om*/ String paramName = params[0]; if (!fixTag(PARAM_TAG)) { // write original param tag if found String originalJavadocTag = javaEntityTags.getJavadocParamTag(paramName); if (originalJavadocTag != null) { sb.append(originalJavadocTag); } return; } boolean found = false; JavaParameter javaParam = javaMethod.getParameterByName(paramName); if (javaParam == null) { // is generic? TypeVariable[] typeParams = javaMethod.getTypeParameters(); for (TypeVariable typeParam : typeParams) { if (typeParam.getGenericValue().equals(paramName)) { found = true; } } } else { found = true; } if (!found) { if (getLog().isWarnEnabled()) { getLog().warn( "Fixed unknown param '" + paramName + "' defined in " + getJavaMethodAsString(javaMethod)); } if (sb.toString().endsWith(EOL)) { sb.delete(sb.toString().lastIndexOf(EOL), sb.toString().length()); } } else { String originalJavadocTag = javaEntityTags.getJavadocParamTag(paramName); if (originalJavadocTag != null) { sb.append(originalJavadocTag); String s = "@" + PARAM_TAG + " " + paramName; if (StringUtils.removeDuplicateWhitespace(originalJavadocTag).trim().endsWith(s)) { sb.append(" "); sb.append(getDefaultJavadocForType(javaParam.getType())); } } } }
From source file:edu.psu.iam.cpr.core.util.Utility.java
/**
 * <p>Convert an IPv6 address into RFC 5952 canonical form.
 * E.g. {@code 2001:db8:0:1:0:0:0:1 -> 2001:db8:0:1::1}</p>
 *
 * <p>Method is null safe, and if an IPv4 address or host name is passed to the
 * method it is returned without any processing.</p>
 *
 * <p>Method also supports IPv4-in-IPv6 (e.g. {@code 0:0:0:0:0:ffff:192.0.2.1 ->
 * ::ffff:192.0.2.1}), and zone IDs (e.g. {@code fe80:0:0:0:f0f0:c0c0:1919:1234%4
 * -> fe80::f0f0:c0c0:1919:1234%4}).</p>
 *
 * @param ipv6Address String representing a valid IPv6 address.
 * @return String representing the IPv6 address in canonical form.
 * @throws IllegalArgumentException if the IPv6 format is unacceptable.
 */
public static String canonicalizeAddress(final String ipv6Address) throws IllegalArgumentException {
    if (ipv6Address == null) {
        return null;
    }
    // Definitely not an IPv6, return untouched input.
    if (!mayBeIPv6Address(ipv6Address)) {
        return ipv6Address;
    }
    // Length of the pure IPv6 part, i.e. without zone ID (%zone) or trailing IPv4 address.
    int ipv6AddressLength = ipv6Address.length();
    if (ipv6Address.contains(":") && ipv6Address.contains(".")) {
        // IPv4 in IPv6, e.g. 0:0:0:0:0:FFFF:127.0.0.1
        final int lastColonPos = ipv6Address.lastIndexOf(':');
        final int lastColonsPos = ipv6Address.lastIndexOf("::");
        if (lastColonsPos >= 0 && lastColonPos == lastColonsPos + 1) {
            /*
             * IPv6 part ends with two consecutive colons,
             * last colon is part of IPv6 format.
             * e.g. ::127.0.0.1
             */
            ipv6AddressLength = lastColonPos + 1;
        } else {
            /*
             * IPv6 part ends with only one colon,
             * last colon is not part of IPv6 format.
             * e.g. ::FFFF:127.0.0.1
             */
            ipv6AddressLength = lastColonPos;
        }
    } else if (ipv6Address.contains(":") && ipv6Address.contains("%")) {
        // Zone ID, e.g. fe80:0:0:0:f0f0:c0c0:1919:1234%4
        ipv6AddressLength = ipv6Address.lastIndexOf('%');
    }
    final StringBuilder result = new StringBuilder();
    // One row per hex group; a cell value of 0 (NUL) means "no digit here".
    final char[][] groups = new char[MAX_NUMBER_OF_GROUPS][MAX_GROUP_LENGTH];
    int groupCounter = 0;
    int charInGroupCounter = 0;
    // Index of the current run of all-zero groups; -1 means not currently in one.
    int zeroGroupIndex = -1;
    int zeroGroupLength = 0;
    // Longest run of zero groups seen so far; if there is more than one, the first wins.
    int maxZeroGroupIndex = -1;
    int maxZeroGroupLength = 0;
    boolean isZero = true;
    boolean groupStart = true;
    /*
     * Two consecutive colons, initial expansion.
     * e.g. 2001:db8:0:0:1::1 -> 2001:db8:0:0:1:0:0:1
     * 'change' tracks how many characters the expansion added/removed so that
     * later positions into 'expanded' stay consistent with the original string.
     */
    final StringBuilder expanded = new StringBuilder(ipv6Address);
    final int colonsPos = ipv6Address.indexOf("::");
    int length = ipv6AddressLength;
    int change = 0;
    if (colonsPos >= 0 && colonsPos < ipv6AddressLength - 2) {
        int colonCounter = 0;
        for (int i = 0; i < ipv6AddressLength; i++) {
            if (ipv6Address.charAt(i) == ':') {
                colonCounter++;
            }
        }
        if (colonsPos == 0) {
            // Leading "::" - seed a zero group in front, e.g. ::1 -> 0::1
            expanded.insert(0, "0");
            change = change + 1;
        }
        // Insert as many "0:" groups as were elided by the "::" shorthand.
        for (int i = 0; i < MAX_NUMBER_OF_GROUPS - colonCounter; i++) {
            expanded.insert(colonsPos + 1, "0:");
            change = change + 2;
        }
        if (colonsPos == ipv6AddressLength - 2) {
            expanded.setCharAt(colonsPos + change + 1, '0');
        } else {
            // Remove the now-redundant second colon of the original "::".
            expanded.deleteCharAt(colonsPos + change + 1);
            change = change - 1;
        }
        length = length + change;
    }
    // Processing one char at a time: collect hex digits into 'groups',
    // lower-casing A-F and skipping leading zeros within each group, while
    // tracking the longest run of all-zero groups for "::" compression.
    for (int charCounter = 0; charCounter < length; charCounter++) {
        char c = expanded.charAt(charCounter);
        if (c >= 'A' && c <= 'F') {
            c = (char) (c + 32); // to lower case (ASCII)
        }
        if (c != ':') {
            groups[groupCounter][charInGroupCounter] = c;
            if (!(groupStart && c == '0')) { // drop leading zeros of the group
                ++charInGroupCounter;
                groupStart = false;
            }
            if (c != '0') {
                isZero = false;
            }
        }
        if (c == ':' || charCounter == length - 1) {
            // We reached the end of the current group
            if (isZero) {
                ++zeroGroupLength;
                if (zeroGroupIndex == -1) {
                    zeroGroupIndex = groupCounter;
                }
            }
            if (!isZero || charCounter == length - 1) {
                // We reached the end of a zero-group run
                if (zeroGroupLength > maxZeroGroupLength) {
                    maxZeroGroupLength = zeroGroupLength;
                    maxZeroGroupIndex = zeroGroupIndex;
                }
                zeroGroupLength = 0;
                zeroGroupIndex = -1;
            }
            ++groupCounter;
            charInGroupCounter = 0;
            isZero = true;
            groupStart = true;
        }
    }
    final int numberOfGroups = groupCounter;
    // Output results: emit each group, replacing the longest zero run (length > 1) with "::".
    for (groupCounter = 0; groupCounter < numberOfGroups; groupCounter++) {
        if (maxZeroGroupLength <= 1 || groupCounter < maxZeroGroupIndex
                || groupCounter >= maxZeroGroupIndex + maxZeroGroupLength) {
            for (int j = 0; j < MAX_GROUP_LENGTH; j++) {
                if (groups[groupCounter][j] != 0) {
                    result.append(groups[groupCounter][j]);
                }
            }
            if (groupCounter < numberOfGroups - 1
                    && (groupCounter != maxZeroGroupIndex - 1 || maxZeroGroupLength <= 1)) {
                result.append(':');
            }
        } else if (groupCounter == maxZeroGroupIndex) {
            result.append("::");
        }
    }
    // Solve problem with three colons in IPv4-in-IPv6 format
    // e.g. 0:0:0:0:0:0:127.0.0.1 -> :::127.0.0.1 -> ::127.0.0.1
    final int resultLength = result.length();
    if (result.charAt(resultLength - 1) == ':' && ipv6AddressLength < ipv6Address.length()
            && ipv6Address.charAt(ipv6AddressLength) == ':') {
        result.delete(resultLength - 1, resultLength);
    }
    /*
     * Append IPv4 from IPv4-in-IPv6 format or Zone ID
     */
    for (int i = ipv6AddressLength; i < ipv6Address.length(); i++) {
        result.append(ipv6Address.charAt(i));
    }
    return result.toString();
}
From source file:lcmc.crm.domain.CrmXml.java
/**
 * Fetches and parses the meta data of the OCF resource agents on the host.
 * <p>
 * Runs the given command over SSH and scans its line-oriented output: the
 * "provider:", "master:" and "ra-name:" header lines set the context for the
 * following inline {@code <resource-agent> ... </resource-agent>} XML snippet,
 * which is collected and handed to {@code parseMetaDataAndFillHashes()}.
 *
 * @param command the shell command that lists resource agents with their meta data
 */
private void initOCFResourceAgentsWithMetaData(final String command) {
    final SshOutput ret = host.captureCommand(
            new ExecCommandConfig().command(command).silentCommand().silentOutput().sshCommandTimeout(300000));
    if (ret.getExitCode() != 0) {
        return;
    }
    final String output = ret.getOutput();
    if (output == null) {
        return;
    }
    final String[] lines = output.split("\\r?\\n");
    final Pattern pp = Pattern.compile("^provider:\\s*(.*?)\\s*$");
    final Pattern mp = Pattern.compile("^master:\\s*(.*?)\\s*$");
    final Pattern bp = Pattern.compile("<resource-agent.*\\s+name=\"(.*?)\".*");
    final Pattern sp = Pattern.compile("^ra-name:\\s*(.*?)\\s*$");
    final Pattern ep = Pattern.compile("</resource-agent>");
    final StringBuilder xml = new StringBuilder(); // no need to seed with ""
    String provider = null;
    String serviceName = null;
    boolean nextRA = false;
    boolean masterSlave = false; /* is probably m/s ... */
    for (final String line : lines) {
        /* <resource-agent name="AudibleAlarm"> ... </resource-agent> */
        final Matcher pm = pp.matcher(line);
        if (pm.matches()) {
            provider = pm.group(1);
            continue;
        }
        final Matcher mm = mp.matcher(line);
        if (mm.matches()) {
            masterSlave = !"".equals(mm.group(1));
            continue;
        }
        final Matcher sm = sp.matcher(line);
        if (sm.matches()) {
            serviceName = sm.group(1);
            continue;
        }
        final Matcher m = bp.matcher(line);
        if (m.matches()) {
            nextRA = true;
        }
        if (nextRA) {
            xml.append(line);
            xml.append('\n');
            final Matcher m2 = ep.matcher(line);
            if (m2.matches()) {
                // closing tag reached: parse this agent's XML and reset for the next one
                parseMetaDataAndFillHashes(serviceName, provider, xml.toString(), masterSlave);
                serviceName = null;
                nextRA = false;
                xml.setLength(0); // clearer than delete(0, xml.length())
            }
        }
    }
    if (!drbddiskResourceAgentPresent) {
        LOG.appWarning("initOCFMetaData: drbddisk heartbeat script is not present");
    }
}
From source file:org.accada.epcis.repository.query.QueryOperationsBackendSQL.java
private PreparedStatement prepareMasterDataQuery(final QueryOperationsSession session, String vocType, MasterDataQueryDTO mdQuery) throws SQLException { StringBuilder sqlSelectFrom = new StringBuilder("SELECT uri FROM"); StringBuilder sqlWhereClause = new StringBuilder(" WHERE 1"); List<Object> sqlParams = new ArrayList<Object>(); // get the values from the query DTO List<String> attributeNames = mdQuery.getAttributeNames(); Map<String, List<String>> attributeNameAndValues = mdQuery.getAttributeNameAndValues(); List<String> vocabularyEqNames = mdQuery.getVocabularyEqNames(); List<String> vocabularyWdNames = mdQuery.getVocabularyWdNames(); boolean joinedAttribute = false; String vocTablename = getVocabularyTablename(vocType); sqlSelectFrom.append(" ").append(vocTablename).append(","); if ("voc_Any".equals(vocTablename)) { // this is not a standard vocabulary, we need to restrict by vtype // in the voc_Any table sqlWhereClause.append(" AND voc_Any.vtype=?"); sqlParams.add(vocType);// ww w . j a v a 2s . 
co m } // filter by attribute names if (attributeNames != null && !attributeNames.isEmpty()) { if (!joinedAttribute) { sqlSelectFrom.append(" ").append(vocTablename).append("_attr,"); sqlWhereClause.append(" AND ").append(vocTablename).append(".id="); sqlWhereClause.append(vocTablename).append("_attr.id"); } sqlWhereClause.append(" AND ").append(vocTablename).append("_attr.attribute IN (?"); sqlParams.add(attributeNames.get(0)); for (int i = 1; i < attributeNames.size(); i++) { sqlWhereClause.append(",?"); sqlParams.add(attributeNames.get(i)); } sqlWhereClause.append(")"); } // filter by attribute names and values if (attributeNameAndValues != null && !attributeNameAndValues.isEmpty()) { if (!joinedAttribute) { sqlSelectFrom.append(" ").append(vocTablename).append("_attr,"); sqlWhereClause.append(" AND ").append(vocTablename).append(".id="); sqlWhereClause.append(vocTablename).append("_attr.id"); } for (String attrName : attributeNameAndValues.keySet()) { sqlWhereClause.append(" AND ").append(vocTablename).append("_attr.attribute=?"); sqlParams.add(attrName); sqlWhereClause.append(" AND ").append(vocTablename).append("_attr.value IN (?"); List<String> attrValues = attributeNameAndValues.get(attrName); sqlParams.add(attrValues.get(0)); for (int i = 1; i < attrValues.size(); i++) { sqlWhereClause.append(",?"); sqlParams.add(attrValues.get(i)); } sqlWhereClause.append(")"); } } // filter by vocabulary names if (vocabularyEqNames != null && !vocabularyEqNames.isEmpty()) { sqlWhereClause.append(" AND ").append(vocTablename).append(".uri IN (?"); sqlParams.add(vocabularyEqNames.get(0)); for (int i = 1; i < vocabularyEqNames.size(); i++) { sqlWhereClause.append(",?"); sqlParams.add(vocabularyEqNames.get(i)); } sqlWhereClause.append(")"); } if (vocabularyWdNames != null && !vocabularyWdNames.isEmpty()) { sqlWhereClause.append(" AND (0"); for (String vocWdName : vocabularyWdNames) { sqlWhereClause.append(" OR ").append(vocTablename).append(".uri LIKE ?"); 
sqlParams.add(vocWdName + "%"); } sqlWhereClause.append(")"); } // remove last comma sqlSelectFrom.delete(sqlSelectFrom.length() - 1, sqlSelectFrom.length()); // set the complete query and pass it back to the caller String sqlSelect = sqlSelectFrom.append(sqlWhereClause).toString(); PreparedStatement ps = session.getConnection().prepareStatement(sqlSelect); LOG.debug("SQL: " + sqlSelect); for (int i = 0; i < sqlParams.size(); i++) { ps.setObject(i + 1, sqlParams.get(i)); if (LOG.isDebugEnabled()) { LOG.debug(" param" + i + " = " + sqlParams.get(i)); } } return ps; }
From source file:lcmc.data.CRMXML.java
/** Initialize resource agents with their meta data. */ private void initOCFMetaData(final String command) { final SSH.SSHOutput ret = Tools.execCommand(host, command, null, /* ExecCallback */ false, /* outputVisible */ 300000);//w ww . j a va 2 s .c o m if (ret.getExitCode() != 0) { return; } final String output = ret.getOutput(); if (output == null) { return; } final String[] lines = output.split("\\r?\\n"); final Pattern pp = Pattern.compile("^provider:\\s*(.*?)\\s*$"); final Pattern mp = Pattern.compile("^master:\\s*(.*?)\\s*$"); final Pattern bp = Pattern.compile("<resource-agent.*\\s+name=\"(.*?)\".*"); final Pattern ep = Pattern.compile("</resource-agent>"); final StringBuilder xml = new StringBuilder(""); String provider = null; String serviceName = null; boolean masterSlave = false; /* is probably m/s ...*/ for (int i = 0; i < lines.length; i++) { /* <resource-agent name="AudibleAlarm"> ... </resource-agent> */ final Matcher pm = pp.matcher(lines[i]); if (pm.matches()) { provider = pm.group(1); continue; } final Matcher mm = mp.matcher(lines[i]); if (mm.matches()) { if ("".equals(mm.group(1))) { masterSlave = false; } else { masterSlave = true; } continue; } final Matcher m = bp.matcher(lines[i]); if (m.matches()) { serviceName = m.group(1); } if (serviceName != null) { xml.append(lines[i]); xml.append('\n'); final Matcher m2 = ep.matcher(lines[i]); if (m2.matches()) { parseMetaData(serviceName, provider, xml.toString(), masterSlave); serviceName = null; xml.delete(0, xml.length()); } } } if (!drbddiskPresent) { Tools.appWarning("drbddisk heartbeat script is not present"); } }
From source file:loci.formats.in.MetamorphReader.java
@Override protected void initFile(String id) throws FormatException, IOException { if (checkSuffix(id, ND_SUFFIX)) { LOGGER.info("Initializing " + id); // find an associated STK file String stkFile = id.substring(0, id.lastIndexOf(".")); if (stkFile.indexOf(File.separatorChar) != -1) { stkFile = stkFile.substring(stkFile.lastIndexOf(File.separator) + 1); }/* w w w .j a v a 2s. co m*/ Location parent = new Location(id).getAbsoluteFile().getParentFile(); LOGGER.info("Looking for STK file in {}", parent.getAbsolutePath()); String[] dirList = parent.list(true); Arrays.sort(dirList); for (String f : dirList) { int underscore = f.indexOf('_'); if (underscore < 0) underscore = f.indexOf('.'); if (underscore < 0) underscore = f.length(); String prefix = f.substring(0, underscore); if ((f.equals(stkFile) || stkFile.startsWith(prefix)) && checkSuffix(f, STK_SUFFIX)) { stkFile = new Location(parent.getAbsolutePath(), f).getAbsolutePath(); break; } } if (!checkSuffix(stkFile, STK_SUFFIX)) { throw new FormatException("STK file not found in " + parent.getAbsolutePath() + "."); } super.initFile(stkFile); } else super.initFile(id); Location ndfile = null; if (checkSuffix(id, ND_SUFFIX)) ndfile = new Location(id); else if (canLookForND && isGroupFiles()) { // an STK file was passed to initFile // let's check the parent directory for an .nd file Location stk = new Location(id).getAbsoluteFile(); String stkName = stk.getName(); String stkPrefix = stkName; if (stkPrefix.indexOf('_') >= 0) { stkPrefix = stkPrefix.substring(0, stkPrefix.indexOf('_') + 1); } Location parent = stk.getParentFile(); String[] list = parent.list(true); int matchingChars = 0; for (String f : list) { if (checkSuffix(f, ND_SUFFIX)) { String prefix = f.substring(0, f.lastIndexOf(".")); if (prefix.indexOf('_') >= 0) { prefix = prefix.substring(0, prefix.indexOf('_') + 1); } if (stkName.startsWith(prefix) || prefix.equals(stkPrefix)) { int charCount = 0; for (int i = 0; i < f.length(); i++) { if (i >= 
stkName.length()) { break; } if (f.charAt(i) == stkName.charAt(i)) { charCount++; } else { break; } } if (charCount > matchingChars || (charCount == matchingChars && f.charAt(charCount) == '.')) { ndfile = new Location(parent, f).getAbsoluteFile(); matchingChars = charCount; } } } } } String creationTime = null; if (ndfile != null && ndfile.exists() && (fileGroupOption(id) == FormatTools.MUST_GROUP || isGroupFiles())) { // parse key/value pairs from .nd file int zc = getSizeZ(), cc = getSizeC(), tc = getSizeT(); int nstages = 0; String z = null, c = null, t = null; final List<Boolean> hasZ = new ArrayList<Boolean>(); waveNames = new ArrayList<String>(); stageNames = new ArrayList<String>(); boolean useWaveNames = true; ndFilename = ndfile.getAbsolutePath(); String[] lines = DataTools.readFile(ndFilename).split("\n"); boolean globalDoZ = true; boolean doTimelapse = false; StringBuilder currentValue = new StringBuilder(); String key = ""; for (String line : lines) { int comma = line.indexOf(','); if (comma <= 0) { currentValue.append("\n"); currentValue.append(line); continue; } String value = currentValue.toString(); addGlobalMeta(key, value); if (key.equals("NZSteps")) z = value; else if (key.equals("DoTimelapse")) { doTimelapse = Boolean.parseBoolean(value); } else if (key.equals("NWavelengths")) c = value; else if (key.equals("NTimePoints")) t = value; else if (key.startsWith("WaveDoZ")) { hasZ.add(Boolean.parseBoolean(value)); } else if (key.startsWith("WaveName")) { String waveName = value.substring(1, value.length() - 1); if (waveName.equals("Both lasers") || waveName.startsWith("DUAL")) { bizarreMultichannelAcquisition = true; } waveNames.add(waveName); } else if (key.startsWith("Stage")) { stageNames.add(value); } else if (key.startsWith("StartTime")) { creationTime = value; } else if (key.equals("ZStepSize")) { value = value.replace(',', '.'); stepSize = Double.parseDouble(value); } else if (key.equals("NStagePositions")) { nstages = 
Integer.parseInt(value); } else if (key.equals("WaveInFileName")) { useWaveNames = Boolean.parseBoolean(value); } else if (key.equals("DoZSeries")) { globalDoZ = Boolean.parseBoolean(value); } key = line.substring(1, comma - 1).trim(); currentValue.delete(0, currentValue.length()); currentValue.append(line.substring(comma + 1).trim()); } if (!globalDoZ) { for (int i = 0; i < hasZ.size(); i++) { hasZ.set(i, false); } } // figure out how many files we need if (z != null) zc = Integer.parseInt(z); if (c != null) cc = Integer.parseInt(c); if (t != null) tc = Integer.parseInt(t); else if (!doTimelapse) { tc = 1; } if (cc == 0) cc = 1; if (cc == 1 && bizarreMultichannelAcquisition) { cc = 2; } if (tc == 0) { tc = 1; } int numFiles = cc * tc; if (nstages > 0) numFiles *= nstages; // determine series count int stagesCount = nstages == 0 ? 1 : nstages; int seriesCount = stagesCount; firstSeriesChannels = new boolean[cc]; Arrays.fill(firstSeriesChannels, true); boolean differentZs = false; for (int i = 0; i < cc; i++) { boolean hasZ1 = i < hasZ.size() && hasZ.get(i); boolean hasZ2 = i != 0 && (i - 1 < hasZ.size()) && hasZ.get(i - 1); if (i > 0 && hasZ1 != hasZ2 && globalDoZ) { if (!differentZs) seriesCount *= 2; differentZs = true; } } int channelsInFirstSeries = cc; if (differentZs) { channelsInFirstSeries = 0; for (int i = 0; i < cc; i++) { if ((!hasZ.get(0) && i == 0) || (hasZ.get(0) && hasZ.get(i))) { channelsInFirstSeries++; } else firstSeriesChannels[i] = false; } } stks = new String[seriesCount][]; if (seriesCount == 1) stks[0] = new String[numFiles]; else if (differentZs) { for (int i = 0; i < stagesCount; i++) { stks[i * 2] = new String[channelsInFirstSeries * tc]; stks[i * 2 + 1] = new String[(cc - channelsInFirstSeries) * tc]; } } else { for (int i = 0; i < stks.length; i++) { stks[i] = new String[numFiles / stks.length]; } } String prefix = ndfile.getPath(); prefix = prefix.substring(prefix.lastIndexOf(File.separator) + 1, prefix.lastIndexOf(".")); // build list 
of STK files boolean anyZ = hasZ.contains(Boolean.TRUE); int[] pt = new int[seriesCount]; for (int i = 0; i < tc; i++) { for (int s = 0; s < stagesCount; s++) { for (int j = 0; j < cc; j++) { boolean validZ = j >= hasZ.size() || hasZ.get(j); int seriesNdx = s * (seriesCount / stagesCount); if ((seriesCount != 1 && (!validZ || (hasZ.size() > 0 && !hasZ.get(0)))) || (nstages == 0 && ((!validZ && cc > 1) || seriesCount > 1))) { if (anyZ && j > 0 && seriesNdx < seriesCount - 1 && (!validZ || !hasZ.get(0))) { seriesNdx++; } } if (seriesNdx >= stks.length || seriesNdx >= pt.length || pt[seriesNdx] >= stks[seriesNdx].length) { continue; } stks[seriesNdx][pt[seriesNdx]] = prefix; if (j < waveNames.size() && waveNames.get(j) != null) { stks[seriesNdx][pt[seriesNdx]] += "_w" + (j + 1); if (useWaveNames) { String waveName = waveNames.get(j); // If there are underscores in the wavelength name, translate // them to hyphens. (See #558) waveName = waveName.replace('_', '-'); // If there are slashes (forward or backward) in the wavelength // name, translate them to hyphens. 
(See #5922) waveName = waveName.replace('/', '-'); waveName = waveName.replace('\\', '-'); waveName = waveName.replace('(', '-'); waveName = waveName.replace(')', '-'); stks[seriesNdx][pt[seriesNdx]] += waveName; } } if (nstages > 0) { stks[seriesNdx][pt[seriesNdx]] += "_s" + (s + 1); } if (tc > 1 || doTimelapse) { stks[seriesNdx][pt[seriesNdx]] += "_t" + (i + 1) + ".STK"; } else stks[seriesNdx][pt[seriesNdx]] += ".STK"; pt[seriesNdx]++; } } } ndfile = ndfile.getAbsoluteFile(); // check that each STK file exists for (int s = 0; s < stks.length; s++) { for (int f = 0; f < stks[s].length; f++) { Location l = new Location(ndfile.getParent(), stks[s][f]); stks[s][f] = getRealSTKFile(l); } } String file = locateFirstValidFile(); if (file == null) { throw new FormatException("Unable to locate at least one valid STK file!"); } RandomAccessInputStream s = new RandomAccessInputStream(file, 16); TiffParser tp = new TiffParser(s); IFD ifd = tp.getFirstIFD(); CoreMetadata ms0 = core.get(0); s.close(); ms0.sizeX = (int) ifd.getImageWidth(); ms0.sizeY = (int) ifd.getImageLength(); if (bizarreMultichannelAcquisition) { ms0.sizeX /= 2; } ms0.sizeZ = hasZ.size() > 0 && !hasZ.get(0) ? 1 : zc; ms0.sizeC = cc; ms0.sizeT = tc; ms0.imageCount = getSizeZ() * getSizeC() * getSizeT(); if (isRGB()) { ms0.sizeC *= 3; } ms0.dimensionOrder = "XYZCT"; if (stks != null && stks.length > 1) { // Note that core can't be replaced with newCore until the end of this block. 
ArrayList<CoreMetadata> newCore = new ArrayList<CoreMetadata>(); for (int i = 0; i < stks.length; i++) { CoreMetadata ms = new CoreMetadata(); newCore.add(ms); ms.sizeX = getSizeX(); ms.sizeY = getSizeY(); ms.sizeZ = getSizeZ(); ms.sizeC = getSizeC(); ms.sizeT = getSizeT(); ms.pixelType = getPixelType(); ms.imageCount = getImageCount(); ms.dimensionOrder = getDimensionOrder(); ms.rgb = isRGB(); ms.littleEndian = isLittleEndian(); ms.interleaved = isInterleaved(); ms.orderCertain = true; } if (stks.length > nstages) { for (int j = 0; j < stagesCount; j++) { int idx = j * 2 + 1; CoreMetadata midx = newCore.get(idx); CoreMetadata pmidx = newCore.get(j * 2); pmidx.sizeC = stks[j * 2].length / getSizeT(); midx.sizeC = stks[idx].length / midx.sizeT; midx.sizeZ = hasZ.size() > 1 && hasZ.get(1) && core.get(0).sizeZ == 1 ? zc : 1; pmidx.imageCount = pmidx.sizeC * pmidx.sizeT * pmidx.sizeZ; midx.imageCount = midx.sizeC * midx.sizeT * midx.sizeZ; } } core = newCore; } } if (stks == null) { stkReaders = new MetamorphReader[1][1]; stkReaders[0][0] = new MetamorphReader(); stkReaders[0][0].setCanLookForND(false); } else { stkReaders = new MetamorphReader[stks.length][]; for (int i = 0; i < stks.length; i++) { stkReaders[i] = new MetamorphReader[stks[i].length]; for (int j = 0; j < stkReaders[i].length; j++) { stkReaders[i][j] = new MetamorphReader(); stkReaders[i][j].setCanLookForND(false); if (j > 0) { stkReaders[i][j].setMetadataOptions(new DynamicMetadataOptions(MetadataLevel.MINIMUM)); } } } } // check stage labels for plate data int rows = 0; int cols = 0; Map<String, Integer> rowMap = null; Map<String, Integer> colMap = null; isHCS = true; if (null == stageLabels) { isHCS = false; } else { Set<Map.Entry<Integer, Integer>> uniqueWells = new HashSet<Map.Entry<Integer, Integer>>(); rowMap = new HashMap<String, Integer>(); colMap = new HashMap<String, Integer>(); for (String label : stageLabels) { if (null == label) { isHCS = false; break; } Map.Entry<Integer, Integer> 
wellCoords = getWellCoords(label); if (null == wellCoords) { isHCS = false; break; } uniqueWells.add(wellCoords); rowMap.put(label, wellCoords.getKey()); colMap.put(label, wellCoords.getValue()); } if (uniqueWells.size() != stageLabels.length) { isHCS = false; } else { rows = Collections.max(rowMap.values()); cols = Collections.max(colMap.values()); CoreMetadata c = core.get(0); core.clear(); c.sizeZ = 1; c.sizeT = 1; c.imageCount = 1; for (int s = 0; s < uniqueWells.size(); s++) { CoreMetadata toAdd = new CoreMetadata(c); if (s > 0) { toAdd.seriesMetadata.clear(); } core.add(toAdd); } seriesToIFD = true; } } List<String> timestamps = null; MetamorphHandler handler = null; MetadataStore store = makeFilterMetadata(); MetadataTools.populatePixels(store, this, true); if (isHCS) { store.setPlateID(MetadataTools.createLSID("Plate", 0), 0); store.setPlateRows(new PositiveInteger(rows), 0); store.setPlateColumns(new PositiveInteger(cols), 0); store.setPlateRowNamingConvention(NamingConvention.LETTER, 0); store.setPlateColumnNamingConvention(NamingConvention.NUMBER, 0); } int nextObjective = 0; String instrumentID = MetadataTools.createLSID("Instrument", 0); String detectorID = MetadataTools.createLSID("Detector", 0, 0); store.setInstrumentID(instrumentID, 0); store.setDetectorID(detectorID, 0, 0); store.setDetectorType(getDetectorType("Other"), 0, 0); for (int i = 0; i < getSeriesCount(); i++) { setSeries(i); // do not reparse the same XML for every well if (i == 0 || !isHCS) { handler = new MetamorphHandler(getSeriesMetadata()); } if (isHCS) { String label = stageLabels[i]; String wellID = MetadataTools.createLSID("Well", 0, i); store.setWellID(wellID, 0, i); store.setWellColumn(new NonNegativeInteger(colMap.get(label)), 0, i); store.setWellRow(new NonNegativeInteger(rowMap.get(label)), 0, i); store.setWellSampleID(MetadataTools.createLSID("WellSample", 0, i, 0), 0, i, 0); store.setWellSampleImageRef(MetadataTools.createLSID("Image", i), 0, i, 0); 
store.setWellSampleIndex(new NonNegativeInteger(i), 0, i, 0); } store.setImageInstrumentRef(instrumentID, i); String comment = getFirstComment(i); if (i == 0 || !isHCS) { if (comment != null && comment.startsWith("<MetaData>")) { try { XMLTools.parseXML(XMLTools.sanitizeXML(comment), handler); } catch (IOException e) { } } } if (creationTime != null) { String date = DateTools.formatDate(creationTime, SHORT_DATE_FORMAT, "."); if (date != null) { store.setImageAcquisitionDate(new Timestamp(date), 0); } } store.setImageName(makeImageName(i).trim(), i); if (getMetadataOptions().getMetadataLevel() == MetadataLevel.MINIMUM) { continue; } store.setImageDescription("", i); store.setImagingEnvironmentTemperature(new Temperature(handler.getTemperature(), UNITS.CELSIUS), i); if (sizeX == null) sizeX = handler.getPixelSizeX(); if (sizeY == null) sizeY = handler.getPixelSizeY(); Length physicalSizeX = FormatTools.getPhysicalSizeX(sizeX); Length physicalSizeY = FormatTools.getPhysicalSizeY(sizeY); if (physicalSizeX != null) { store.setPixelsPhysicalSizeX(physicalSizeX, i); } if (physicalSizeY != null) { store.setPixelsPhysicalSizeY(physicalSizeY, i); } if (zDistances != null) { stepSize = zDistances[0]; } else { List<Double> zPositions = new ArrayList<Double>(); final List<Double> uniqueZ = new ArrayList<Double>(); for (IFD ifd : ifds) { MetamorphHandler zPlaneHandler = new MetamorphHandler(); String zComment = ifd.getComment(); if (zComment != null && zComment.startsWith("<MetaData>")) { try { XMLTools.parseXML(XMLTools.sanitizeXML(zComment), zPlaneHandler); } catch (IOException e) { } } zPositions = zPlaneHandler.getZPositions(); for (Double z : zPositions) { if (!uniqueZ.contains(z)) uniqueZ.add(z); } } if (uniqueZ.size() > 1 && uniqueZ.size() == getSizeZ()) { BigDecimal lastZ = BigDecimal.valueOf(uniqueZ.get(uniqueZ.size() - 1)); BigDecimal firstZ = BigDecimal.valueOf(uniqueZ.get(0)); BigDecimal zRange = (lastZ.subtract(firstZ)).abs(); BigDecimal zSize = 
BigDecimal.valueOf((double) (getSizeZ() - 1)); MathContext mc = new MathContext(10, RoundingMode.HALF_UP); stepSize = zRange.divide(zSize, mc).doubleValue(); } } Length physicalSizeZ = FormatTools.getPhysicalSizeZ(stepSize); if (physicalSizeZ != null) { store.setPixelsPhysicalSizeZ(physicalSizeZ, i); } if (handler.getLensNA() != 0 || handler.getLensRI() != 0) { String objectiveID = MetadataTools.createLSID("Objective", 0, nextObjective); store.setObjectiveID(objectiveID, 0, nextObjective); if (handler.getLensNA() != 0) { store.setObjectiveLensNA(handler.getLensNA(), 0, nextObjective); } store.setObjectiveSettingsID(objectiveID, i); if (handler.getLensRI() != 0) { store.setObjectiveSettingsRefractiveIndex(handler.getLensRI(), i); } nextObjective++; } int waveIndex = 0; for (int c = 0; c < getEffectiveSizeC(); c++) { if (firstSeriesChannels == null || (stageNames != null && stageNames.size() == getSeriesCount())) { waveIndex = c; } else if (firstSeriesChannels != null) { int s = i % 2; while (firstSeriesChannels[waveIndex] == (s == 1) && waveIndex < firstSeriesChannels.length) { waveIndex++; } } if (waveNames != null && waveIndex < waveNames.size()) { store.setChannelName(waveNames.get(waveIndex).trim(), i, c); } if (handler.getBinning() != null) binning = handler.getBinning(); if (binning != null) { store.setDetectorSettingsBinning(getBinning(binning), i, c); } if (handler.getReadOutRate() != 0) { store.setDetectorSettingsReadOutRate(new Frequency(handler.getReadOutRate(), UNITS.HERTZ), i, c); } if (gain == null) { gain = handler.getGain(); } if (gain != null) { store.setDetectorSettingsGain(gain, i, c); } store.setDetectorSettingsID(detectorID, i, c); if (wave != null && waveIndex < wave.length) { Length wavelength = FormatTools.getWavelength(wave[waveIndex]); if ((int) wave[waveIndex] >= 1) { // link LightSource to Image int laserIndex = i * getEffectiveSizeC() + c; String lightSourceID = MetadataTools.createLSID("LightSource", 0, laserIndex); 
store.setLaserID(lightSourceID, 0, laserIndex); store.setChannelLightSourceSettingsID(lightSourceID, i, c); store.setLaserType(getLaserType("Other"), 0, laserIndex); store.setLaserLaserMedium(getLaserMedium("Other"), 0, laserIndex); if (wavelength != null) { store.setChannelLightSourceSettingsWavelength(wavelength, i, c); } } } waveIndex++; } timestamps = handler.getTimestamps(); for (int t = 0; t < timestamps.size(); t++) { String date = DateTools.convertDate(DateTools.getTime(timestamps.get(t), SHORT_DATE_FORMAT, "."), DateTools.UNIX, SHORT_DATE_FORMAT + ".SSS"); addSeriesMetaList("timestamp", date); } long startDate = 0; if (timestamps.size() > 0) { startDate = DateTools.getTime(timestamps.get(0), SHORT_DATE_FORMAT, "."); } final Length positionX = handler.getStagePositionX(); final Length positionY = handler.getStagePositionY(); final List<Double> exposureTimes = handler.getExposures(); if (exposureTimes.size() == 0) { for (int p = 0; p < getImageCount(); p++) { exposureTimes.add(exposureTime); } } else if (exposureTimes.size() == 1 && exposureTimes.size() < getEffectiveSizeC()) { for (int c = 1; c < getEffectiveSizeC(); c++) { MetamorphHandler channelHandler = new MetamorphHandler(); String channelComment = getComment(i, c); if (channelComment != null && channelComment.startsWith("<MetaData>")) { try { XMLTools.parseXML(XMLTools.sanitizeXML(channelComment), channelHandler); } catch (IOException e) { } } final List<Double> channelExpTime = channelHandler.getExposures(); exposureTimes.add(channelExpTime.get(0)); } } int lastFile = -1; IFDList lastIFDs = null; IFD lastIFD = null; double distance = zStart; TiffParser tp = null; RandomAccessInputStream stream = null; for (int p = 0; p < getImageCount(); p++) { int[] coords = getZCTCoords(p); Double deltaT = 0d; Double expTime = exposureTime; Double xmlZPosition = null; int fileIndex = getIndex(0, coords[1], coords[2]) / getSizeZ(); if (fileIndex >= 0) { String file = stks == null ? 
currentId : stks[i][fileIndex]; if (file != null) { if (fileIndex != lastFile) { if (stream != null) { stream.close(); } stream = new RandomAccessInputStream(file, 16); tp = new TiffParser(stream); tp.checkHeader(); IFDList f = tp.getMainIFDs(); if (f.size() > 0) { lastFile = fileIndex; lastIFDs = f; } else { file = null; stks[i][fileIndex] = null; } } } if (file != null) { lastIFD = lastIFDs.get(p % lastIFDs.size()); Object commentEntry = lastIFD.get(IFD.IMAGE_DESCRIPTION); if (commentEntry != null) { if (commentEntry instanceof String) { comment = (String) commentEntry; } else if (commentEntry instanceof TiffIFDEntry) { comment = tp.getIFDValue((TiffIFDEntry) commentEntry).toString(); } } if (comment != null) comment = comment.trim(); if (comment != null && comment.startsWith("<MetaData>")) { String[] lines = comment.split("\n"); timestamps = new ArrayList<String>(); for (String line : lines) { line = line.trim(); if (line.startsWith("<prop")) { int firstQuote = line.indexOf("\"") + 1; int lastQuote = line.lastIndexOf("\""); String key = line.substring(firstQuote, line.indexOf("\"", firstQuote)); String value = line.substring(line.lastIndexOf("\"", lastQuote - 1) + 1, lastQuote); if (key.equals("z-position")) { xmlZPosition = new Double(value); } else if (key.equals("acquisition-time-local")) { timestamps.add(value); } } } } } } int index = 0; if (timestamps.size() > 0) { if (coords[2] < timestamps.size()) index = coords[2]; String stamp = timestamps.get(index); long ms = DateTools.getTime(stamp, SHORT_DATE_FORMAT, "."); deltaT = (ms - startDate) / 1000.0; } else if (internalStamps != null && p < internalStamps.length) { long delta = internalStamps[p] - internalStamps[0]; deltaT = delta / 1000.0; if (coords[2] < exposureTimes.size()) index = coords[2]; } if (index == 0 && p > 0 && exposureTimes.size() > 0) { index = coords[1] % exposureTimes.size(); } if (index < exposureTimes.size()) { expTime = exposureTimes.get(index); } if (deltaT != null) { 
store.setPlaneDeltaT(new Time(deltaT, UNITS.SECOND), i, p); } if (expTime != null) { store.setPlaneExposureTime(new Time(expTime, UNITS.SECOND), i, p); } if (stageX != null && p < stageX.length) { store.setPlanePositionX(stageX[p], i, p); } else if (positionX != null) { store.setPlanePositionX(positionX, i, p); } if (stageY != null && p < stageY.length) { store.setPlanePositionY(stageY[p], i, p); } else if (positionY != null) { store.setPlanePositionY(positionY, i, p); } if (zDistances != null && p < zDistances.length) { if (p > 0) { if (zDistances[p] != 0d) distance += zDistances[p]; else distance += zDistances[0]; } final Length zPos = new Length(distance, UNITS.REFERENCEFRAME); store.setPlanePositionZ(zPos, i, p); } else if (xmlZPosition != null) { final Length zPos = new Length(xmlZPosition, UNITS.REFERENCEFRAME); store.setPlanePositionZ(zPos, i, p); } } if (stream != null) { stream.close(); } } setSeries(0); }
From source file:org.apache.hadoop.yarn.server.resourcemanager.webapp.RMAppsBlock.java
/**
 * Renders the applications table for the RM web UI: emits the table header
 * markup, then builds a JavaScript array literal ("appsTableData") with one
 * row per application report and injects it into the page as an inline
 * script (the rows are rendered client-side from that variable).
 */
@Override
protected void renderData(Block html) {
    // Table header only — one <th> per column; data rows come from the
    // appsTableData JS variable emitted at the bottom of this method.
    TBODY<TABLE<Hamlet>> tbody = html.table("#apps").thead().tr().th(".id", "ID").th(".user", "User")
            .th(".name", "Name").th(".type", "Application Type").th(".queue", "Queue")
            .th(".priority", "Application Priority").th(".starttime", "StartTime")
            .th(".finishtime", "FinishTime").th(".state", "State").th(".finalstatus", "FinalStatus")
            .th(".runningcontainer", "Running Containers").th(".allocatedCpu", "Allocated CPU VCores")
            .th(".allocatedMemory", "Allocated Memory MB").th(".allocatedGpu", "Allocated GPU")
            .th(".queuePercentage", "% of Queue").th(".clusterPercentage", "% of Cluster")
            .th(".progress", "Progress").th(".ui", "Tracking UI").th(".blacklisted", "Blacklisted Nodes")._()
            ._().tbody();
    // JSON-like array-of-arrays; each inner array is one table row.
    StringBuilder appsTableData = new StringBuilder("[\n");
    for (ApplicationReport appReport : appReports) {
        // TODO: remove the following condition. It is still here because
        // the history side implementation of ApplicationBaseProtocol
        // hasn't filtering capability (YARN-1819).
        if (!reqAppStates.isEmpty() && !reqAppStates.contains(appReport.getYarnApplicationState())) {
            continue;
        }
        AppInfo app = new AppInfo(appReport);
        ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString(app.getCurrentAppAttemptId());
        // Usage percentages are only available while a usage report exists.
        String queuePercent = "N/A";
        String clusterPercent = "N/A";
        if (appReport.getApplicationResourceUsageReport() != null) {
            queuePercent = String.format("%.1f",
                    appReport.getApplicationResourceUsageReport().getQueueUsagePercentage());
            clusterPercent = String.format("%.1f",
                    appReport.getApplicationResourceUsageReport().getClusterUsagePercentage());
        }
        // Blacklisted-node count comes from the live RMApp (if the app is
        // still known to this RM); otherwise it stays "N/A".
        String blacklistedNodesCount = "N/A";
        RMApp rmApp = rm.getRMContext().getRMApps().get(appAttemptId.getApplicationId());
        if (rmApp != null) {
            RMAppAttempt appAttempt = rmApp.getRMAppAttempt(appAttemptId);
            Set<String> nodes = null == appAttempt ? null : appAttempt.getBlacklistedNodes();
            if (nodes != null) {
                blacklistedNodesCount = String.valueOf(nodes.size());
            }
        }
        String percent = StringUtils.format("%.1f", app.getProgress());
        // Row payload. User-supplied strings (user, name, type, queue) are
        // HTML-escaped then JS-escaped so they are safe inside the script.
        // Numeric fields use -1 as the "unknown" sentinel -> rendered "N/A".
        appsTableData.append("[\"<a href='").append(url("app", app.getAppId())).append("'>")
                .append(app.getAppId()).append("</a>\",\"")
                .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app.getUser())))
                .append("\",\"")
                .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app.getName())))
                .append("\",\"")
                .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app.getType())))
                .append("\",\"")
                .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app.getQueue())))
                .append("\",\"").append(String.valueOf(app.getPriority())).append("\",\"")
                .append(app.getStartedTime()).append("\",\"").append(app.getFinishedTime()).append("\",\"")
                .append(app.getAppState() == null ? UNAVAILABLE : app.getAppState()).append("\",\"")
                .append(app.getFinalAppStatus()).append("\",\"")
                .append(app.getRunningContainers() == -1 ? "N/A" : String.valueOf(app.getRunningContainers()))
                .append("\",\"")
                .append(app.getAllocatedCpuVcores() == -1 ? "N/A" : String.valueOf(app.getAllocatedCpuVcores()))
                .append("\",\"")
                .append(app.getAllocatedMemoryMB() == -1 ? "N/A" : String.valueOf(app.getAllocatedMemoryMB()))
                .append("\",\"")
                .append(app.getAllocatedGpu() == -1 ? "N/A" : String.valueOf(app.getAllocatedGpu()))
                .append("\",\"").append(queuePercent).append("\",\"").append(clusterPercent).append("\",\"")
                // Progress bar
                .append("<br title='").append(percent).append("'> <div class='").append(C_PROGRESSBAR)
                .append("' title='").append(join(percent, '%')).append("'> ").append("<div class='")
                .append(C_PROGRESSBAR_VALUE).append("' style='").append(join("width:", percent, '%'))
                .append("'> </div> </div>").append("\",\"<a ");
        // No tracking link while the app is NEW or the URL is unset; after a
        // terminal state the link points at the history server ("History"),
        // otherwise at the running ApplicationMaster.
        String trackingURL = app.getTrackingUrl() == null || app.getTrackingUrl().equals(UNAVAILABLE)
                || app.getAppState() == YarnApplicationState.NEW ? null : app.getTrackingUrl();
        String trackingUI = app.getTrackingUrl() == null || app.getTrackingUrl().equals(UNAVAILABLE)
                || app.getAppState() == YarnApplicationState.NEW
                        ? "Unassigned"
                        : app.getAppState() == YarnApplicationState.FINISHED
                                || app.getAppState() == YarnApplicationState.FAILED
                                || app.getAppState() == YarnApplicationState.KILLED ? "History"
                                        : "ApplicationMaster";
        appsTableData.append(trackingURL == null ? "#" : "href='" + trackingURL).append("'>").append(trackingUI)
                .append("</a>\",").append("\"").append(blacklistedNodesCount).append("\"],\n");
    }
    // Each row ends with "],\n", so the comma (if any rows were written) sits
    // at length-2; drop just that comma and keep the trailing newline.
    if (appsTableData.charAt(appsTableData.length() - 2) == ',') {
        appsTableData.delete(appsTableData.length() - 2, appsTableData.length() - 1);
    }
    appsTableData.append("]");
    html.script().$type("text/javascript")._("var appsTableData=" + appsTableData)._();
    tbody._()._();
}
From source file:com.arksoft.epamms.ZGlobal1_Operation.java
/** //////////////////////////////////////////////////////////////////////////////////////////////////// ///*from w w w . j a v a 2 s . c om*/ // Method Name: GetDataTypeForAttribute // // Return the Data Type for an attribute // //////////////////////////////////////////////////////////////////////////////////////////////////// public int GetDataTypeForAttribute( String stringDataType, View lpView, String entityName, String attributeName ) { LPVIEWENTITY lpEntityDef; LPVIEWATTRIB lpAttributeDef; int nRC; lpEntityDef = String zGETPTR( MiGetEntityDefForView( lpView, entityName ) ); if ( lpEntityDef == 0 ) return -16; // Position on attribute. #ifdef VIEWENTITY_OD lpAttributeDef = String zGETPTR( lpEntityDef->hFirstOD_Attrib ); nRC = 1; while ( lpAttributeDef > 0 && nRC > 0 ) { if ( zstrcmp( lpAttributeDef->stringName, attributeName ) == 0 ) nRC = 0; if ( nRC > 0 ) lpAttributeDef = String zGETPTR( lpAttributeDef->hNextOD_Attrib ); } #else lpAttributeDef = String zGETPTR( lpEntityDef->hFirstAttributeDef ); nRC = 1; while ( lpAttributeDef > 0 && nRC > 0 ) { if ( zstrcmp( lpAttributeDef->stringName, attributeName ) == 0 ) nRC = 0; if ( nRC > 0 ) lpAttributeDef = String zGETPTR( lpAttributeDef->hNextAttributeDef ); } #endif if ( nRC > 0 ) { MessageSend( lpView, "", "GetDataTypeForAttribute", "The attribute specified was not found.", zMSGQ_OBJECT_CONSTRAINT_ERROR, 0 ); return -1; } // Set single character datatype followed by a string terminator. *stringDataType = lpAttributeDef->hDomain->cType; *(stringDataType + 1) = 0; return 0; } // GetDataTypeForAttribute **/ // int ParseOutEntityAttribute(String entityDotAttribute, StringBuilder entityName, StringBuilder attributeName) { int k; int lSkipLth; // Initialize entityName and attributeName. entityName.replace(0, -1, entityDotAttribute); attributeName.delete(0, -1); // entityDotAttribute is pointing to the first character of the entity name on entry to this routine. 
// Parse out Entity Name for (k = 0; k < entityName.length(); k++) { char ch = entityName.charAt(k); if (ch == '.' || ch == ']' || ch == '}') { entityName.setCharAt(k, '\0'); if (ch == '}') return -2; if (ch != ']') // there is an attribute, so keep going { int j = 0; k++; // Parse out Attribute Name ch = entityDotAttribute.charAt(k); while (ch != ']' && ch != '}') { if (ch == '}') return -2; attributeName.setCharAt(j, ch); j++; k++; ch = entityDotAttribute.charAt(k); } attributeName.setCharAt(k, '\0'); } } } lSkipLth = k + 1; // TODO not sure this translation to java is exactly right for SkipLth return lSkipLth; }
From source file:net.emotivecloud.scheduler.drp4one.DRP4OVF.java
/** * * Extract Disk Attributes for the VM template * * @param ovf file where the disk attributes will be extractred from * @return String with the disk description section for the ONE VM Template *//*w w w . j av a2 s. c om*/ private String VMDescriptionDisks(EmotiveOVF ovf) { StringBuilder buf = new StringBuilder(); // Let's avoid some work to the JVM :) // 100 characters should avoid buffer resizing in most cases. // Don't worry, no buffer overflow ahead :) StringBuilder propertyName = new StringBuilder(100); log.debug("ovfdiskssize:" + ovf.getDisks().size()); TargetedDisk sortedDisks[] = new TargetedDisk[ovf.getDisks().size()]; int tgtDskIdx = 0; /* * It seems that OpenNebula requires that the disks are listed in the * physical order, while OVF has no constraint on the position of the * disks definition within the list. So I sort the disks according to * the physical device name: * * TargetedDisk is a Comparable extension of OVFDisk that orders the * disks according to the device name, in the same way Linux does. */ for (OVFDisk ovfDisk : ovf.getDisks().values()) { log.debug(ovfDisk.getId()); sortedDisks[tgtDskIdx++] = new TargetedDisk(ovfDisk.getId(), ovfDisk, ovf); } log.debug("number of sorted disks:" + sortedDisks.length); Arrays.sort(sortedDisks); // retrieve from product property disk name if (ovf.getProductProperty(VM_BASEIMAGE) != null) { log.debug(ovf.getProductProperty(VM_BASEIMAGE)); OVFDisk master_disk = ovf.getDisks().get(ovf.getProductProperty(VM_BASEIMAGE)); log.debug(master_disk); log.debug("master disk id:" + master_disk.getId()); propertyName.append(master_disk.getId()); propertyName.append(PROP_PATORURL); TargetedDisk ovfDisk1 = new TargetedDisk("master", master_disk, ovf); buf.append("DISK = [\n"); propertyName.append(master_disk.getId()); propertyName.append(PROP_PATORURL); // If we have a path or URL specified, we have a pysical // disk. Else we have a pre-registered disk resource. 
String pathOrURL = ovfDisk1.getHref(); log.debug("got image ref:" + pathOrURL); System.out.println("DRP4ONE - VMDescriptionDisks() > buf before: \n" + buf.toString()); addVirtualDiskConfig(ovfDisk1, pathOrURL, ovf, propertyName, buf); System.out.println("DRP4ONE - VMDescriptionDisks() > buf after: \n" + buf.toString()); buf.append("]\n"); weGotDisk = true; } for (TargetedDisk ovfDisk : sortedDisks) { if (ovfDisk.getDskName().equals(ovf.getProductProperty(VM_BASEIMAGE))) { continue; } System.out.println("DRP4ONE - VMDescriptionDisks() > for: entro n l for" + buf.toString()); buf.append("DISK = [\n"); propertyName.delete(0, propertyName.length()); log.debug("got disk name:" + ovfDisk.getDskName()); System.out.println("DRP4ONE - VMDescriptionDisks() > for: got disk name:" + ovfDisk.getDskName()); propertyName.append(ovfDisk.getDskName()); propertyName.append(PROP_PATORURL); String pathOrURL = ovfDisk.getHref(); log.debug("got image ref:" + pathOrURL); System.out.println("DRP4ONE - VMDescriptionDisks() > for: got image ref:" + pathOrURL); if (pathOrURL == null) { pathOrURL = ovf4oneProperties(propertyName.toString(), false); } propertyName.delete(ovfDisk.getDskNameLen(), propertyName.length()); propertyName.append(PROP_TYPE); // String typeName = // ovf.getProductProperty(propertyName.toString()); String typeName = ovf.getProductProperty(propertyName.toString()); String propTypeName = ovf4oneProperties(propertyName.toString(), false); // When no disk type is specified, OpenNebula defaults to disk, and // we do so. DskType dskType = (typeName == null || "".equals(typeName.trim())) ? ((propTypeName == null || "".equals(propTypeName.trim())) ? 
DskType.virtualDisk : DskType.fromString(propTypeName)) : DskType.fromString(typeName); log.debug("Disk type" + dskType); System.out.println("DRP4ONE - VMDescriptionDisks() > for: Disk type" + dskType); if (dskType == null) { throw new DRPOneException( "\n.OVF file is missing mandatory disk type for disk " + ovfDisk.getDskName(), StatusCodes.NOT_FOUND); } Long size = ovfDisk.getCapacityMB(); if (size == null && (dskType == DskType.virtualSwap || dskType == DskType.onTheFlyDisk)) { throw new DRPOneException("\n.OVF file is missing mandatory size specification for a disk ", StatusCodes.BAD_OVF); } switch (dskType) { case virtualDisk: System.out.println("virtualDisk"); addVirtualDiskConfig(ovfDisk, pathOrURL, ovf, propertyName, buf); System.out.println("DRP4ONE > Finishes addVirtualDiskConfig() virtualDisk"); weGotDisk = true; break; case virtualSwap: System.out.println("virtualSwap"); if (pathOrURL != null && !"".equals(pathOrURL)) { buf.append("SOURCE = \""); buf.append(pathOrURL); buf.append("\",\n"); } // else // throw new // DRPOneException("\n.OVF file is missing mandatory path or URL specification for swap disk named" // + ovfDisk.getDskName(), // StatusCodes.BAD_OVF); if (ovfDisk.getTarget() == null) { throw new DRPOneException( "Missing mandatory target specification for a swap disk named " + ovfDisk.getDskName(), StatusCodes.BAD_OVF); } buf.append("TYPE=swap,\n"); buf.append("SIZE = "); buf.append(size); buf.append(",\n"); buf.append("TARGET = \""); buf.append(ovfDisk.getTarget()); buf.append("\""); // update the init.sh file to swapon the swap space arg2.add(ovfDisk.getTarget()); try { // bw.write("/sbin/swapon /dev/"+device_map.get(no_of_virtual_disk++)+"\n"); } catch (Exception e) { log.error("io exception occurred"); } break; case blockDevice: System.out.println("blockDevice"); buf.append("TYPE=block,\n"); buf.append("SOURCE = \""); buf.append(pathOrURL); buf.append("\",\n"); if (ovfDisk.getTarget() != null) { buf.append("TARGET = \""); } 
buf.append(ovfDisk.getTarget()); buf.append("\""); break; case onTheFlyDisk: System.out.println("onTheFlyDisk"); propertyName.delete(ovfDisk.getDskNameLen(), propertyName.length()); propertyName.append(PROP_FORMAT); log.debug("received on the fly disk"); // //@madigiro - 21 july String format = ovf.getProductProperty(propertyName.toString()); if (format == null) { format = ovf4oneProperties(propertyName.toString(), false); } if (format == null) { throw new DRPOneException( "Missing mandatory format specification for an ont the fly disk image " + ovfDisk.getDskName(), StatusCodes.BAD_OVF); } buf.append("TYPE=fs,\n"); if (pathOrURL != null && !"".equals(pathOrURL)) { buf.append("SOURCE = \""); buf.append(pathOrURL); buf.append("\",\n"); } buf.append("SIZE = "); buf.append(size); buf.append(",\n"); buf.append("FORMAT = \""); buf.append(format); buf.append("\",\n"); if (ovfDisk.getTarget() != null) { buf.append("TARGET = \""); } buf.append(ovfDisk.getTarget()); buf.append("\""); arg1.add(ovfDisk.getTarget()); }// switch finishes here typeName = ""; pathOrURL = ""; buf.append("]\n"); } return buf.toString(); }