List of usage examples for org.apache.commons.lang StringUtils ordinalIndexOf
public static int ordinalIndexOf(String str, String searchStr, int ordinal)
Finds the n-th index within a String, handling null.
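Before the real-world examples, here is a minimal sketch of the method's behavior, based on the examples in the Commons Lang javadoc (a null input or search String yields -1, i.e. StringUtils.INDEX_NOT_FOUND):

import org.apache.commons.lang.StringUtils;

public class OrdinalIndexOfDemo {
    public static void main(String[] args) {
        // A null String or search String returns -1 (StringUtils.INDEX_NOT_FOUND).
        System.out.println(StringUtils.ordinalIndexOf(null, "a", 1));       // -1
        // Finds the index of the n-th occurrence of the search String.
        System.out.println(StringUtils.ordinalIndexOf("aabaabaa", "a", 1)); // 0
        System.out.println(StringUtils.ordinalIndexOf("aabaabaa", "a", 2)); // 1
        System.out.println(StringUtils.ordinalIndexOf("aabaabaa", "b", 2)); // 5
        System.out.println(StringUtils.ordinalIndexOf("aabaabaa", "ab", 2)); // 4
        // If there are fewer than n occurrences, -1 is returned.
        System.out.println(StringUtils.ordinalIndexOf("aabaabaa", "b", 3)); // -1
    }
}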
From source file:ezbake.warehaus.WarehausUtils.java
public static String getUriPrefixFromUri(String uri) {
    // Assuming the uri schema is of the form - CATEGORY://FEED_NAME/ID
    String uriPrefix = uri;
    int idx = StringUtils.ordinalIndexOf(uri, "/", 3);
    if (idx > -1) {
        uriPrefix = uri.substring(0, idx);
    }
    return uriPrefix;
}
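A quick illustration of why the third "/" is the cut point for a CATEGORY://FEED_NAME/ID style URI (the URI below is a hypothetical example, not taken from the project):

// Hypothetical input following the CATEGORY://FEED_NAME/ID convention.
String uri = "category://feed_name/id";
int idx = StringUtils.ordinalIndexOf(uri, "/", 3); // index of the slash before the ID segment
System.out.println(uri.substring(0, idx));         // prints "category://feed_name"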
From source file:com.haulmont.cuba.restapi.CommitRequest.java
public InstanceRef parseInstanceRefAndRegister(String fullId) throws InstantiationException, IllegalAccessException {
    EntityLoadInfo loadInfo;
    if (!fullId.startsWith("NEW-")) {
        loadInfo = EntityLoadInfo.parse(fullId);
        if (loadInfo == null) {
            throw new RuntimeException("Cannot parse id: " + fullId);
        }
        InstanceRef existingRef = instanceRefs
                .get(loadInfo.getMetaClass().getName() + "-" + loadInfo.getId().toString());
        if (existingRef != null) {
            return existingRef;
        }
    } else {
        int idDashIndex = StringUtils.ordinalIndexOf(fullId, "-", 2);
        if (idDashIndex == -1) {
            String entityName = fullId.substring("NEW-".length());
            String generatedId = generateId(entityName);
            fullId = fullId + "-" + generatedId;
        }
        loadInfo = EntityLoadInfo.parse(fullId);
        if (loadInfo == null) {
            throw new RuntimeException("Cannot parse id: " + fullId);
        }
    }
    if (loadInfo.isNewEntity())
        newInstanceIds.add(loadInfo.toString());
    InstanceRef result = new InstanceRef(loadInfo);
    instanceRefs.put(loadInfo.getMetaClass().getName() + "-" + loadInfo.getId().toString(), result);
    return result;
}
From source file:com.liferay.ide.project.core.modules.ServiceCommand.java
private String[] _getServiceBundle(String serviceName, GogoBundleDeployer bundleDeployer) throws Exception {
    String[] serviceBundleInfo;
    String bundleGroup = "";
    String bundleName;
    String bundleVersion;

    // String result = supervisor.packages("packages " + serviceName.substring(0, serviceName.lastIndexOf(".")));
    String result = "";

    if (result.startsWith("No exported packages")) {
        //result = supervisor.run("services (objectClass=" + serviceName + ") | grep \"Registered by bundle:\" ");
        result = "";
        serviceBundleInfo = _parseRegisteredBundle(result);
    } else {
        serviceBundleInfo = _parseSymbolicName(result);
    }

    bundleName = serviceBundleInfo[0];
    bundleVersion = serviceBundleInfo[1];

    if (bundleName.equals("org.eclipse.osgi,system.bundle")) {
        bundleGroup = "com.liferay.portal";
    } else if (bundleName.startsWith("com.liferay")) {
        bundleGroup = "com.liferay";
    } else {
        int ordinalIndexOf = StringUtils.ordinalIndexOf(bundleName, ".", 3);
        if (ordinalIndexOf != -1) {
            bundleGroup = bundleName.substring(0, ordinalIndexOf);
        } else {
            ordinalIndexOf = StringUtils.ordinalIndexOf(bundleName, ".", 2);
            if (ordinalIndexOf != -1) {
                bundleGroup = bundleName.substring(0, ordinalIndexOf);
            }
        }
    }

    return new String[] { bundleGroup, bundleName, bundleVersion };
}
From source file:hivemall.fm.FieldAwareFactorizationMachineUDTFTest.java
private static void runTest(String testName, String testOptions, float lossThreshold) throws IOException, HiveException {
    println(testName);
    FieldAwareFactorizationMachineUDTF udtf = new FieldAwareFactorizationMachineUDTF();
    ObjectInspector[] argOIs = new ObjectInspector[] {
            ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector),
            PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
            ObjectInspectorUtils.getConstantObjectInspector(
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector, testOptions) };
    udtf.initialize(argOIs);
    FieldAwareFactorizationMachineModel model = udtf.initModel(udtf._params);
    Assert.assertTrue("Actual class: " + model.getClass().getName(), model instanceof FFMStringFeatureMapModel);

    double loss = 0.d;
    double cumul = 0.d;
    for (int trainingIteration = 1; trainingIteration <= ITERATIONS; ++trainingIteration) {
        BufferedReader data = new BufferedReader(new InputStreamReader(
                FieldAwareFactorizationMachineUDTFTest.class.getResourceAsStream("bigdata.tr.txt")));
        loss = udtf._cvState.getCumulativeLoss();
        int lines = 0;
        for (int lineNumber = 0; lineNumber < MAX_LINES; ++lineNumber, ++lines) {
            //gather features in current line
            final String input = data.readLine();
            if (input == null) {
                break;
            }
            ArrayList<String> featureStrings = new ArrayList<String>();
            ArrayList<StringFeature> features = new ArrayList<StringFeature>();

            //make StringFeature for each word = data point
            String remaining = input;
            int wordCut = remaining.indexOf(' ');
            while (wordCut != -1) {
                featureStrings.add(remaining.substring(0, wordCut));
                remaining = remaining.substring(wordCut + 1);
                wordCut = remaining.indexOf(' ');
            }
            int end = featureStrings.size();
            double y = Double.parseDouble(featureStrings.get(0));
            if (y == 0) {
                y = -1; //LibFFM data uses {0, 1}; Hivemall uses {-1, 1}
            }
            for (int wordNumber = 1; wordNumber < end; ++wordNumber) {
                String entireFeature = featureStrings.get(wordNumber);
                int featureCut = StringUtils.ordinalIndexOf(entireFeature, ":", 2);
                String feature = entireFeature.substring(0, featureCut);
                double value = Double.parseDouble(entireFeature.substring(featureCut + 1));
                features.add(new StringFeature(feature, value));
            }
            udtf.process(new Object[] { toStringArray(features), y });
        }
        cumul = udtf._cvState.getCumulativeLoss();
        loss = (cumul - loss) / lines;
        println(trainingIteration + " " + loss + " " + cumul / (trainingIteration * lines));
        data.close();
    }
    println("model size=" + udtf._model.getSize());
    Assert.assertTrue("Last loss was greater than expected: " + loss, loss < lossThreshold);
}
From source file:io.hops.examples.spark.kafka.StreamingLogs.java
private static JSONObject parser(String line, String appId) {
    JSONObject jsonLog = new JSONObject(line);
    JSONObject index = new JSONObject();
    String priority, logger, thread, timestamp;
    priority = logger = thread = timestamp = null;
    //Sample line:
    String[] attrs = jsonLog.getString("message")
            .substring(0, StringUtils.ordinalIndexOf(jsonLog.getString("message"), " ", 4)).split(" ");
    String message = jsonLog.getString("message")
            .substring(StringUtils.ordinalIndexOf(jsonLog.getString("message"), " ", 4) + 1);
    try {
        priority = attrs[2];
        logger = attrs[3];
        //thread = attrs[5];
        timestamp = attrs[0] + " " + attrs[1];
        //Convert timestamp to appropriate format
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss,SSS");
        Date result = df.parse(timestamp);
        Locale currentLocale = Locale.getDefault();
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", currentLocale);
        timestamp = format.format(result);
    } catch (Exception ex) {
        LOG.log(Level.WARNING, "Error while parsing log, setting default index parameters:{0}",
                ex.getMessage());
        message = jsonLog.getString("message");
        priority = "parse error";
        logger = "parse error";
        //thread = "parse error";
        timestamp = "parse error";
    }
    index.put("message", message);
    index.put("priority", priority);
    index.put("logger_name", logger);
    index.put("timestamp", timestamp);
    index.put("application", appId);
    index.put("host", jsonLog.getJSONObject("beat").getString("hostname"));
    index.put("project", Hops.getProjectName());
    index.put("jobname", Hops.getJobName());
    if (jsonLog.getString("source").contains("/")) {
        index.put("file", jsonLog.getString("source").substring(jsonLog.getString("source").lastIndexOf("/") + 1));
    } else {
        index.put("file", jsonLog.getString("source"));
    }
    return index;
}
From source file:com.liferay.ide.project.core.modules.ServiceWrapperCommand.java
private void _getServiceWrapperList(Map<String, String[]> wrapperMap, String name, JarInputStream jarInputStream) {
    if (name.endsWith("ServiceWrapper.class") && !name.contains("$")) {
        name = name.replaceAll("\\\\", ".").replaceAll("/", ".");
        name = name.substring(0, name.lastIndexOf("."));
        Attributes mainAttributes = jarInputStream.getManifest().getMainAttributes();
        String bundleName = mainAttributes.getValue("Bundle-SymbolicName");
        String version = mainAttributes.getValue("Bundle-Version");
        String group = "";
        if (bundleName.equals("com.liferay.portal.kernel")) {
            group = "com.liferay.portal";
        } else {
            int ordinalIndexOf = StringUtils.ordinalIndexOf(bundleName, ".", 2);
            if (ordinalIndexOf != -1) {
                group = bundleName.substring(0, ordinalIndexOf);
            }
        }
        wrapperMap.put(name, new String[] { group, bundleName, version });
    }
}
From source file:com.oneops.opamp.service.Notifications.java
/**
 * Send ops event notification.
 *
 * @param event change event for which the notification needs to be sent.
 * @param note Additional note for notification.
 * @param severity severity of notification
 */
public NotificationMessage sendOpsEventNotification(CiChangeStateEvent event, String note,
        NotificationSeverity severity, String subject, String text, Map<String, Object> payloadEntries) {
    OpsBaseEvent oEvent = getEventUtil().getOpsEvent(event);
    if (oEvent == null)
        return null;
    CmsCI ci = cmProcessor.getCiById(oEvent.getCiId());
    if (ci == null) {
        logger.error("Can not get CmsCI for id - " + oEvent.getCiId());
        return null;
    }
    NotificationMessage notify = new NotificationMessage();
    notify.setType(NotificationType.ci);
    notify.setCmsId(oEvent.getCiId());
    notify.setSource(NOTIFICATION_SOURCE);
    notify.setNsPath(ci.getNsPath());
    notify.setTimestamp(oEvent.getTimestamp());
    notify.setCloudName(event.getCloudName());
    notify.putPayloadEntry(EVENT_NAME, oEvent.getSource());
    notify.putPayloadEntry(CI_NAME, ci.getCiName());
    notify.putPayloadEntry(STATUS, oEvent.getStatus());
    notify.putPayloadEntry(OLD_STATE, event.getOldState());
    notify.putPayloadEntry(NEW_STATE, event.getNewState());
    notify.putPayloadEntry(CLASS_NAME, ci.getCiClassName());
    notify.putPayloadEntry(STATE, oEvent.getState());
    if (oEvent.getMetrics() != null) {
        notify.putPayloadEntry(METRICS, gson.toJson(oEvent.getMetrics()));
    }
    addIpToNotification(ci, notify);
    if (payloadEntries != null && payloadEntries.size() > 0) {
        notify.putPayloadEntries(payloadEntries);
    }
    notify.setManifestCiId(oEvent.getManifestId());
    if (event.getComponentStatesCounters() != null) {
        for (String counterName : event.getComponentStatesCounters().keySet()) {
            notify.putPayloadEntry(counterName,
                    String.valueOf(event.getComponentStatesCounters().get(counterName)));
        }
    }
    // tomcat-compute-cpu:HighCpuUsage -> split.
    // app-tomcat-JvmInfo:PANGAEA:APP:US:Taxo_Svc:jvm:memory
    int index = StringUtils.ordinalIndexOf(oEvent.getName(), COLON, 1);
    if (index != StringUtils.INDEX_NOT_FOUND) {
        String threshHoldName = oEvent.getName().substring(index + 1);
        notify.putPayloadEntry(THRESHOLD, threshHoldName);
    }
    CmsCI envCi = envProcessor.getEnv4Bom(ci.getCiId());
    CmsCIAttribute envProfileAttrib = envCi.getAttribute(PROFILE_ATTRIBUTE_NAME);
    if (envProfileAttrib != null) {
        notify.setEnvironmentProfileName(envProfileAttrib.getDfValue());
    }
    CmsCIAttribute adminStatusAttrib = envCi.getAttribute(ADMINSTATUS_ATTRIBUTE_NAME);
    if (adminStatusAttrib != null) {
        notify.setAdminStatus(envCi.getAttribute(ADMINSTATUS_ATTRIBUTE_NAME).getDfValue());
    }
    notify.setManifestCiId(oEvent.getManifestId());
    String subjectPrefix = NotificationMessage.buildSubjectPrefix(ci.getNsPath());
    if (oEvent.getState().equalsIgnoreCase("open")) {
        if (severity == null) {
            notify.setSeverity(NotificationSeverity.warning);
        } else {
            notify.setSeverity(severity);
        }
        if (StringUtils.isNotEmpty(subject)) {
            notify.setSubject(subjectPrefix + subject);
        } else {
            notify.setSubject(subjectPrefix + oEvent.getName() + SUBJECT_SUFFIX_OPEN_EVENT);
        }
        //subject = <monitorName:[threshold_name|heartbeat]> [is violated|recovered]
        //text = <ciName> is <newState>, <<opamp action/notes>>
        if (StringUtils.isNotEmpty(text)) {
            notify.setText(text);
        } else {
            notify.setText(ci.getCiName() + " is in " + event.getNewState() + " state");
        }
    } else if (oEvent.getState().equalsIgnoreCase("close")) {
        // close events go on INFO
        notify.setSeverity(NotificationSeverity.info);
        if (StringUtils.isNotEmpty(subject)) {
            notify.setSubject(subjectPrefix + subject);
        } else {
            notify.setSubject(subjectPrefix + oEvent.getName() + SUBJECT_SUFFIX_CLOSE_EVENT);
        }
        if (StringUtils.isNotEmpty(text)) {
            notify.setText(text);
        } else {
            notify.setText(ci.getCiName() + " is in " + event.getNewState() + " state");
        }
    }
    if (StringUtils.isNotEmpty(note)) {
        notify.appendText("; " + note);
    } else {
        notify.appendText(".");
    }
    if (logger.isDebugEnabled()) {
        Gson gson = new Gson();
        logger.debug(gson.toJson(notify));
    }
    antennaClient.executeAsync(notify);
    return notify;
}
From source file:de.tudarmstadt.ukp.csniper.webapp.search.tgrep.TgrepQuery.java
private List<EvaluationItem> parseOutput(List<String> aOutput) {
    List<EvaluationItem> items = new ArrayList<EvaluationItem>();
    if (aOutput.size() % LINES_PER_MATCH > 0) {
        throw new DataAccessResourceFailureException("Tgrep2 produced [" + aOutput.size()
                + "] output lines, but should have produced a multiple of [" + LINES_PER_MATCH + "].");
    } else {
        String[] comment;
        String text;
        int tokenBeginIndex;
        int tokenEndIndex;
        for (Iterator<String> it = aOutput.iterator(); it.hasNext();) {
            // comment - split into documentId, beginOffset, endOffset
            comment = it.next().substring(2).split(TgrepEngine.COMMENT_SEPARATOR);
            if (comment.length < 3) {
                throw new DataAccessResourceFailureException("The corpus contains a malformed comment line ["
                        + StringUtils.join(comment, " ,") + "].");
            }
            String documentId = comment[META_DOCUMENT_ID];
            int beginOffset = Integer.parseInt(comment[META_BEGIN_OFFSET]);
            int endOffset = Integer.parseInt(comment[META_END_OFFSET]);

            // text string - trim and replace bracket placeholders
            text = it.next().trim();
            text = StringUtils.replace(text, LEFT_BRACKET, "(");
            text = StringUtils.replace(text, RIGHT_BRACKET, ")");

            // token index of first token in match (tgrep indices are 1-based, make them 0-based)
            tokenBeginIndex = Integer.parseInt(it.next()) - 1;
            // token index of last token in match (tgrep indices are 1-based, make them 0-based)
            tokenEndIndex = Integer.parseInt(it.next()) - 1;

            // set corpus position to -1; this is cqp specific and we don't use it atm
            EvaluationItem item = new EvaluationItem(corpus, documentId, type, beginOffset, endOffset, text);

            // text-based (i.e. sentence-based) offsets (+1 to skip the whitespace itself)
            int matchBegin = StringUtils.ordinalIndexOf(text, " ", tokenBeginIndex) + 1;
            int matchEnd = StringUtils.ordinalIndexOf(text, " ", tokenEndIndex + 1);
            item.setMatchOnItemText(matchBegin, matchEnd);
            item.setMatchOnOriginalTextViaTokenIndicesAndLookGoodWhileDoingSo(tokenBeginIndex, tokenEndIndex);

            items.add(item);
        }
    }
    return items;
}
From source file:com.ibm.bi.dml.runtime.instructions.cp.VariableCPInstruction.java
@Override
public void updateInstructionThreadID(String pattern, String replace) throws DMLRuntimeException {
    if (opcode == VariableOperationCode.CreateVariable || opcode == VariableOperationCode.SetFileName) {
        //replace in-memory instruction
        input2.set_name(input2.getName().replaceAll(pattern, replace));

        // Find a start position of file name string.
        int iPos = StringUtils.ordinalIndexOf(instString, Lop.OPERAND_DELIMITOR, CREATEVAR_FILE_NAME_VAR_POS);
        // Find a end position of file name string.
        int iPos2 = StringUtils.indexOf(instString, Lop.OPERAND_DELIMITOR, iPos + 1);

        StringBuilder sb = new StringBuilder();
        sb.append(instString.substring(0, iPos + 1)); // It takes first part before file name.
        // This will replace 'pattern' with 'replace' string from file name.
        sb.append(ProgramConverter.saveReplaceFilenameThreadID(instString.substring(iPos + 1, iPos2 + 1),
                pattern, replace));
        sb.append(instString.substring(iPos2 + 1)); // It takes last part after file name.

        instString = sb.toString();
    }
}
From source file:org.apache.hadoop.gateway.filter.PortMappingHelperHandler.java
@Override
public void handle(final String target, final Request baseRequest, final HttpServletRequest request,
        final HttpServletResponse response) throws IOException, ServletException {
    // If Port Mapping feature enabled
    if (config.isGatewayPortMappingEnabled()) {
        int targetIndex;
        String context = "";
        String baseURI = baseRequest.getUri().toString();

        // extract the gateway specific part i.e. {/gatewayName/}
        String originalContextPath = "";
        targetIndex = StringUtils.ordinalIndexOf(target, "/", 2);

        // Match found e.g. /{string}/
        if (targetIndex > 0) {
            originalContextPath = target.substring(0, targetIndex + 1);
        } else if (targetIndex == -1) {
            targetIndex = StringUtils.ordinalIndexOf(target, "/", 1);
            // For cases "/" and "/hive"
            if (targetIndex == 0) {
                originalContextPath = target;
            }
        }

        // Match "/{gatewayName}/{topologyName/foo" or "/".
        // There could be a case where content is served from the root
        // i.e. https://host:port/
        if (!baseURI.startsWith(originalContextPath)) {
            final int index = StringUtils.ordinalIndexOf(baseURI, "/", 3);
            if (index > 0) {
                context = baseURI.substring(0, index);
            }
        }

        if (!StringUtils.isBlank(context)) {
            LOG.topologyPortMappingAddContext(target, context + target);
        }
        // Move on to the next handler in chain with updated path
        super.handle(context + target, baseRequest, request, response);
    } else {
        super.handle(target, baseRequest, request, response);
    }
}