List of usage examples for java.lang.Exception.getStackTrace()
public StackTraceElement[] getStackTrace()
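Before the project-specific examples below, here is a minimal, self-contained sketch (not drawn from any of the listed projects; the class name and message string are illustrative only) of the pattern they all share: obtain an exception, call getStackTrace(), and walk the returned StackTraceElement array, optionally filtering frames by class name.

import java.util.Arrays;

public class GetStackTraceSketch {
    public static void main(String[] args) {
        try {
            throw new IllegalStateException("demo failure");
        } catch (Exception e) {
            // One StackTraceElement per frame, starting where the exception was created.
            StackTraceElement[] frames = e.getStackTrace();
            for (StackTraceElement frame : frames) {
                System.out.println(frame.getClassName() + "." + frame.getMethodName()
                        + " (line " + frame.getLineNumber() + ")");
            }
            // Idiom used by several of the examples below: test whether a given
            // class appears anywhere in the call stack.
            boolean fromSketch = Arrays.stream(frames)
                    .anyMatch(f -> "GetStackTraceSketch".equals(f.getClassName()));
            System.out.println("called from GetStackTraceSketch: " + fromSketch);
        }
    }
}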
From source file:com.microsoft.tfs.client.eclipse.filemodification.TFSFileModificationValidator.java
public IStatus validateEdit(final IFile[] files, final boolean attemptUi, final Object shell) {
    final ResourceDataManager resourceDataManager = TFSEclipseClientPlugin.getDefault().getResourceDataManager();
    final TFSRepository repository = repositoryProvider.getRepository();

    if (repositoryProvider.getRepositoryStatus() == ProjectRepositoryStatus.CONNECTING) {
        getStatusReporter(attemptUi, shell).reportError(
                Messages.getString("TFSFileModificationValidator.ErrorConnectionInProgress"), //$NON-NLS-1$
                new Status(IStatus.ERROR, TFSEclipseClientPlugin.PLUGIN_ID, 0,
                        Messages.getString("TFSFileModificationValidator.ErrorConnectionInProgressDescription"), //$NON-NLS-1$
                        null));

        return Status.CANCEL_STATUS;
    }

    /*
     * Offline server workspace. Simply mark files as writable and continue.
     */
    if (repository == null) {
        for (int i = 0; i < files.length; i++) {
            log.info(MessageFormat.format("Setting {0} writable, project is offline from TFS server", files[i])); //$NON-NLS-1$
            files[i].setReadOnly(false);
        }

        return Status.OK_STATUS;
    }

    /*
     * Local workspace: ignore this entirely. This method is only called for
     * read-only files. A read only file in a local workspace was not placed
     * by us and we should not set them writable.
     */
    if (WorkspaceLocation.LOCAL.equals(repository.getWorkspace().getLocation())) {
        for (int i = 0; i < files.length; i++) {
            log.info(MessageFormat.format("Ignoring read-only file {0} in local TFS workspace", files[i])); //$NON-NLS-1$
        }

        return Status.OK_STATUS;
    }

    /*
     * HACK: avoid "phantom pending changes" that arise from the undo
     * manager. If we have no undoable context (ie, no Shell), then check to
     * see if we're being called from the undo manager and simply defer this
     * operation.
     *
     * This bug is hard to reproduce, so details are thus far limited. The
     * best guess is that the Eclipse undo manager watches edits using a
     * resource changed listener (for post-change and post-build
     * notifications.) It then realizes that the current contents of a file
     * are identical to a previous undo state, and therefore needs to
     * resynchronize the file history. It then, for some reason, calls
     * validateEdit on that file. This causes the file to be checked out
     * (here.)
     */
    if (attemptUi == false && shell == null) {
        /*
         * Since this is a terrible hack, we may need to have users disable
         * this functionality with a sysprop.
         */
        if ("false".equalsIgnoreCase(System.getProperty(IGNORE_UNDO_MANAGER_PROPERTY_NAME)) == false) //$NON-NLS-1$
        {
            /* Build an exception to get our stack trace. */
            final Exception e = new Exception(""); //$NON-NLS-1$
            e.fillInStackTrace();

            final StackTraceElement[] stackTrace = e.getStackTrace();

            if (stackTrace != null) {
                for (int i = 0; i < stackTrace.length; i++) {
                    if (stackTrace[i].getClassName()
                            .equals("org.eclipse.ui.internal.ide.undo.WorkspaceUndoMonitor")) //$NON-NLS-1$
                    {
                        log.info("Ignoring file modification request from WorkspaceUndoMonitor"); //$NON-NLS-1$
                        return Status.OK_STATUS;
                    }
                }
            }
        }
    }

    /* Pend edits for these files. */
    final List<String> pathList = new ArrayList<String>();
    final Set<String> projectSet = new HashSet<String>();

    for (int i = 0; i < files.length; i++) {
        if (IGNORED_RESOURCES_FILTER.filter(files[i]) == ResourceFilterResult.REJECT) {
            log.info(MessageFormat.format("Setting {0} writable, file matches automatic validation filter", //$NON-NLS-1$
                    files[i]));
            files[i].setReadOnly(false);
            continue;
        }

        /* Make sure that this file exists on the server. */
        if (!resourceDataManager.hasResourceData(files[i])
                && resourceDataManager.hasCompletedRefresh(files[i].getProject())) {
            continue;
        }

        final String path = Resources.getLocation(files[i], LocationUnavailablePolicy.IGNORE_RESOURCE);
        final String serverPath = repository.getWorkspace().getMappedServerPath(path);

        if (path == null) {
            continue;
        }

        final PendingChange pendingChange = repository.getPendingChangeCache().getPendingChangeByLocalPath(path);

        /* Don't pend changes when there's already an add or edit pended. */
        if (pendingChange != null && (pendingChange.getChangeType().contains(ChangeType.ADD)
                || pendingChange.getChangeType().contains(ChangeType.EDIT))) {
            log.debug(MessageFormat.format("File {0} has pending change {1}, ignoring", //$NON-NLS-1$
                    files[i], pendingChange.getChangeType().toUIString(true, pendingChange)));
            continue;
        }

        pathList.add(path);
        log.info(MessageFormat.format("File {0} is being modified, checking out", files[i])); //$NON-NLS-1$

        if (serverPath != null) {
            projectSet.add(ServerPath.getTeamProject(serverPath));
        }
    }

    if (pathList.size() == 0) {
        return Status.OK_STATUS;
    }

    LockLevel forcedLockLevel = null;
    boolean forcedGetLatest = false;

    /*
     * Query the server's default checkout lock and get latest on checkout
     * setting
     */
    for (final Iterator<String> i = projectSet.iterator(); i.hasNext();) {
        final String teamProject = i.next();

        final String exclusiveCheckoutAnnotation = repository.getAnnotationCache()
                .getAnnotationValue(VersionControlConstants.EXCLUSIVE_CHECKOUT_ANNOTATION, teamProject, 0);
        final String getLatestAnnotation = repository.getAnnotationCache()
                .getAnnotationValue(VersionControlConstants.GET_LATEST_ON_CHECKOUT_ANNOTATION, teamProject, 0);

        if ("true".equalsIgnoreCase(exclusiveCheckoutAnnotation)) //$NON-NLS-1$
        {
            forcedLockLevel = LockLevel.CHECKOUT;
            break;
        }

        /* Server get latest on checkout forces us to work synchronously */
        if ("true".equalsIgnoreCase(getLatestAnnotation)) //$NON-NLS-1$
        {
            forcedGetLatest = true;
        }
    }

    /* Allow UI hooks to handle prompt before checkout. */
    final TFSFileModificationOptions checkoutOptions = getOptions(attemptUi, shell,
            pathList.toArray(new String[pathList.size()]), forcedLockLevel);

    if (!checkoutOptions.getStatus().isOK()) {
        return checkoutOptions.getStatus();
    }

    final String[] paths = checkoutOptions.getFiles();
    final LockLevel lockLevel = checkoutOptions.getLockLevel();
    final boolean getLatest = checkoutOptions.isGetLatest();
    final boolean synchronousCheckout = checkoutOptions.isSynchronous();
    final boolean foregroundCheckout = checkoutOptions.isForeground();

    if (paths.length == 0) {
        return Status.OK_STATUS;
    }

    final ItemSpec[] itemSpecs = new ItemSpec[paths.length];
    for (int i = 0; i < paths.length; i++) {
        itemSpecs[i] = new ItemSpec(paths[i], RecursionType.NONE);
    }

    /*
     * Query get latest on checkout preference (and ensure server supports
     * the feature)
     */
    GetOptions getOptions = GetOptions.NO_DISK_UPDATE;
    PendChangesOptions pendChangesOptions = PendChangesOptions.NONE;

    if (repository.getWorkspace().getClient().getServerSupportedFeatures()
            .contains(SupportedFeatures.GET_LATEST_ON_CHECKOUT) && getLatest) {
        /*
         * If we're doing get latest on checkout, we need add the overwrite
         * flag: we need to set the file writable before this method exits
         * (in order for Eclipse to pick up the change, but we need to do
         * the get in another thread (so that we can clear the resource lock
         * on this file.) Thus we need to set the file writable, then fire a
         * synchronous worker to overwrite it. This is safe as this method
         * will ONLY be called when the file is readonly.
         */
        pendChangesOptions = PendChangesOptions.GET_LATEST_ON_CHECKOUT;
        getOptions = GetOptions.NONE;
    }

    /*
     * Build the checkout command - no need to query conflicts here, the
     * only conflicts that can arise from a pend edit are writable file
     * conflicts (when get latest on checkout is true.) This method is never
     * called for writable files.
     */
    final EditCommand editCommand = new EditCommand(repository, itemSpecs, lockLevel, null, getOptions,
            pendChangesOptions, false);

    /*
     * Pend changes in the foreground if get latest on checkout is
     * requested. A disk update may be required, so we want to block user
     * input.
     */
    if (synchronousCheckout || pendChangesOptions.contains(PendChangesOptions.GET_LATEST_ON_CHECKOUT)
            || forcedGetLatest) {
        /*
         * Wrap this edit command in one that disables the plugin's
         * automatic resource refresh behavior. This is required to avoid
         * deadlocks: the calling thread has taken a resource lock on the
         * resource it wishes to check out - the plugin will also require a
         * resource lock to do the refresh in another thread.
         */
        final ICommand wrappedEditCommand = new IgnoreResourceRefreshesEditCommand(editCommand);

        final IStatus editStatus = getSynchronousCommandExecutor(attemptUi, shell).execute(wrappedEditCommand);

        /* Refresh files on this thread, since it has the resource lock. */
        for (int i = 0; i < files.length; i++) {
            try {
                files[i].refreshLocal(IResource.DEPTH_ZERO, new NullProgressMonitor());
            } catch (final Throwable e) {
                log.warn(MessageFormat.format("Could not refresh {0}", files[i].getName()), e); //$NON-NLS-1$
            }
        }

        return editStatus;
    }
    /* Pend changes in the background */
    else {
        synchronized (backgroundFiles) {
            for (int i = 0; i < files.length; i++) {
                files[i].setReadOnly(false);
                backgroundFiles.put(files[i], new TFSFileModificationStatusData(files[i]));
            }
        }

        final JobCommandAdapter editJob = new JobCommandAdapter(editCommand);
        editJob.setPriority(Job.INTERACTIVE);
        editJob.setUser(foregroundCheckout);
        editJob.schedule();

        final Thread editThread = new Thread(new Runnable() {
            @Override
            public void run() {
                IStatus editStatus;

                try {
                    /*
                     * We don't need to safe-wait with
                     * ExtensionPointAsyncObjectWaiter because we're
                     * guaranteed not on the UI thread.
                     */
                    editJob.join();
                    editStatus = editJob.getResult();
                } catch (final Exception e) {
                    editStatus = new Status(IStatus.ERROR, TFSEclipseClientPlugin.PLUGIN_ID, 0, null, e);
                }

                if (editStatus.isOK()) {
                    synchronized (backgroundFiles) {
                        for (int i = 0; i < files.length; i++) {
                            final TFSFileModificationStatusData statusData = backgroundFiles.remove(files[i]);

                            if (statusData != null) {
                                log.info(MessageFormat.format("File {0} checked out in {1} seconds", //$NON-NLS-1$
                                        files[i],
                                        (int) ((System.currentTimeMillis() - statusData.getStartTime()) / 1000)));
                            }
                        }
                    }
                } else {
                    final List<TFSFileModificationStatusData> statusDataList = new ArrayList<TFSFileModificationStatusData>();

                    synchronized (backgroundFiles) {
                        for (int i = 0; i < files.length; i++) {
                            final TFSFileModificationStatusData statusData = backgroundFiles.remove(files[i]);

                            if (statusData != null) {
                                log.info(MessageFormat.format("File {0} failed to check out in {1} seconds", //$NON-NLS-1$
                                        files[i],
                                        (int) ((System.currentTimeMillis() - statusData.getStartTime()) / 1000)));
                                statusDataList.add(statusData);
                            }
                        }
                    }

                    /*
                     * Unfortunately, we have to roll back ALL FILES when an
                     * edit fails. We could (in theory) be better about this
                     * and use the non fatal listener in EditCommand to give
                     * us the paths that failed, but at the moment, the use
                     * case is only for one file at a time, so this is okay.
                     */
                    final TFSFileModificationStatusData[] statusData = statusDataList
                            .toArray(new TFSFileModificationStatusData[statusDataList.size()]);

                    getStatusReporter(attemptUi, shell).reportStatus(repository, statusData, editStatus);
                }
            }
        });

        editThread.start();

        return Status.OK_STATUS;
    }
}
From source file:com.esri.geoevent.solutions.adapter.cot.CoTAdapterInbound.java
private String filterOutDots(String s) throws Exception {
    try {
        String sStageOne = s.replace("h.", "").replace("t.", "").replace("r.", "").replace("q.", "")
                .replace("o.", "");
        String[] s2 = sStageOne.trim().split(" ");
        ArrayList<String> l1 = new ArrayList<String>();
        for (String item : s2) {
            l1.add(item);
        }
        ArrayList<String> l2 = new ArrayList<String>();
        Iterator<String> iterator = l1.iterator();
        while (iterator.hasNext()) {
            String o = (String) iterator.next();
            if (!l2.contains(o))
                l2.add(o);
        }
        StringBuffer sb = new StringBuffer();
        for (String item : l2) {
            sb.append(item);
            sb.append(" ");
        }
        return sb.toString().trim().toLowerCase();
    } catch (Exception e) {
        log.error(e);
        log.error(e.getStackTrace());
        throw (e);
    }
}
From source file:org.matsim.counts.algorithms.CountSimComparisonKMLWriter.java
/**
 * Writes the data to the file at the path given as String
 *
 * @param filename
 */
@Override
public void writeFile(final String filename) {
    log.info("Writing google earth file to " + filename);
    // init kml
    this.mainKml = kmlObjectFactory.createKmlType();
    this.mainDoc = kmlObjectFactory.createDocumentType();
    this.mainKml.setAbstractFeatureGroup(kmlObjectFactory.createDocument(mainDoc));
    // create the styles and the folders
    createStyles();
    // create a folder
    this.mainFolder = kmlObjectFactory.createFolderType();
    this.mainFolder.setName("Comparison, Iteration " + this.iterationNumber);
    this.mainDoc.getAbstractFeatureGroup().add(kmlObjectFactory.createFolder(this.mainFolder));
    // the writer
    this.writer = new KMZWriter(filename);

    try {
        // try to create the legend
        this.mainFolder.getAbstractFeatureGroup().add(kmlObjectFactory.createScreenOverlay(createLegend()));
    } catch (IOException e) {
        log.error("Cannot add legend to the KMZ file.", e);
    }
    try {
        // add the matsim logo to the kml
        this.mainFolder.getAbstractFeatureGroup()
                .add(kmlObjectFactory.createScreenOverlay(MatsimKMLLogo.writeMatsimKMLLogo(writer)));
    } catch (IOException e) {
        log.error("Cannot add logo to the KMZ file.", e);
    }
    try {
        // copy required icons to the kmz
        this.writer.addNonKMLFile(MatsimResource.getAsInputStream("icons/plus.png"), CROSSICON);
        this.writer.addNonKMLFile(MatsimResource.getAsInputStream("icons/minus.png"), MINUSICON);
    } catch (IOException e) {
        log.error("Could not copy copy plus-/minus-icons to the KMZ.", e);
    }

    // prepare folders for simRealPerHour-Graphs (top-left, xy-plots)
    FolderType simRealFolder = kmlObjectFactory.createFolderType();
    simRealFolder.setName("XY Comparison Plots");
    this.mainFolder.getAbstractFeatureGroup().add(kmlObjectFactory.createFolder(simRealFolder));

    // error graphs and awtv graph
    {
        ScreenOverlayType errorGraph = createBiasErrorGraph(filename);
        errorGraph.setVisibility(Boolean.TRUE);
        this.mainFolder.getAbstractFeatureGroup().add(kmlObjectFactory.createScreenOverlay(errorGraph));
    }
    {
        ScreenOverlayType errorGraph = createBoxPlotErrorGraph();
        if (errorGraph != null) {
            errorGraph.setVisibility(Boolean.FALSE);
            this.mainFolder.getAbstractFeatureGroup().add(kmlObjectFactory.createScreenOverlay(errorGraph));
        }
    }
    {
        ScreenOverlayType awtv = null;
        try {
            awtv = this.createAWTVGraph();
        } catch (Exception ee) {
            log.warn("generating awtv (average weekday traffic volumes) graph failed; printing stacktrace but continuing anyways ...");
            for (int ii = 0; ii < ee.getStackTrace().length; ii++) {
                log.info(ee.getStackTrace()[ii].toString());
            }
        }
        if (awtv != null) {
            awtv.setVisibility(Boolean.FALSE);
            this.mainFolder.getAbstractFeatureGroup().add(kmlObjectFactory.createScreenOverlay(awtv));
        }
    }

    // link graphs
    this.createCountsLoadCurveGraphs();

    // hourly data...
    for (int h = 1; h < 25; h++) {
        // the timespan for this hour
        TimeSpanType timespan = kmlObjectFactory.createTimeSpanType();
        timespan.setBegin("1999-01-01T" + Time.writeTime(((h - 1) * 3600)));
        timespan.setEnd("1999-01-01T" + Time.writeTime((h * 3600)));
        // first add the xyplot ("SimRealPerHourGraph") as overlay
        this.addCountsSimRealPerHourGraphs(simRealFolder, h, timespan);
        // add the placemarks for the links in this hour
        FolderType subfolder = kmlObjectFactory.createFolderType();
        subfolder.setName(createFolderName(h));
        subfolder.setAbstractTimePrimitiveGroup(kmlObjectFactory.createTimeSpan(timespan));
        this.mainFolder.getAbstractFeatureGroup().add(kmlObjectFactory.createFolder(subfolder));
        writeLinkData(this.countComparisonFilter.getCountsForHour(Integer.valueOf(h)), subfolder);
    }
    finish();
}
From source file:org.cagrid.identifiers.namingauthority.NamingAuthorityTestCase.java
@Test
public void testRegisterGSID() {
    // System.out.println("testing the RegisterGSID");
    LOG.info("testing testRegisterGSID method");
    // check no register access.
    SecurityInfo secInfo = new SecurityInfoImpl("Unknown");
    try {
        this.NamingAuthority.registerGSID(secInfo, null, null);
        fail("test registerGSID");
    } catch (Exception e) {
        LOG.info("passed registerGSID for " + secInfo.getUser() + ". MSG:\" " + e.getMessage() + "\"");
    }

    secInfo = new SecurityInfoImpl("User13");
    try {
        this.NamingAuthority.registerSite(secInfo, "a", "a", "1.0", "srikalyan", "srikalyan@semanticbits.com",
                "443", "SB");
        LOG.info("passed registerSite inorder to test registerGSID for user " + secInfo.getUser());
    } catch (Exception e) {
        StringBuffer sb = new StringBuffer();
        for (StackTraceElement temp : e.getStackTrace())
            sb.append(temp.getClassName() + "[" + temp.getLineNumber() + "]\n");
        fail("test registeSite for user " + secInfo.getUser() + ". MSG:\"" + sb.toString() + "\"");
    }

    String identifier = null;
    try {
        identifier = this.NamingAuthority.registerGSID(secInfo, null, null);
        LOG.info("passed registerGSID without parent and Identifier for " + secInfo.getUser() + ".");
    } catch (Exception e) {
        fail("test registerGSID without parent and Identifier for " + secInfo.getUser() + ". MSG: \""
                + e.getMessage() + "\"");
    }

    try {
        this.NamingAuthority.registerGSID(secInfo, null, new String[] { identifier, " ", "" });
        LOG.info("passed test registerGSID with parentIdentifiers not null for " + secInfo.getUser() + ".");
    } catch (Exception e) {
        fail("test registerGSID with parentIdentifiers not null for " + secInfo.getUser() + ". MSG: \""
                + e.getMessage() + "\"");
    }

    identifier = "5784d1dd-9373-533e-8086-fd479fbd564e";
    try {
        identifier = this.NamingAuthority.registerGSID(secInfo, identifier, null);
        LOG.info("passed test registerGSID with suggestedIdentifier not null for " + secInfo.getUser()
                + ". id: " + identifier);
    } catch (Exception e) {
        fail("test registerGSID with suggestedIdentifier not null for " + secInfo.getUser() + ". MSG: \""
                + e.getMessage() + "\"");
    }

    try {
        identifier = this.NamingAuthority.registerGSID(secInfo, identifier,
                new String[] { identifier, " ", "" });
        fail("test registerGSID with suggestedIdentifier and parent identifiers as not null and with suggestedIdentifier as one of the member in the parent identifier "
                + secInfo.getUser());
    } catch (Exception e) {
        LOG.info("test registerGSID with suggestedIdentifier and parent identifiers as not null and with suggestedIdentifier as one of the member in the parent identifier "
                + secInfo.getUser());
    }
}
From source file:com.rvantwisk.cnctools.controllers.CNCToolsController.java
/**
 * Handle the exception and show a stack trace, at least to inform the user that something went wrong.
 * This is also a last resort; if you can handle the exception in the dialog, please do so and instruct the user!
 *
 * @param exception
 */
public void handleException(Exception exception) {
    logger.error("generateGCode: General Exception", exception);
    final FXMLDialog dialog = screens.errorDialog();
    ErrorController controller = dialog.getController();

    StringBuilder sb = new StringBuilder();
    sb.append(exception.toString()).append("\n");
    for (StackTraceElement trace : exception.getStackTrace()) {
        if (trace.getClassName().startsWith("com.rvantwisk")) {
            sb.append(trace.getClassName()).append(":").append(trace.getMethodName()).append(":")
                    .append(trace.getLineNumber()).append("\n");
        }
    }
    controller.setMessage(sb.toString());
    dialog.showAndWait();
}
From source file:lu.fisch.canze.activities.MainActivity.java
public void loadSettings() {
    debug("MainActivity: loadSettings");
    try {
        SharedPreferences settings = getSharedPreferences(PREFERENCES_FILE, 0);
        bluetoothDeviceName = settings.getString("deviceName", null);
        bluetoothDeviceAddress = settings.getString("deviceAddress", null);
        gatewayUrl = settings.getString("gatewayUrl", null);
        // String dataFormat = settings.getString("dataFormat", "crdt");
        String deviceType = settings.getString("device", "Arduino");
        safeDrivingMode = settings.getBoolean("optSafe", true);
        bluetoothBackgroundMode = settings.getBoolean("optBTBackground", false);
        milesMode = settings.getBoolean("optMiles", false);
        dataExportMode = settings.getBoolean("optDataExport", false);
        debugLogMode = settings.getBoolean("optDebugLog", false);
        fieldLogMode = settings.getBoolean("optFieldLog", false);
        toastLevel = settings.getInt("optToast", 1);

        if (bluetoothDeviceName != null && !bluetoothDeviceName.isEmpty() && bluetoothDeviceName.length() > 4)
            BluetoothManager.getInstance()
                    .setDummyMode(bluetoothDeviceName.substring(0, 4).compareTo("HTTP") == 0);

        String carStr = settings.getString("car", "None");
        switch (carStr) {
        case "None":
            car = CAR_NONE;
            break;
        case "Zo":
        case "ZOE":
        case "ZOE Q210":
            car = CAR_ZOE_Q210;
            break;
        case "ZOE R240":
            car = CAR_ZOE_R240;
            break;
        case "ZOE Q90":
            car = CAR_ZOE_Q90;
            break;
        case "ZOE R90":
            car = CAR_ZOE_R90;
            break;
        case "Fluence":
            car = CAR_FLUENCE;
            break;
        case "Kangoo":
            car = CAR_KANGOO;
            break;
        case "X10":
            car = CAR_X10;
            break;
        }

        // as the settings may have changed, we need to reload different things

        // create a new device
        switch (deviceType) {
        case "Bob Due":
            device = new BobDue();
            break;
        case "ELM327":
            device = new ELM327();
            break;
        case "ELM327Http":
            device = new ELM327OverHttp();
            break;
        default:
            device = null;
            break;
        }

        // since the car type may have changed, reload the frame timings and fields
        Frames.getInstance().load();
        fields.load();

        if (device != null) {
            // initialise the connection
            device.initConnection();
            // register application wide fields
            // registerApplicationFields(); // now done in Fields.load
        }

        // after loading PREFERENCES we may have new values for "dataExportMode"
        dataExportMode = dataLogger.activate(dataExportMode);
    } catch (Exception e) {
        MainActivity.debug(e.getMessage());
        StackTraceElement[] st = e.getStackTrace();
        for (int i = 0; i < st.length; i++)
            MainActivity.debug(st[i].toString());
    }
}
From source file:org.metis.push.PusherBean.java
@Override
/**
 * This method handles an incoming message from the web socket client.
 */
public void handleTextMessage(WebSocketSession session, TextMessage message) throws Exception {

    if (session == null) {
        LOG.error(getBeanName() + ": null session");
        throw new Exception(getBeanName() + ":handleTextMessage, null session was received");
    }

    // the session should be in the registry
    WdsSocketSession wdsSession = getWdsSessions().get(session.getId());
    if (wdsSession == null) {
        LOG.error(getBeanName() + ":handleTextMessage, session with this id is not in registry: "
                + session.getId());
        session.close(new CloseStatus(SERVER_ERROR.getCode(),
                "ERROR, session with this id not in registry: " + session.getId()));
        return;
    }

    // some sort of message should have been received
    if (message == null) {
        LOG.error(getBeanName() + ":handleTextMessage, null message parameter");
        session.close(new CloseStatus(POLICY_VIOLATION.getCode(),
                "ERROR, session with this id gave a null message " + "parameter: " + session.getId()));
        return;
    }

    // we're supposed to receive a JSON object
    String jsonMsg = message.getPayload();
    if (jsonMsg == null) {
        LOG.error(getBeanName() + ":handleTextMessage, getPayload returns null or empty string");
        session.close(new CloseStatus(POLICY_VIOLATION.getCode(),
                "ERROR, session with this id did not return a payload: " + session.getId()));
        return;
    }
    if (jsonMsg.isEmpty()) {
        LOG.error(getBeanName() + ":handleTextMessage, getPayload returns zero-length string");
        session.close(new CloseStatus(POLICY_VIOLATION.getCode(),
                "ERROR, session with this id returns zero-length payload: " + session.getId()));
        return;
    }

    // dump the request if trace is on
    if (LOG.isTraceEnabled()) {
        LOG.trace(getBeanName() + ":***** processing new request *****");
        LOG.trace(getBeanName() + ":session id = " + session.getId());
        LOG.trace(getBeanName() + ":session remote address = " + session.getRemoteAddress().toString());
        LOG.trace(getBeanName() + ":session uri = " + session.getUri().toString());
        LOG.trace(getBeanName() + ":session json object = " + jsonMsg);
    }

    // parse the json object
    List<Map<String, String>> jParams = null;
    try {
        jParams = Utils.parseJson(jsonMsg);
    } catch (Exception exc) {
        LOG.error(getBeanName() + ":caught this " + "exception while parsing json object: " + exc.toString());
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
            LOG.error(getBeanName() + ": causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        session.close(new CloseStatus(SERVER_ERROR.getCode(),
                "ERROR, got this json parsing exception: " + exc.getMessage()));
        return;
    }

    if (jParams == null || jParams.isEmpty()) {
        LOG.error(getBeanName() + ":json parser returns null or " + "empty json array");
        session.close(new CloseStatus(SERVER_ERROR.getCode(),
                "ERROR, json parser returns null or empty json array"));
        return;
    }

    // if trace is on, dump the params (if any) to the log
    if (LOG.isDebugEnabled()) {
        LOG.debug(getBeanName() + ": handleRequestInternal, received these params: "
                + jParams.get(0).toString());
    }

    // get the command portion of the json message
    Map<String, String> map = jParams.get(0);
    String command = map.remove(WS_COMMAND);

    if (command == null) {
        LOG.error(getBeanName() + ":command field not present");
        session.close(POLICY_VIOLATION);
        session.close(new CloseStatus(POLICY_VIOLATION.getCode(),
                "ERROR, command string not present or improperly set: " + command));
        return;
    }

    if (!command.equals(WS_SUBSCRIBE) && !command.equals(WS_PING)) {
        LOG.error(getBeanName() + ":received this unknown command = " + command);
        session.close(POLICY_VIOLATION);
        session.close(new CloseStatus(POLICY_VIOLATION.getCode(),
                "ERROR, received this unknown command = " + command));
        return;
    }

    // Get the SQL Job, if any, that this session is currently subscribed to
    SqlJob job = wdsSession.getMyJob();

    // if this is a ping command, return session's current subscription
    if (command.equals(WS_PING)) {
        LOG.debug(getBeanName() + ":received ping command");
        List<Map<String, Object>> response = new ArrayList<Map<String, Object>>();
        Map<String, Object> map0 = new HashMap<String, Object>();
        if (job != null) {
            LOG.debug(getBeanName() + ": client is subscribed");
            map0.put(WS_STATUS, WS_SUBSCRIBED);
            map = job.getParams();
            if (map != null && !map.isEmpty()) {
                for (String key : map.keySet()) {
                    map0.put(key, map.get(key));
                }
            }
        } else {
            LOG.debug(getBeanName() + ": client is not subscribed");
            map0.put(WS_STATUS, WS_OK);
        }
        response.add(map0);
        // send response back to client
        session.sendMessage(new TextMessage(Utils.generateJson(response)));
        return;
    }

    // find a sql statement that matches the incoming session request's
    // params
    SqlStmnt sqlStmnt = (map == null || map.isEmpty()) ? SqlStmnt.getMatch(getSqlStmnts4Get(), null)
            : SqlStmnt.getMatch(getSqlStmnts4Get(), map.keySet());

    // if getMatch could not find a match, then return error to client
    if (sqlStmnt == null) {
        LOG.error(getBeanName() + ":ERROR, unable to find sql " + "statement with this map: " + map.toString());
        List<Map<String, Object>> response = new ArrayList<Map<String, Object>>();
        Map<String, Object> map0 = new HashMap<String, Object>();
        if (map != null && !map.isEmpty()) {
            for (String key : map.keySet()) {
                map0.put(key, map.get(key));
            }
        }
        map0.put(WS_STATUS, WS_NOT_FOUND);
        // send response back to client
        session.sendMessage(new TextMessage(Utils.generateJson(response)));
        return;
    }

    // other than a ping, the only other command from the client is a
    // subscription command

    // Does this session already exist in one of the sql jobs? Note that the
    // client can switch subscriptions.
    if (job != null) {
        // the session pertains to a job, but does that job's map match
        // that of this session's subscription request
        if (job.isParamMatch(map)) {
            // if so, we're done
            return;
        } else {
            // else remove this session from that job - the client is
            // switching subscriptions
            job.removeSession(wdsSession.getId());
        }
    }

    mainLock.lock();
    try {
        // if we've gotten this far, the session does not pertain to a job
        // or it is a subscription change. so we now need to find an
        // existing job whose params match that of the incoming session. if
        // no job was found, then create and start one
        if (sqlStmnt.findSqlJob(map, wdsSession) == null) {
            sqlStmnt.createSqlJob(map, wdsSession);
        }
    } finally {
        mainLock.unlock();
    }
}
From source file:com.esri.geoevent.solutions.adapter.cot.CoTAdapterInbound.java
@SuppressWarnings("incomplete-switch")
private void traverseBranch(Node node, FieldGroup fieldGroup, FieldDefinition fieldDefinition)
        throws FieldException {
    try {
        if (node == null)
            return;
        // System.out.println("Examining node named \"" + node.getNodeName() + "\"");
        FieldType fieldType = fieldDefinition.getType();
        switch (fieldType) {
        case Group:
            FieldGroup childFieldGroup = fieldGroup.createFieldGroup(fieldDefinition.getName());
            fieldGroup.setField(fieldDefinition.getName(), childFieldGroup);
            for (FieldDefinition childFieldDefinition : fieldDefinition.getChildren()) {
                String childName = childFieldDefinition.getName();
                List<Node> childNodes = findChildNodes(node, childName);
                if (childNodes.size() > 0) {
                    for (Node childNode : childNodes)
                        traverseBranch(childNode, childFieldGroup, childFieldDefinition);
                } else
                    traverseBranch(node, childFieldGroup, childFieldDefinition);
            }
            break;
        case String:
            if (fieldDefinition.getName().equals("#text")) {
                String value = node.getNodeValue();
                if (value != null)
                    fieldGroup.setField(fieldDefinition.getName(), value);
            } else {
                String value = getAttribute(node, fieldDefinition.getName());
                if (value != null)
                    fieldGroup.setField(fieldDefinition.getName(), value);
            }
            break;
        case Integer:
            String value = getAttribute(node, fieldDefinition.getName());
            if (value != null)
                fieldGroup.setField(fieldDefinition.getName(), new Integer(value));
            break;
        case Double:
            value = getAttribute(node, fieldDefinition.getName());
            if (value != null)
                fieldGroup.setField(fieldDefinition.getName(), new Double(value));
            break;
        case Boolean:
            value = getAttribute(node, fieldDefinition.getName());
            if (value != null)
                fieldGroup.setField(fieldDefinition.getName(), new Boolean(value));
            break;
        case Date:
            value = getAttribute(node, fieldDefinition.getName());
            if (value != null) {
                Date date = new Date();
                try {
                    date = CoTUtilities.parseCoTDate(value);
                } catch (Exception ex) {
                }
                fieldGroup.setField(fieldDefinition.getName(), date);
            }
            break;
        case Geometry:
            MapGeometry geometry = createGeometry(node);
            if (geometry != null)
                fieldGroup.setField(fieldDefinition.getName(), geometry);
            break;
        case Long:
            value = getAttribute(node, fieldDefinition.getName());
            if (value != null)
                fieldGroup.setField(fieldDefinition.getName(), new Long(value));
            break;
        case Short:
            value = getAttribute(node, fieldDefinition.getName());
            if (value != null)
                fieldGroup.setField(fieldDefinition.getName(), new Integer(value));
            break;
        }
    } catch (Exception e) {
        log.error(e);
        log.error(e.getStackTrace());
    }
}
From source file:org.onosproject.sse.SseTopologyViewWebSocket.java
@Override
public void onMessage(String data) {
    lastActive = System.currentTimeMillis();
    try {
        processMessage((ObjectNode) mapper.reader().readTree(data));
        // log.info("HLKSSE event is {}", (ObjectNode) mapper.reader().readTree(data));
    } catch (Exception e) {
        // e.printStackTrace();
        log.warn("Unable to parse GUI request {} due to {}", data, e.getStackTrace());
        log.debug("Boom!!!", e);
    }
}
From source file:com.wso2telco.dep.reportingservice.BillingHostObject.java
/**
 * Js function_get dashboard time consumers by api.
 *
 * @param cx the cx
 * @param thisObj the this obj
 * @param args the args
 * @param funObj the fun obj
 * @return the native object
 * @throws APIManagementException the API management exception
 */
public static NativeObject jsFunction_getDashboardTimeConsumersByAPI(Context cx, Scriptable thisObj,
        Object[] args, Function funObj) throws APIManagementException {

    String timeRange = args[0].toString();
    String operator = args[1].toString();
    String subscriber = args[2].toString();

    if (operator.equalsIgnoreCase("All")) {
        operator = HostObjectConstants.ALL_OPERATORS;
    } else {
        operator = operator.toUpperCase();
    }
    if (subscriber.equalsIgnoreCase("All")) {
        subscriber = HostObjectConstants.ALL_SUBSCRIBERS;
    }

    Calendar now = Calendar.getInstance();
    String toDate = getCurrentTime(now);
    String fromDate = subtractTimeRange(now, timeRange);

    NativeObject apiConsumpData = new NativeObject();
    NativeArray slowestApis = new NativeArray(0);
    NativeArray chartData = new NativeArray(0);

    try {
        Map<String, String[]> responseMap = SbHostObjectUtils.getTimeConsumptionForAllAPIs(operator, subscriber,
                fromDate, toDate);
        short i = 0;
        for (Map.Entry<String, String[]> timeEntry : responseMap.entrySet()) {
            NativeObject slowestApiInfo = new NativeObject();
            NativeObject chartDataForApi = new NativeObject();
            String[] data = timeEntry.getValue();
            slowestApiInfo.put("apiName", slowestApiInfo, timeEntry.getKey());
            slowestApiInfo.put("highestAvgConsumption", slowestApiInfo, data[1]);
            chartDataForApi.put("apiName", chartDataForApi, timeEntry.getKey());
            chartDataForApi.put("totalAvgConsumption", chartDataForApi, data[2]);
            slowestApis.put(i, slowestApis, slowestApiInfo);
            chartData.put(i, chartData, chartDataForApi);
            i++;
        }
        apiConsumpData.put("slowestApis", apiConsumpData, slowestApis);
        apiConsumpData.put("chartData", apiConsumpData, chartData);
    } catch (Exception e) {
        log.error("Error occured getAllResponseTimes ");
        log.error(e.getStackTrace());
        handleException("Error occurred while populating Response Time graph.", e);
    }
    return apiConsumpData;
}