List of usage examples for java.lang.UnsupportedOperationException.getMessage()
public String getMessage()
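getMessage() returns the detail message supplied when the exception was constructed, or null if none was given. A minimal, self-contained sketch of the typical call site (the printed text is illustrative; an unmodifiable collection may construct the exception without any detail message, in which case getMessage() returns null):

import java.util.Collections;
import java.util.List;

public class GetMessageDemo {
    public static void main(String[] args) {
        List<String> readOnly = Collections.unmodifiableList(List.of("a", "b"));
        try {
            readOnly.add("c"); // unmodifiable view: this always throws
        } catch (UnsupportedOperationException e) {
            // getMessage() may be null when the exception carries no detail message
            System.out.println("operation rejected: " + e.getMessage());
        }
    }
}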
From source file:org.sakaiproject.entitybroker.rest.EntityEncodingManager.java
/**
 * Handles the internal encoding of data into an entity object.
 *
 * @param ref the entity reference
 * @param format the format which the input is encoded in
 * @param input the data being input
 * @return the entity object based on the data
 * @throws FormatUnsupportedException if you do not handle this format type (passes control to the internal handlers)
 * @throws EntityEncodingException if you cannot encode the received data into an entity
 * @throws IllegalArgumentException if any of the arguments are invalid
 * @throws IllegalStateException for all other failures
 */
@SuppressWarnings("unchecked")
public Object internalInputTranslator(EntityReference ref, String format, InputStream input,
        HttpServletRequest req) {
    Object entity = null;
    Inputable inputable = entityProviderManager.getProviderByPrefixAndCapability(ref.getPrefix(),
            Inputable.class);
    if (inputable != null) {
        // get the current entity object or a sample
        Object current = entityBrokerManager.getSampleEntityObject(ref.getPrefix(), ref.getId());
        if (current != null) {
            if (Formats.HTML.equals(format) || format == null || "".equals(format)) {
                // html request handled specially
                if (req != null) {
                    Map<String, String[]> params = req.getParameterMap();
                    if (params != null && params.size() > 0) {
                        entity = current;
                        try {
                            ReflectUtils.getInstance().populateFromParams(entity, params);
                        } catch (RuntimeException e) {
                            throw new EntityEncodingException("Unable to populate bean for ref (" + ref
                                    + ") from request: " + e.getMessage(), ref + "", e);
                        }
                    } else {
                        // no request params, bad request
                        throw new EntityException(
                                "No request params for html input request (there must be at least one) for reference: "
                                        + ref,
                                ref.toString(), HttpServletResponse.SC_BAD_REQUEST);
                    }
                }
            } else {
                // all other formats
                if (input == null) {
                    // no input stream, bad request
                    throw new EntityException(
                            "No input for input translation (input cannot be null) for reference: " + ref,
                            ref.toString(), HttpServletResponse.SC_BAD_REQUEST);
                } else {
                    String data = StringUtils.makeStringFromInputStream(input);
                    Map<String, Object> decoded = null;
                    try {
                        decoded = decodeData(data, format);
                    } catch (IllegalArgumentException iae) {
                        throw new EntityEncodingException("No encoder available for the given format ("
                                + format + "), ref=" + ref + ":" + iae.getMessage(), ref.toString(), iae);
                    } catch (UnsupportedOperationException uoe) {
                        throw new EntityEncodingException("Failure during internal input encoding of entity: "
                                + ref + " to format (" + format + "):" + uoe.getMessage(), ref.toString(), uoe);
                    }
                    entity = current;
                    // handle the special case where the JSON was created by xstream or something else
                    // that puts the data inside an object with a "root"
                    if (decoded.size() == 1 && decoded.containsKey(ref.getPrefix())) {
                        Object o = decoded.get(ref.getPrefix());
                        if (o instanceof Map) {
                            decoded = (Map<String, Object>) o;
                        }
                    }
                    try {
                        ReflectUtils.getInstance().populate(entity, decoded);
                    } catch (RuntimeException e) {
                        throw new EntityEncodingException("Unable to populate bean for ref (" + ref
                                + ") from data: " + decoded + ":" + e.getMessage(), ref + "", e);
                    }
                }
            }
        }
    } else {
        throw new IllegalArgumentException("This entity (" + ref + ") does not allow input translation");
    }
    if (entity == null) {
        throw new EntityException("Unable to encode entity from input for reference: " + ref, ref.toString(),
                HttpServletResponse.SC_BAD_REQUEST);
    }
    return entity;
}
From source file:com.cloudera.beeswax.BeeswaxServiceImpl.java
/**
 * Submit a query and return a handle (QueryHandle). The query runs asynchronously.
 * Queries can be long-lasting, so we push the execution into a new state.
 * Compiling happens in the current context so we report errors early.
 */
@Override
public QueryHandle query(final Query query) throws BeeswaxException {
    // First, create an id and reset the LogContext
    String uuid = UUID.randomUUID().toString();
    final QueryHandle handle = new QueryHandle(uuid, uuid);
    final LogContext lc = LogContext.registerCurrentThread(handle.log_context);
    lc.resetLog();

    // Make an administrative record
    final RunningQueryState state = new RunningQueryState(query, lc);

    // acquire delegation token if needed
    try {
        state.setDelegationToken(getDelegationTokenFromMetaStore(query.hadoop_user));
    } catch (UnsupportedOperationException e) {
        // If delegation tokens are not supported in this environment, ignore it
    } catch (HiveException e) {
        throw new BeeswaxException(e.getMessage(), handle.log_context, handle);
    } catch (MetaException e) {
        throw new BeeswaxException(e.getMessage(), handle.log_context, handle);
    } catch (TException e) {
        throw new BeeswaxException(e.getMessage(), handle.log_context, handle);
    }

    try {
        return doWithState(state, new PrivilegedExceptionAction<QueryHandle>() {
            public QueryHandle run() throws Exception {
                state.setQueryHandle(handle);
                runningQueries.put(handle.id, state);
                state.initialize();
                // All kinds of things can go wrong when we compile it. So catch all.
                try {
                    state.compile();
                } catch (BeeswaxException perr) {
                    state.saveException(perr);
                    throw perr;
                } catch (Throwable t) {
                    state.saveException(t);
                    throw new BeeswaxException(t.toString(), handle.log_context, handle);
                }
                // Now spin off the query.
                state.submitTo(executor, lc);
                return handle;
            }
        });
    } catch (BeeswaxException e) {
        throw e;
    }
}
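The example above treats UnsupportedOperationException as "optional capability not available" and swallows it, while other failures are rethrown with their getMessage() text. A minimal, self-contained sketch of that pattern (the TokenSource interface and names here are illustrative only, not taken from the Beeswax code):

import java.util.Optional;

public class OptionalCapabilityDemo {

    interface TokenSource {
        String fetchToken();
    }

    static Optional<String> tryFetchToken(TokenSource source) {
        try {
            return Optional.of(source.fetchToken());
        } catch (UnsupportedOperationException e) {
            // The capability is simply not available here; treat it as "no token".
            return Optional.empty();
        } catch (RuntimeException e) {
            // Other failures are real errors; surface their original message.
            throw new IllegalStateException("Token fetch failed: " + e.getMessage(), e);
        }
    }

    public static void main(String[] args) {
        TokenSource unsupported = () -> { throw new UnsupportedOperationException("tokens disabled"); };
        System.out.println(tryFetchToken(unsupported)); // prints Optional.empty
    }
}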
From source file:info.magnolia.cms.core.Content.java
/**
 * UUID of the node referenced by this object.
 * @return uuid
 */
public String getUUID() {
    try {
        return this.node.getUUID();
    } catch (UnsupportedOperationException e) {
        log.error(e.getMessage());
    } catch (RepositoryException re) {
        log.error("Exception caught", re);
    }
    return StringUtils.EMPTY;
}
From source file:org.apache.hadoop.hdfs.server.datanode.DWRRDataXceiver.java
@Override
public void releaseShortCircuitFds(SlotId slotId) throws IOException {
    boolean success = false;
    try {
        String error;
        Status status;
        try {
            datanode.shortCircuitRegistry.unregisterSlot(slotId);
            error = null;
            status = Status.SUCCESS;
        } catch (UnsupportedOperationException e) {
            error = "unsupported operation";
            status = Status.ERROR_UNSUPPORTED;
        } catch (Throwable e) {
            error = e.getMessage();
            status = Status.ERROR_INVALID;
        }
        ReleaseShortCircuitAccessResponseProto.Builder bld = ReleaseShortCircuitAccessResponseProto
                .newBuilder();
        bld.setStatus(status);
        if (error != null) {
            bld.setError(error);
        }
        bld.build().writeDelimitedTo(socketOut);
        success = true;
    } finally {
        if (ClientTraceLog.isInfoEnabled()) {
            BlockSender.ClientTraceLog.info(String.format(
                    "src: 127.0.0.1, dest: 127.0.0.1, op: RELEASE_SHORT_CIRCUIT_FDS,"
                            + " shmId: %016x%016x, slotIdx: %d, srvID: %s, success: %b",
                    slotId.getShmId().getHi(), slotId.getShmId().getLo(), slotId.getSlotIdx(),
                    datanode.getDatanodeUuid(), success));
        }
    }
}
From source file:org.jenkinsci.plugins.structs.describable.DescribableModelTest.java
@Issue("JENKINS-46122") @Test/*from ww w . ja va 2 s. com*/ public void resolveSymbolOnWrongBaseClass() throws Exception { try { DescribableModel.resolveClass(Tech.class, null, "rod"); fail("No symbol for Tech should exist."); } catch (UnsupportedOperationException e) { assertEquals("no known implementation of " + Tech.class + " is using symbol rod", e.getMessage()); } }
From source file:org.structr.rest.servlet.JsonRestServlet.java
@Override
protected void doPost(final HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    final List<RestMethodResult> results = new LinkedList<>();
    final SecurityContext securityContext;
    final Authenticator authenticator;
    final Resource resource;

    try {
        // first thing to do!
        request.setCharacterEncoding("UTF-8");
        response.setCharacterEncoding("UTF-8");
        response.setContentType("application/json; charset=utf-8;");

        // isolate request authentication in a transaction
        try (final Tx tx = StructrApp.getInstance().tx()) {
            authenticator = config.getAuthenticator();
            securityContext = authenticator.initializeAndExamineRequest(request, response);
            tx.success();
        }

        final App app = StructrApp.getInstance(securityContext);
        final String input = IOUtils.toString(request.getReader());
        final IJsonInput jsonInput = cleanAndParseJsonString(app, input);

        if (securityContext != null) {

            // isolate resource authentication
            try (final Tx tx = app.tx()) {
                resource = ResourceHelper.applyViewTransformation(request, securityContext,
                        ResourceHelper.optimizeNestedResourceChain(
                                ResourceHelper.parsePath(securityContext, request, resourceMap, propertyView)),
                        propertyView);
                authenticator.checkResourceAccess(securityContext, request, resource.getResourceSignature(),
                        propertyView.get(securityContext));
                tx.success();
            }

            // isolate doPost
            boolean retry = true;
            while (retry) {

                if (resource.createPostTransaction()) {

                    try (final Tx tx = app.tx()) {
                        for (JsonInput propertySet : jsonInput.getJsonInputs()) {
                            results.add(resource.doPost(convertPropertySetToMap(propertySet)));
                        }
                        tx.success();
                        retry = false;
                    } catch (DeadlockDetectedException ddex) {
                        retry = true;
                    }

                } else {

                    try {
                        for (JsonInput propertySet : jsonInput.getJsonInputs()) {
                            results.add(resource.doPost(convertPropertySetToMap(propertySet)));
                        }
                        retry = false;
                    } catch (DeadlockDetectedException ddex) {
                        retry = true;
                    }
                }
            }

            // set default value for property view
            propertyView.set(securityContext, config.getDefaultPropertyView());

            // isolate write output
            try (final Tx tx = app.tx()) {

                if (!results.isEmpty()) {

                    final RestMethodResult result = results.get(0);
                    final int resultCount = results.size();

                    if (result != null) {

                        if (resultCount > 1) {

                            for (final RestMethodResult r : results) {
                                final GraphObject objectCreated = r.getContent().get(0);
                                if (!result.getContent().contains(objectCreated)) {
                                    result.addContent(objectCreated);
                                }
                            }

                            // remove Location header if more than one object was
                            // written because it may only contain a single URL
                            result.addHeader("Location", null);
                        }

                        result.commitResponse(gson.get(), response);
                    }
                }

                tx.success();
            }

        } else {

            // isolate write output
            try (final Tx tx = app.tx()) {
                new RestMethodResult(HttpServletResponse.SC_FORBIDDEN).commitResponse(gson.get(), response);
                tx.success();
            }
        }

    } catch (FrameworkException frameworkException) {

        // set status & write JSON output
        response.setStatus(frameworkException.getStatus());
        gson.get().toJson(frameworkException, response.getWriter());
        response.getWriter().println();

    } catch (JsonSyntaxException jsex) {

        logger.log(Level.WARNING, "POST: Invalid JSON syntax", jsex.getMessage());

        int code = HttpServletResponse.SC_BAD_REQUEST;
        response.setStatus(code);
        response.getWriter()
                .append(RestMethodResult.jsonError(code, "JsonSyntaxException in POST: " + jsex.getMessage()));

    } catch (JsonParseException jpex) {

        logger.log(Level.WARNING, "Unable to parse JSON string", jpex.getMessage());

        int code = HttpServletResponse.SC_BAD_REQUEST;
        response.setStatus(code);
        response.getWriter()
                .append(RestMethodResult.jsonError(code, "JsonParseException in POST: " + jpex.getMessage()));

    } catch (UnsupportedOperationException uoe) {

        logger.log(Level.WARNING, "POST not supported");

        int code = HttpServletResponse.SC_BAD_REQUEST;
        response.setStatus(code);
        response.getWriter()
                .append(RestMethodResult.jsonError(code, "POST not supported: " + uoe.getMessage()));

    } catch (Throwable t) {

        logger.log(Level.WARNING, "Exception in POST", t);

        int code = HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
        response.setStatus(code);
        response.getWriter()
                .append(RestMethodResult.jsonError(code, "JsonSyntaxException in POST: " + t.getMessage()));

    } finally {

        try {
            //response.getWriter().flush();
            response.getWriter().close();
        } catch (Throwable t) {
            logger.log(Level.WARNING, "Unable to flush and close response: {0}", t.getMessage());
        }
    }
}
From source file:org.apache.solr.handler.dataimport.DataImporter.java
public DIHConfiguration loadDataConfig(InputSource configFile) {

    DIHConfiguration dihcfg = null;
    try {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();

        // only enable xinclude if a SolrCore and SystemId is present (makes no sense otherwise)
        if (core != null && configFile.getSystemId() != null) {
            try {
                dbf.setXIncludeAware(true);
                dbf.setNamespaceAware(true);
            } catch (UnsupportedOperationException e) {
                LOG.warn("XML parser doesn't support XInclude option");
            }
        }

        DocumentBuilder builder = dbf.newDocumentBuilder();
        if (core != null)
            builder.setEntityResolver(new SystemIdResolver(core.getResourceLoader()));
        builder.setErrorHandler(XMLLOG);
        Document document;
        try {
            document = builder.parse(configFile);
        } finally {
            // some XML parsers are broken and don't close the byte stream (but they should according to spec)
            IOUtils.closeQuietly(configFile.getByteStream());
        }

        dihcfg = readFromXml(document);
        LOG.info("Data Configuration loaded successfully");
    } catch (Exception e) {
        throw new DataImportHandlerException(SEVERE, "Data Config problem: " + e.getMessage(), e);
    }
    for (Entity e : dihcfg.getEntities()) {
        if (e.getAllAttributes().containsKey(SqlEntityProcessor.DELTA_QUERY)) {
            isDeltaImportSupported = true;
            break;
        }
    }
    return dihcfg;
}
From source file:org.apache.hadoop.hdfs.server.datanode.DWRRDataXceiver.java
@Override
public void requestShortCircuitShm(String clientName) throws IOException {
    NewShmInfo shmInfo = null;
    boolean success = false;
    DomainSocket sock = peer.getDomainSocket();
    try {
        if (sock == null) {
            sendShmErrorResponse(ERROR_INVALID, "Bad request from " + peer + ": must request a shared "
                    + "memory segment over a UNIX domain socket.");
            return;
        }
        try {
            shmInfo = datanode.shortCircuitRegistry.createNewMemorySegment(clientName, sock);
            // After calling #{ShortCircuitRegistry#createNewMemorySegment}, the
            // socket is managed by the DomainSocketWatcher, not the DWRRDataXceiver.
            releaseSocket();
        } catch (UnsupportedOperationException e) {
            sendShmErrorResponse(ERROR_UNSUPPORTED, "This datanode has not been configured to support "
                    + "short-circuit shared memory segments.");
            return;
        } catch (IOException e) {
            sendShmErrorResponse(ERROR, "Failed to create shared file descriptor: " + e.getMessage());
            return;
        }
        sendShmSuccessResponse(sock, shmInfo);
        success = true;
    } finally {
        if (ClientTraceLog.isInfoEnabled()) {
            if (success) {
                BlockSender.ClientTraceLog.info(String.format(
                        "cliID: %s, src: 127.0.0.1, dest: 127.0.0.1, " + "op: REQUEST_SHORT_CIRCUIT_SHM,"
                                + " shmId: %016x%016x, srvID: %s, success: true",
                        clientName, shmInfo.shmId.getHi(), shmInfo.shmId.getLo(), datanode.getDatanodeUuid()));
            } else {
                BlockSender.ClientTraceLog.info(String.format(
                        "cliID: %s, src: 127.0.0.1, dest: 127.0.0.1, " + "op: REQUEST_SHORT_CIRCUIT_SHM, "
                                + "shmId: n/a, srvID: %s, success: false",
                        clientName, datanode.getDatanodeUuid()));
            }
        }
        if ((!success) && (peer == null)) {
            // If we failed to pass the shared memory segment to the client,
            // close the UNIX domain socket now. This will trigger the
            // DomainSocketWatcher callback, cleaning up the segment.
            IOUtils.cleanup(null, sock);
        }
        IOUtils.cleanup(null, shmInfo);
    }
}
From source file:uk.ac.diamond.scisoft.analysis.dataset.MathsTest.java
@Test
public void testRemainder() {
    AbstractDataset a, b, c = null, d = null;
    Complex zv = new Complex(-3.5, 0);
    final double dv = zv.getReal();
    long start;
    int n;
    for (String dn : classes.keySet()) {
        final int dtype = classes.get(dn);
        Random.seed(12735L);

        for (String en : classes.keySet()) {
            final int etype = classes.get(en);
            System.out.println("Remaindering " + dn + " by " + en);

            n = 32;
            for (int i = 0; i < SITER; i++) {
                if (dtype < AbstractDataset.ARRAYINT8) {
                    a = Random.randn(n).imultiply(100);
                    a = a.cast(dtype);
                } else {
                    AbstractDataset[] aa = new AbstractDataset[ISIZEA];
                    for (int j = 0; j < ISIZEA; j++) {
                        aa[j] = Random.randn(n).imultiply(100);
                    }
                    a = DatasetUtils.cast(aa, dtype);
                }
                if (etype < AbstractDataset.ARRAYINT8) {
                    b = Random.randn(n).imultiply(100);
                    b = b.cast(etype);
                } else {
                    AbstractDataset[] ab = new AbstractDataset[ISIZEB];
                    for (int j = 0; j < ISIZEB; j++) {
                        ab[j] = Random.randn(n).imultiply(100);
                    }
                    b = DatasetUtils.cast(ab, etype);
                }

                start = -System.nanoTime();
                try {
                    c = Maths.remainder(a, b);
                } catch (IllegalArgumentException e) {
                    System.out.println("Could not perform this operation: " + e.getMessage());
                    continue;
                } catch (UnsupportedOperationException ue) {
                    System.out.println("Could not perform this operation: " + ue.getMessage());
                    continue;
                }
                start += System.nanoTime();
                double ntime = ((double) start) / c.getSize();

                d = AbstractDataset.zeros(c);
                start = -System.nanoTime();
                IndexIterator ita = a.getIterator();
                IndexIterator itb = b.getIterator();
                int j = 0;
                if (dtype < AbstractDataset.ARRAYINT8 && etype < AbstractDataset.ARRAYINT8) {
                    while (ita.hasNext() && itb.hasNext()) {
                        d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
                                % ((Number) b.getObjectAbs(itb.index)).doubleValue());
                    }
                } else {
                    final double[] answer = new double[MAXISIZE];
                    final int is = d.getElementsPerItem();

                    if (a.getElementsPerItem() < is) {
                        while (ita.hasNext() && itb.hasNext()) {
                            final double xa = a.getElementDoubleAbs(ita.index);
                            for (int k = 0; k < ISIZEB; k++) {
                                answer[k] = xa % b.getElementDoubleAbs(itb.index + k);
                            }
                            d.setObjectAbs(j, answer);
                            j += is;
                        }
                    } else if (b.getElementsPerItem() < is) {
                        while (ita.hasNext() && itb.hasNext()) {
                            final double xb = b.getElementDoubleAbs(itb.index);
                            for (int k = 0; k < ISIZEA; k++) {
                                answer[k] = a.getElementDoubleAbs(ita.index + k) % xb;
                            }
                            d.setObjectAbs(j, answer);
                            j += is;
                        }
                    } else {
                        while (ita.hasNext() && itb.hasNext()) {
                            for (int k = 0; k < is; k++) {
                                answer[k] = a.getElementDoubleAbs(ita.index + k)
                                        % b.getElementDoubleAbs(itb.index + k);
                            }
                            d.setObjectAbs(j, answer);
                            j += is;
                        }
                    }
                }
                start += System.nanoTime();
                double otime = ((double) start) / d.getSize();

                System.out.printf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime,
                        100. * (otime - ntime) / otime);

                checkDatasets(a, b, c, d);

                n *= SSTEP;
            }
        }

        Random.seed(12735L);
        n = 32;
        System.out.println("Remaindering " + dn + " by constant");
        for (int i = 0; i < SITER; i++) {
            if (dtype < AbstractDataset.ARRAYINT8) {
                a = Random.randn(n);
                a.imultiply(100);
                a = a.cast(dtype);
            } else {
                AbstractDataset[] aa = new AbstractDataset[ISIZEA];
                for (int j = 0; j < ISIZEA; j++) {
                    aa[j] = Random.randn(n).imultiply(100);
                }
                a = DatasetUtils.cast(aa, dtype);
            }

            start = -System.nanoTime();
            try {
                c = Maths.remainder(a, dv);
            } catch (IllegalArgumentException e) {
                System.out.println("Could not perform this operation: " + e.getMessage());
                continue;
            } catch (UnsupportedOperationException ue) {
                System.out.println("Could not perform this operation: " + ue.getMessage());
                continue;
            }
            start += System.nanoTime();
            double ntime = ((double) start) / c.getSize();

            d = AbstractDataset.zeros(c);
            start = -System.nanoTime();
            IndexIterator ita = a.getIterator();
            int j = 0;
            if (dtype < AbstractDataset.ARRAYINT8) {
                while (ita.hasNext()) {
                    d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() % dv);
                }
            } else {
                final double[] answer = new double[ISIZEA];
                while (ita.hasNext()) {
                    for (int k = 0; k < ISIZEA; k++) {
                        answer[k] = a.getElementDoubleAbs(ita.index + k) % dv;
                    }
                    d.setObjectAbs(j, answer);
                    j += ISIZEA;
                }
            }
            start += System.nanoTime();
            double otime = ((double) start) / d.getSize();

            System.out.printf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime,
                    100. * (otime - ntime) / otime);

            checkDatasets(a, dv, c, d);

            n *= SSTEP;
        }

        Random.seed(12735L);
        n = 32;
        System.out.println("Remaindering constant by " + dn);
        for (int i = 0; i < SITER; i++) {
            if (dtype < AbstractDataset.ARRAYINT8) {
                a = Random.randn(n);
                a.imultiply(100);
                a = a.cast(dtype);
            } else {
                AbstractDataset[] aa = new AbstractDataset[ISIZEA];
                for (int j = 0; j < ISIZEA; j++) {
                    aa[j] = Random.randn(n).imultiply(100);
                }
                a = DatasetUtils.cast(aa, dtype);
            }

            start = -System.nanoTime();
            try {
                c = Maths.remainder(dv, a);
            } catch (IllegalArgumentException e) {
                System.out.println("Could not perform this operation: " + e.getMessage());
                continue;
            } catch (UnsupportedOperationException ue) {
                System.out.println("Could not perform this operation: " + ue.getMessage());
                continue;
            }
            start += System.nanoTime();
            double ntime = ((double) start) / c.getSize();

            d = AbstractDataset.zeros(c);
            start = -System.nanoTime();
            IndexIterator ita = a.getIterator();
            int j = 0;
            if (dtype < AbstractDataset.ARRAYINT8) {
                while (ita.hasNext()) {
                    d.setObjectAbs(j++, dv % ((Number) a.getObjectAbs(ita.index)).doubleValue());
                }
            } else {
                final double[] answer = new double[ISIZEA];
                while (ita.hasNext()) {
                    for (int k = 0; k < ISIZEA; k++) {
                        answer[k] = dv % a.getElementDoubleAbs(ita.index + k);
                    }
                    d.setObjectAbs(j, answer);
                    j += ISIZEA;
                }
            }
            start += System.nanoTime();
            double otime = ((double) start) / d.getSize();

            System.out.printf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime,
                    100. * (otime - ntime) / otime);

            checkDatasets(dv, a, c, d);

            n *= SSTEP;
        }
    }
}
From source file:org.geotools.data.complex.config.AppSchemaDataAccessConfigurator.java
private Set<FeatureTypeMapping> createFeatureTypeMappings(
        Map<String, DataAccess<FeatureType, Feature>> sourceDataStores) throws IOException {
    Set mappingsConfigs = config.getTypeMappings();

    Set<FeatureTypeMapping> featureTypeMappings = new HashSet<FeatureTypeMapping>();

    for (Iterator it = mappingsConfigs.iterator(); it.hasNext();) {
        TypeMapping dto = (TypeMapping) it.next();
        try {
            FeatureSource featureSource = getFeatureSource(dto, sourceDataStores);
            // get CRS from underlying feature source and pass it on
            CoordinateReferenceSystem crs;
            try {
                crs = featureSource.getSchema().getCoordinateReferenceSystem();
            } catch (UnsupportedOperationException e) {
                // web service back end doesn't support getSchema
                crs = null;
            }
            AttributeDescriptor target = getTargetDescriptor(dto, crs);

            // set original schema locations for encoding
            target.getType().getUserData().put("schemaURI", schemaURIs);

            List attMappings = getAttributeMappings(target, dto.getAttributeMappings(), dto.getItemXpath(),
                    crs);

            FeatureTypeMapping mapping;
            mapping = FeatureTypeMappingFactory.getInstance(featureSource, target, attMappings, namespaces,
                    dto.getItemXpath(), dto.isXmlDataStore());

            String mappingName = dto.getMappingName();
            if (mappingName != null) {
                mapping.setName(Types.degloseName(mappingName, namespaces));
            }
            featureTypeMappings.add(mapping);
        } catch (Exception e) {
            LOGGER.warning("Error creating app-schema data store for '"
                    + (dto.getMappingName() != null ? dto.getMappingName() : dto.getTargetElementName())
                    + "', caused by: " + e.getMessage());
            throw new IOException(e);
        }
    }
    return featureTypeMappings;
}