List of usage examples for java.util.Map.toString()
public String toString()
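Map.toString() is inherited from AbstractMap by the standard implementations (HashMap, LinkedHashMap, TreeMap, ...). It renders the entries in iteration order as {key1=value1, key2=value2}, converting each key and value to text in turn. A minimal sketch of the output format:

import java.util.LinkedHashMap;
import java.util.Map;

public class MapToStringDemo {
    public static void main(String[] args) {
        // LinkedHashMap keeps insertion order, so the rendered text is deterministic
        Map<String, Integer> m = new LinkedHashMap<>();
        m.put("alpha", 1);
        m.put("beta", 2);
        System.out.println(m); // prints: {alpha=1, beta=2}
    }
}

The examples below all rely on this format in one way or another: for logging, for assertions in tests, or (more fragile) as an ad-hoc serialization.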
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchIndexUtils.java
@Test
public void test_columnarMapping_standalone() throws JsonProcessingException, IOException {
    final String both = Resources.toString(
            Resources.getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/full_mapping_test.json"),
            Charsets.UTF_8);
    final JsonNode both_json = _mapper.readTree(both);

    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups = ElasticsearchIndexUtils
            .parseDefaultMapping(both_json, Optional.empty(), Optional.empty(), Optional.empty(),
                    _config.search_technology_override(), _mapper);

    //DEBUG
    //System.out.println("(Field lookups = " + field_lookups + ")");
    //System.out.println("(Analyzed default = " + _config.columnar_technology_override().default_field_data_analyzed() + ")");
    //System.out.println("(NotAnalyzed default = " + _config.columnar_technology_override().default_field_data_notanalyzed() + ")");

    // 1) Mappings - field name specified (include)
    {
        final Stream<String> test_stream1 = Stream.of("@version", "field_not_present", "@timestamp");
        final Stream<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>> test_stream_result_1 = ElasticsearchIndexUtils
                .createFieldIncludeLookups(test_stream1, fn -> Either.left(fn), field_lookups,
                        _mapper.convertValue(
                                _config.columnar_technology_override().enabled_field_data_notanalyzed(),
                                JsonNode.class),
                        _mapper.convertValue(
                                _config.columnar_technology_override().enabled_field_data_analyzed(),
                                JsonNode.class),
                        false, _config.search_technology_override(), Collections.emptyMap(), _mapper,
                        "_default_");
        final Map<Either<String, Tuple2<String, String>>, JsonNode> test_map_result_1 = test_stream_result_1
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2()));
        final String test_map_expected_1 = "{Left(@timestamp)={'type':'date','fielddata':{}}, Right((field_not_present,*))={'mapping':{'index':'not_analyzed','type':'{dynamic_type}','fielddata':{'format':'doc_values'}},'path_match':'field_not_present','match_mapping_type':'*'}, Left(@version)={'type':'string','index':'analyzed','fielddata':{'format':'paged_bytes'}}}";
        assertEquals(test_map_expected_1, strip(test_map_result_1.toString()));
        //DEBUG
        //System.out.println("(Field column lookups = " + test_map_result_1 + ")");
    }
    // 2) Mappings - field pattern specified (include)
    {
        final Stream<String> test_stream1 = Stream.of("*", "test*");
        final Stream<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>> test_stream_result_1 = ElasticsearchIndexUtils
                .createFieldIncludeLookups(test_stream1, fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                        _mapper.convertValue(
                                _config.columnar_technology_override().enabled_field_data_notanalyzed(),
                                JsonNode.class),
                        _mapper.convertValue(
                                _config.columnar_technology_override().enabled_field_data_analyzed(),
                                JsonNode.class),
                        true, _config.search_technology_override(), Collections.emptyMap(), _mapper,
                        "_default_");
        final Map<Either<String, Tuple2<String, String>>, JsonNode> test_map_result_1 = test_stream_result_1
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2()));
        final String test_map_expected_1 = "{Right((test*,*))={'mapping':{'type':'string','index':'analyzed','omit_norms':true,'fields':{'raw':{'type':'string','index':'not_analyzed','ignore_above':256,'fielddata':{'format':'doc_values'}}},'fielddata':{'format':'paged_bytes'}},'path_match':'test*','match_mapping_type':'*'}, Right((*,*))={'mapping':{'index':'not_analyzed','type':'{dynamic_type}','fielddata':{'format':'doc_values'}},'path_match':'*','match_mapping_type':'*'}}";
        assertEquals(test_map_expected_1, strip(test_map_result_1.toString()));
        //DEBUG
        //System.out.println("(Field column lookups = " + test_map_result_1 + ")");
    }
    // 3) Mappings - field name specified (exclude)
    {
        final Stream<String> test_stream1 = Stream.of("@version", "field_not_present", "@timestamp");
        final Stream<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>> test_stream_result_1 = ElasticsearchIndexUtils
                .createFieldExcludeLookups(test_stream1, fn -> Either.left(fn), field_lookups,
                        _config.search_technology_override(), Collections.emptyMap(), _mapper, "_default_");
        final Map<Either<String, Tuple2<String, String>>, JsonNode> test_map_result_1 = test_stream_result_1
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2()));
        final String test_map_expected_1 = "{Left(@timestamp)={'type':'date','fielddata':{'format':'disabled'}}, Right((field_not_present,*))={'mapping':{'index':'not_analyzed','type':'{dynamic_type}','fielddata':{'format':'disabled'}},'path_match':'field_not_present','match_mapping_type':'*'}, Left(@version)={'type':'string','index':'analyzed','fielddata':{'format':'disabled'}}}";
        assertEquals(test_map_expected_1, strip(test_map_result_1.toString()));
        //DEBUG
        //System.out.println("(Field column lookups = " + test_map_result_1 + ")");
    }
    // 4) Mappings - field type specified (exclude)
    {
        final Stream<String> test_stream1 = Stream.of("*", "test*");
        final Stream<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>> test_stream_result_1 = ElasticsearchIndexUtils
                .createFieldExcludeLookups(test_stream1, fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                        _config.search_technology_override(), Collections.emptyMap(), _mapper, "_default_");
        final Map<Either<String, Tuple2<String, String>>, JsonNode> test_map_result_1 = test_stream_result_1
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2()));
        final String test_map_expected_1 = "{Right((test*,*))={'mapping':{'type':'string','index':'analyzed','omit_norms':true,'fields':{'raw':{'type':'string','index':'not_analyzed','ignore_above':256,'fielddata':{'format':'disabled'}}},'fielddata':{'format':'disabled'}},'path_match':'test*','match_mapping_type':'*'}, Right((*,*))={'mapping':{'index':'not_analyzed','type':'{dynamic_type}','fielddata':{'format':'disabled'}},'path_match':'*','match_mapping_type':'*'}}";
        assertEquals(test_map_expected_1, strip(test_map_result_1.toString()));
        //DEBUG
        //System.out.println("(Field column lookups = " + test_map_result_1 + ")");
    }
    // 5) Check with type specific fielddata formats
    {
        assertEquals(2, _config.columnar_technology_override().enabled_field_data_analyzed().size());
        assertEquals(2, _config.columnar_technology_override().enabled_field_data_notanalyzed().size());
        assertTrue("Did override settings", _config.columnar_technology_override()
                .enabled_field_data_analyzed().containsKey("test_type_123"));
        assertTrue("Did override settings", _config.columnar_technology_override()
                .enabled_field_data_notanalyzed().containsKey("test_type_123"));

        final Stream<String> test_stream1 = Stream.of("test_type_123");
        final Stream<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>> test_stream_result_1 = ElasticsearchIndexUtils
                .createFieldIncludeLookups(test_stream1, fn -> Either.left(fn), field_lookups,
                        _mapper.convertValue(
                                _config.columnar_technology_override().enabled_field_data_notanalyzed(),
                                JsonNode.class),
                        _mapper.convertValue(
                                _config.columnar_technology_override().enabled_field_data_analyzed(),
                                JsonNode.class),
                        false, _config.search_technology_override(), Collections.emptyMap(), _mapper,
                        "test_type_123");
        final Map<Either<String, Tuple2<String, String>>, JsonNode> test_map_result_1 = test_stream_result_1
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2()));
        final String test_map_expected_1 = "{Right((test_type_123,*))={'mapping':{'index':'not_analyzed','type':'{dynamic_type}','fielddata':{'format':'test2'}},'path_match':'test_type_123','match_mapping_type':'*'}}";
        assertEquals(test_map_expected_1, strip(test_map_result_1.toString()));
    }
}
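A note on the assertions above: the expected strings quote JSON with single quotes, while Jackson's JsonNode.toString() emits double quotes, so the strip(...) helper (defined elsewhere in this test class and not shown here) presumably normalizes the quoting before the Map.toString() output is compared. A hedged reconstruction, assuming that is all it does:

// Hypothetical reconstruction of the strip(...) helper used by the assertions;
// the real implementation lives elsewhere in TestElasticsearchIndexUtils and may differ.
private static String strip(String s) {
    // Map.toString() gives {Left(@timestamp)={"type":"date",...}, ...};
    // swapping the quote character yields the single-quoted expected strings.
    return s.replace("\"", "'");
}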
From source file:dk.netarkivet.archive.arcrepository.bitpreservation.FileBasedActiveBitPreservation.java
/**
 * Get the checksums for a set of files in a replica (a map from filename to
 * the list of checksums retrieved for that file).
 *
 * Note that this method runs a batch job on the bitarchives, and therefore
 * may take a long time, depending on network delays.
 *
 * @param rep The replica to ask for checksums.
 * @param filenames The names of the files to ask for checksums for.
 * @return A map from filename to the MD5 checksums of that file, with an
 *         empty list for files whose checksum could not be retrieved from
 *         the replica.
 * @see ChecksumJob#parseLine(String)
 */
private Map<String, List<String>> getChecksums(Replica rep, Set<String> filenames) {
    // initialise the resulting map.
    Map<String, List<String>> res = new HashMap<String, List<String>>();
    try {
        PreservationArcRepositoryClient arcClient = ArcRepositoryClientFactory.getPreservationInstance();
        // for each file extract the checksum through a checksum message
        // and then put it into the resulting map.
        for (String file : filenames) {
            // retrieve the checksum from the replica.
            String checksum = arcClient.getChecksum(rep.getId(), file);
            // put the checksum into a list, or make an empty list if the
            // checksum was not retrieved.
            List<String> csList;
            if (checksum == null || checksum.isEmpty()) {
                log.warn("The checksum for file '" + file + "' from replica '" + rep
                        + "' was invalid. Empty list returned");
                csList = Collections.<String>emptyList();
            } else {
                csList = new ArrayList<String>();
                csList.add(checksum);
            }
            // put the filename and list into the map.
            res.put(file, csList);
        }
        log.debug("The map from a checksum archive: " + res.toString());
    } catch (NetarkivetException e) {
        // This is not critical. Log and continue.
        log.warn("The retrieval of checksums from a checksum archive was not successful.", e);
    }
    return res;
}
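Because the values here are lists, the log.debug line renders nested toString() calls: List.toString() supplies brackets inside Map.toString()'s braces. A small sketch of the logged shape (the file names and checksum below are made up):

import java.util.*;

public class ChecksumMapDemo {
    public static void main(String[] args) {
        Map<String, List<String>> res = new HashMap<String, List<String>>();
        res.put("file1.arc", Collections.singletonList("0123456789abcdef0123456789abcdef"));
        res.put("file2.arc", Collections.<String>emptyList());
        // prints something like:
        // The map from a checksum archive: {file1.arc=[0123456789abcdef0123456789abcdef], file2.arc=[]}
        // (HashMap iteration order is unspecified, so the entry order may vary)
        System.out.println("The map from a checksum archive: " + res);
    }
}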
From source file:org.jsweet.transpiler.JSweetTranspiler.java
/**
 * Evaluates the given source files with the given evaluation engine.
 * <p>
 * If the given engine name is "Java", this function looks up the classes in
 * the classpath and runs the main methods when found.
 *
 * @param engineName
 *            the engine name: either "Java" or any valid and installed
 *            JavaScript engine.
 * @param transpilationHandler
 *            the log handler
 * @param sourceFiles
 *            the source files to be evaluated (transpiled first if needed)
 * @return the evaluation result
 * @throws Exception
 *             when an internal error occurs
 */
public EvaluationResult eval(String engineName, TranspilationHandler transpilationHandler,
        SourceFile... sourceFiles) throws Exception {
    logger.info("[" + engineName + " engine] eval files: " + Arrays.asList(sourceFiles));
    if ("Java".equals(engineName)) {
        // search for main functions
        JSweetContext context = new JSweetContext(this);
        Options options = Options.instance(context);
        if (classPath != null) {
            options.put(Option.CLASSPATH, classPath);
        }
        options.put(Option.XLINT, "path");
        JavacFileManager.preRegister(context);
        JavaFileManager fileManager = context.get(JavaFileManager.class);
        List<JavaFileObject> fileObjects = toJavaFileObjects(fileManager,
                Arrays.asList(SourceFile.toFiles(sourceFiles)));
        JavaCompiler compiler = JavaCompiler.instance(context);
        compiler.attrParseOnly = true;
        compiler.verbose = true;
        compiler.genEndPos = false;
        logger.info("parsing: " + fileObjects);
        List<JCCompilationUnit> compilationUnits = compiler.enterTrees(compiler.parseFiles(fileObjects));
        MainMethodFinder mainMethodFinder = new MainMethodFinder();
        try {
            for (JCCompilationUnit cu : compilationUnits) {
                cu.accept(mainMethodFinder);
            }
        } catch (Exception e) {
            // swallow on purpose
        }
        if (mainMethodFinder.mainMethod != null) {
            try {
                initExportedVarMap();
                Class<?> c = Class.forName(
                        mainMethodFinder.mainMethod.getEnclosingElement().getQualifiedName().toString());
                c.getMethod("main", String[].class).invoke(null, (Object) null);
            } catch (Exception e) {
                throw new Exception("evaluation error", e);
            }
        }
        final Map<String, Object> map = getExportedVarMap();
        return new EvaluationResult() {

            @SuppressWarnings("unchecked")
            @Override
            public <T> T get(String variableName) {
                return (T) map.get("_exportedVar_" + variableName);
            }

            @Override
            public String toString() {
                return map.toString();
            }

            @Override
            public String getExecutionTrace() {
                return "<not available>";
            }
        };
    } else {
        if (!areAllTranspiled(sourceFiles)) {
            ErrorCountTranspilationHandler errorHandler = new ErrorCountTranspilationHandler(
                    transpilationHandler);
            transpile(errorHandler, sourceFiles);
            if (errorHandler.getErrorCount() > 0) {
                throw new Exception("unable to evaluate: transpilation errors remain");
            }
        }
        StringWriter trace = new StringWriter();
        Process runProcess;
        if (context.useModules) {
            File f = null;
            if (!context.entryFiles.isEmpty()) {
                f = context.entryFiles.get(0);
                for (SourceFile sf : sourceFiles) {
                    if (sf.getJavaFile().equals(f)) {
                        f = sf.getJsFile();
                    }
                }
            }
            if (f == null) {
                f = sourceFiles[sourceFiles.length - 1].getJsFile();
            }
            logger.info("[modules] eval file: " + f);
            runProcess = ProcessUtil.runCommand(ProcessUtil.NODE_COMMAND, line -> trace.append(line + "\n"),
                    null, f.getPath());
        } else {
            File tmpFile = new File(new File(TMP_WORKING_DIR_NAME), "eval.tmp.js");
            FileUtils.deleteQuietly(tmpFile);
            if (jsLibFiles != null) {
                for (File jsLibFile : jsLibFiles) {
                    String script = FileUtils.readFileToString(jsLibFile);
                    FileUtils.write(tmpFile, script + "\n", true);
                }
            }
            for (SourceFile sourceFile : sourceFiles) {
                String script = FileUtils.readFileToString(sourceFile.getJsFile());
                FileUtils.write(tmpFile, script + "\n", true);
            }
            logger.info("[no modules] eval file: " + tmpFile);
            runProcess = ProcessUtil.runCommand(ProcessUtil.NODE_COMMAND, line -> trace.append(line + "\n"),
                    null, tmpFile.getPath());
        }
        int returnCode = runProcess.exitValue();
        logger.info("return code=" + returnCode);
        if (returnCode != 0) {
            throw new Exception("evaluation error (code=" + returnCode + ") - trace=" + trace);
        }
        return new TraceBasedEvaluationResult(trace.getBuffer().toString());
    }
}
From source file:facebook.TiFacebookModule.java
@Kroll.method
public void presentSendRequestDialog(@Kroll.argument(optional = true) final KrollDict args) {
    GameRequestDialog requestDialog = new GameRequestDialog(TiApplication.getInstance().getCurrentActivity());
    requestDialog.registerCallback(callbackManager, new FacebookCallback<GameRequestDialog.Result>() {
        KrollDict data = new KrollDict();

        public void onSuccess(GameRequestDialog.Result result) {
            final String postId = result.getRequestId();
            if (postId != null) {
                data.put(PROPERTY_RESULT, postId);
            }
            data.put(PROPERTY_SUCCESS, true);
            data.put(PROPERTY_CANCELLED, false);
            fireEvent(EVENT_REQUEST_DIALOG_COMPLETE, data);
        }

        public void onCancel() {
            data.put(PROPERTY_SUCCESS, false);
            data.put(PROPERTY_CANCELLED, true);
            fireEvent(EVENT_REQUEST_DIALOG_COMPLETE, data);
        }

        public void onError(FacebookException error) {
            data.put(PROPERTY_SUCCESS, false);
            data.put(PROPERTY_CANCELLED, false);
            data.put(PROPERTY_ERROR, "Error sending Game Request");
            fireEvent(EVENT_REQUEST_DIALOG_COMPLETE, data);
        }
    });

    String title = (String) args.get("title");
    String message = (String) args.get("message");
    Map<String, String> data = (HashMap<String, String>) args.get("data");
    String recipients = (String) args.get("recipients");
    String suggestions = (String) args.get("recipientSuggestions");
    String objectID = (String) args.get("objectID");
    String to = (String) args.get("to");
    if (to != null) {
        Log.w(TAG, "Property `to` is deprecated. Please use `recipients`.");
    }

    int actionTypeChoice = args.optInt("actionType", TiFacebookModule.ACTION_TYPE_NONE);
    ActionType actionType;
    switch (actionTypeChoice) {
    case TiFacebookModule.ACTION_TYPE_SEND:
        actionType = ActionType.SEND;
        break;
    case TiFacebookModule.ACTION_TYPE_TURN:
        actionType = ActionType.TURN;
        break;
    case TiFacebookModule.ACTION_TYPE_ASK_FOR:
        actionType = ActionType.ASKFOR;
        break;
    default:
    case TiFacebookModule.ACTION_TYPE_NONE:
        actionType = null;
        break;
    }

    int filtersChoice = args.optInt("filters", TiFacebookModule.FILTER_NONE);
    Filters filters;
    switch (filtersChoice) {
    case TiFacebookModule.FILTER_APP_NON_USERS:
        filters = Filters.APP_NON_USERS;
        break;
    case TiFacebookModule.FILTER_APP_USERS:
        filters = Filters.APP_USERS;
        break;
    default:
    case TiFacebookModule.FILTER_NONE:
        filters = null;
        break;
    }

    String dataString = null;
    if (data != null) {
        dataString = data.toString();
    }
    List<String> recipientsList = null;
    if (recipients != null) {
        String[] recipientsArray = recipients.split(",");
        recipientsList = Arrays.asList(recipientsArray);
    }
    List<String> suggestionsList = null;
    if (suggestions != null) {
        String[] suggestionsArray = suggestions.split(",");
        suggestionsList = Arrays.asList(suggestionsArray);
    }

    GameRequestContent content = new GameRequestContent.Builder().setTitle(title).setMessage(message)
            .setData(dataString).setRecipients(recipientsList).setActionType(actionType).setObjectId(objectID)
            .setFilters(filters).setSuggestions(suggestionsList).build();
    requestDialog.show(content);
}
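One caveat in this example: setData(dataString) receives raw Map.toString() output such as {badge=1, score=42}, which is not valid JSON, while a game request's data field is normally a JSON string. If JSON is what the dialog expects, a hedged alternative sketch (org.json.JSONObject ships with Android, so no extra dependency is assumed; whether the API accepts the toString() form is not established by this snippet):

import org.json.JSONObject;

String dataString = null;
if (data != null) {
    // new JSONObject(Map) copies the entries; toString() then emits valid JSON,
    // e.g. {"badge":"1","score":"42"} instead of {badge=1, score=42}
    dataString = new JSONObject(data).toString();
}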
From source file:uk.ac.cam.cl.dtg.segue.api.AdminFacade.java
/**
 * Get the event data for a specified user.
 *
 * @param request
 *            - request information used for authentication
 * @param requestForCaching
 *            - request information used for caching.
 * @param httpServletRequest
 *            - the request which may contain session information.
 * @param fromDate
 *            - date to start search
 * @param toDate
 *            - date to end search
 * @param events
 *            - comma separated list of events of interest.
 * @param bin
 *            - should we group data into the first day of the month? true or false.
 * @return Returns a map of eventType to a map of dates to the total number of events.
 */
@GET
@Path("/users/event_data/over_time")
@Produces(MediaType.APPLICATION_JSON)
@GZIP
public Response getEventDataForAllUsers(@Context final Request request,
        @Context final HttpServletRequest httpServletRequest, @Context final Request requestForCaching,
        @QueryParam("from_date") final Long fromDate, @QueryParam("to_date") final Long toDate,
        @QueryParam("events") final String events, @QueryParam("bin_data") final Boolean bin) {
    Map<String, Map<LocalDate, Long>> eventLogsByDate;
    try {
        eventLogsByDate = fetchEventDataForAllUsers(request, httpServletRequest, requestForCaching, fromDate,
                toDate, events, bin);
    } catch (BadRequestException e) {
        return new SegueErrorResponse(Status.BAD_REQUEST, e.getMessage()).toResponse();
    } catch (ForbiddenException e) {
        return new SegueErrorResponse(Status.FORBIDDEN, e.getMessage()).toResponse();
    } catch (NoUserLoggedInException e) {
        return SegueErrorResponse.getNotLoggedInResponse();
    } catch (SegueDatabaseException e) {
        log.error("Database error while getting event details for a user.", e);
        return new SegueErrorResponse(Status.INTERNAL_SERVER_ERROR, "Unable to complete the request.")
                .toResponse();
    }

    // derive an ETag from the rendered map so unchanged data can be served from cache
    EntityTag etag = new EntityTag(eventLogsByDate.toString().hashCode() + "");
    Response cachedResponse = generateCachedResponse(requestForCaching, etag);
    if (cachedResponse != null) {
        return cachedResponse;
    }
    return Response.ok(eventLogsByDate).tag(etag)
            .cacheControl(getCacheControl(NUMBER_SECONDS_IN_FIVE_MINUTES, false)).build();
}
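The ETag here hashes Map.toString() with String.hashCode(), which is only 32 bits and depends on the map's iteration order, so distinct result sets can occasionally collide and be served stale from cache. A hedged sketch of a stronger tag, assuming commons-codec is available on this project's classpath and that eventLogsByDate has a deterministic iteration order (e.g. a TreeMap):

import org.apache.commons.codec.digest.DigestUtils;

// 128-bit digest over the rendered map instead of a 32-bit hashCode
EntityTag etag = new EntityTag(DigestUtils.md5Hex(eventLogsByDate.toString()));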
From source file:ddf.catalog.CatalogFrameworkImpl.java
@Override
public void configurationUpdateCallback(Map<String, String> properties) {
    String methodName = "configurationUpdateCallback";
    logger.debug("ENTERING: " + methodName);

    if (properties != null && !properties.isEmpty()) {
        if (logger.isDebugEnabled()) {
            logger.debug(properties.toString());
        }

        String ddfSiteName = properties.get(ConfigurationManager.SITE_NAME);
        if (StringUtils.isNotBlank(ddfSiteName)) {
            logger.debug("ddfSiteName = " + ddfSiteName);
            this.setId(ddfSiteName);
        }

        String ddfVersion = properties.get(ConfigurationManager.VERSION);
        if (StringUtils.isNotBlank(ddfVersion)) {
            logger.debug("ddfVersion = " + ddfVersion);
            this.setVersion(ddfVersion);
        }

        String ddfOrganization = properties.get(ConfigurationManager.ORGANIZATION);
        if (StringUtils.isNotBlank(ddfOrganization)) {
            logger.debug("ddfOrganization = " + ddfOrganization);
            this.setOrganization(ddfOrganization);
        }
    } else {
        logger.debug("properties are NULL or empty");
    }

    logger.debug("EXITING: " + methodName);
}
From source file:org.metis.pull.WdsResourceBean.java
/**
 * This method gets called by the WdsRdbMapper bean to handle a HTTP
 * request. This method must be multi-thread capable. Note that since we're
 * not using Views, this method must return null.
 *
 * @param request
 *            the http request that is being serviced
 * @param response
 *            the response that will be sent back to the service consumer
 * @return must return null since we're not using a view
 * @throws Exception
 */
@SuppressWarnings("unchecked")
protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
        throws Exception {

    LOG.debug(getBeanName() + ": handleRequestInternal - **** new request ****");

    // dump the request if trace is on
    if (LOG.isTraceEnabled()) {
        LOG.trace(getBeanName() + ":handleRequestInternal - method = " + request.getMethod());
        LOG.trace(getBeanName() + ":handleRequestInternal - uri = " + request.getRequestURI());
        LOG.trace(getBeanName() + ":handleRequestInternal - protocol = " + request.getProtocol());
        LOG.trace(getBeanName() + ":handleRequestInternal - secure = " + request.isSecure());
        // dump all the http headers and their values
        Enumeration<String> headerNames = request.getHeaderNames();
        if (headerNames != null) {
            while (headerNames.hasMoreElements()) {
                String headerName = headerNames.nextElement();
                LOG.trace(getBeanName() + ":handleRequestInternal - " + headerName + " = "
                        + request.getHeader(headerName));
            }
        }
        if (request.getQueryString() != null) {
            LOG.trace(getBeanName() + ":handleRequestInternal - queryString = " + request.getQueryString());
        }
    }

    long currentTime = System.currentTimeMillis();
    // give the response a Date header with the current time
    response.setDateHeader(DATE_HDR, currentTime);
    // assign the Server header this container's info
    response.setHeader(SERVER_HDR, getServerInfo());

    // determine the HTTP protocol version being used by the client
    // default version will be 0
    int protocolVersion = 0;
    try {
        protocolVersion = Integer
                .parseInt(request.getProtocol().split(FORWARD_SLASH_STR)[1].split(ESC_DOT_STR)[1]);
    } catch (Exception exc) {
        LOG.warn(getBeanName() + ": handleRequestInternal - unable to get http protocol "
                + "version, stack trace follows: ");
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
    }
    LOG.trace(getBeanName() + ":handleRequestInternal - using this protocol version: " + protocolVersion);

    /*
     * Ok, the request first needs to run the security gauntlet.
     *
     * We do not want to send any error messages back to the client that
     * would give it a hint that we're invoking SQL statements. This is a
     * countermeasure for SQL injection probes.
     */

    // see if this RDB is restricting user agents and if so, validate user
    // agent
    if ((getAllowedAgents() != null && !getAllowedAgents().isEmpty())
            || (getNotAllowedAgents() != null && !getNotAllowedAgents().isEmpty())) {
        String userAgent = request.getHeader(USER_AGENT_HDR);
        if (userAgent != null && userAgent.length() > 0) {
            LOG.debug(getBeanName() + ": handleRequestInternal - validating this user agent: " + userAgent);
            // Convert to lower case as allowed agents have been
            // converted to lower case as well
            userAgent = userAgent.toLowerCase();
            boolean allow = false;
            if (getAllowedAgents() != null && !getAllowedAgents().isEmpty()) {
                for (String agent : getAllowedAgents()) {
                    LOG.trace(getBeanName() + ": handleRequestInternal - comparing to this "
                            + "allowed agent : " + agent);
                    if (userAgent.indexOf(agent) >= 0) {
                        LOG.trace(getBeanName() + ": handleRequestInternal - this allowed agent "
                                + "was found: " + agent);
                        allow = true;
                        break;
                    }
                }
            } else {
                allow = true;
                for (String agent : getNotAllowedAgents()) {
                    LOG.trace(getBeanName() + ": handleRequestInternal - comparing to this "
                            + "non-allowed agent : " + agent);
                    if (userAgent.indexOf(agent) >= 0) {
                        LOG.trace(getBeanName() + ": handleRequestInternal - this non-allowed "
                                + "agent was found: " + agent);
                        allow = false;
                        break;
                    }
                }
            }
            if (!allow) {
                response.sendError(SC_UNAUTHORIZED, "ERROR, user agent is not authorized");
                LOG.error(getBeanName() + ": handleRequestInternal - ERROR, user agent is not authorized");
                return null;
            }
        } else {
            response.sendError(SC_UNAUTHORIZED, "ERROR, user agent info was not received and is required!");
            LOG.error(getBeanName() + ": handleRequestInternal - ERROR, user agent header "
                    + "is required but was not provided by the client");
            return null;
        }
    }

    // we do not support chunked transfer encoding, which is a http
    // 1.1 feature.
    if (request.getHeader(TRANSFER_ENCODING_HDR) != null
            && request.getHeader(TRANSFER_ENCODING_HDR).equalsIgnoreCase(CHUNKED)) {
        response.sendError(SC_BAD_REQUEST, "Chunked transfer encoding is not supported");
        return null;
    }

    /*
     * isSecure returns a boolean indicating whether this request was made
     * using a secure channel, such as HTTPS. So, if the channel must be
     * secure, but it is not, then throw an exception and return an error.
     */
    if (isSecure() && !request.isSecure()) {
        response.sendError(SC_UNAUTHORIZED, "ERROR, channel is not secure");
        LOG.error(getBeanName() + ": handleRequestInternal - ERROR, channel is not secure");
        return null;
    }

    /*
     * getUserPrincipal() returns a java.security.Principal containing the
     * name of the user making this request, else it returns null if the
     * user has not been authenticated. So, if it is mandated that the user
     * be authenticated, but the user has not been authenticated, then throw
     * an exception and return an error.
     */
    if (isAuthenticated() && request.getUserPrincipal() == null) {
        response.sendError(SC_UNAUTHORIZED, "ERROR, user is not authenticated");
        LOG.error(getBeanName() + ": handleRequestInternal - ERROR, user is not authenticated");
        return null;
    }

    /*
     * Check for a valid method - the only supported http methods are GET,
     * POST, PUT, and DELETE. Here are some good descriptions regarding the
     * methods and their use with respect to this servlet.
     *
     * The GET method is used for projecting data from the DB, so it maps to
     * a select statement.
     *
     * The PUT and POST methods are used for inserting or updating an entity
     * in the DB, so they map to either an update or insert.
     *
     * The DELETE is used for removing one or more entities from the DB, so
     * it maps to a delete.
     *
     * The bean must be assigned at least one of the methods to service.
     */
    Method method = null;
    try {
        method = Enum.valueOf(Method.class, request.getMethod().toUpperCase());
        LOG.debug(getBeanName() + ": handleRequestInternal - processing this method: " + method.toString());
    } catch (IllegalArgumentException e) {
        LOG.error(getBeanName() + ":handleRequestInternal - This method is not allowed ["
                + request.getMethod() + "]");
        response.setHeader("Allow", allowedMethodsRsp);
        response.sendError(SC_METHOD_NOT_ALLOWED, "This method is not allowed [" + request.getMethod() + "]");
        return null;
    }

    // do some more method validation; i.e., make sure requested method has
    // been assigned a SQL statement
    //
    // TODO: we may be able to remove this block of code
    String s1 = null;
    if (method.isGet() && sqlStmnts4Get == null || method.isPost() && sqlStmnts4Post == null
            || method.isPut() && sqlStmnts4Put == null || method.isDelete() && sqlStmnts4Delete == null) {
        response.setHeader("Allow", allowedMethodsRsp);
        s1 = "HTTP method [" + method + "] is not supported";
        response.sendError(SC_METHOD_NOT_ALLOWED, s1);
        LOG.error(getBeanName() + ":handleRequestInternal - " + s1);
        return null;
    }

    // If the client has specified an 'Accept' header field, then determine
    // if it is willing or capable of accepting JSON or anything (*/*)
    //
    // TODO: what about the client accepting urlencoded strings??
    s1 = request.getHeader(ACCEPT_HDR);
    if (s1 != null && s1.length() > 0) {
        LOG.debug(getBeanName() + ":handleRequestInternal - client-specified media "
                + "type in accept header = " + s1);
        // parse the accept header's content
        String[] mediaTypes = s1.trim().split(COMMA_STR);
        boolean match = false;
        for (String mediaType : mediaTypes) {
            mediaType = mediaType.trim().toLowerCase();
            if (mediaType.startsWith(anyContentType) || mediaType.startsWith(jsonContentType)) {
                match = true;
                break;
            }
        }
        if (!match) {
            LOG.error(getBeanName() + ":handleRequestInternal - client-specified media type of '" + s1
                    + "' does not include '" + jsonContentType + "'");
            response.sendError(SC_NOT_ACCEPTABLE, "client-specified media type of '" + s1
                    + "' does not include '" + jsonContentType + "'");
            return null;
        }
    }

    // pick up the corresponding list of SQL statements for this request
    List<SqlStmnt> sqlStmnts = null;
    switch (method) {
    case GET:
        sqlStmnts = getSqlStmnts4Get();
        break;
    case DELETE:
        sqlStmnts = getSqlStmnts4Delete();
        break;
    case PUT:
        sqlStmnts = getSqlStmnts4Put();
        break;
    case POST:
        sqlStmnts = getSqlStmnts4Post();
        break;
    default:
        response.sendError(SC_METHOD_NOT_ALLOWED, "ERROR, unsupported method type: " + method);
        LOG.error(getBeanName() + ": handleRequestInternal - ERROR, encountered unknown "
                + "method type: " + method);
        return null;
    }

    // ~~~~~~ EXTRACT PARAMETERS, IF ANY ~~~~~~~~~~~

    // GETs with entity bodies are illegal
    if (method.isGet() && request.getContentLength() > 0) {
        response.sendError(SC_BAD_REQUEST,
                "Client has issued a malformed or illegal request; GET cannot include entity body");
        return null;
    }

    // the DELETE method also cannot include an entity body; however, the
    // servlet containers already ignore them, so no need to check for that

    // see if a json object arrived
    boolean jsonObjectPresent = (method.isPost() || method.isPut())
            && (request.getContentLength() > 0 && request.getContentType().equalsIgnoreCase(jsonContentType));
    LOG.debug(getBeanName() + ": jsonObjectPresent = " + jsonObjectPresent);

    // see if this is a PUT with entity. we've learned that for PUTs,
    // getParameterMap does not work the same across all servlet containers,
    // so we need to take care of this ourselves
    boolean putWithBodyPresent = (method.isPut()) && (request.getContentLength() > 0
            && request.getContentType().equalsIgnoreCase(urlEncodedContentType));
    LOG.debug(getBeanName() + ": putWithBodyPresent = " + putWithBodyPresent);

    // collect incoming parameters and place them in a common bucket
    //
    // ~~~~ ALL PARAMETER KEY NAMES MUST BE FORCED TO LOWER CASE ~~~
    //
    List<Map<String, String>> cParams = new ArrayList<Map<String, String>>();

    // first, get the incoming query or form parameters (if any); we will
    // assume that each key has only one parameter. in other words,
    // we're not dealing with drop-down boxes or things similar
    if (!putWithBodyPresent && !jsonObjectPresent) {
        Map<String, String[]> qParams = request.getParameterMap();
        if (qParams != null && !qParams.isEmpty()) {
            Map<String, String> qMap = new HashMap<String, String>();
            for (String key : qParams.keySet()) {
                qMap.put(key.toLowerCase(), qParams.get(key)[0]);
            }
            if (!qMap.isEmpty()) {
                cParams.add(qMap);
                LOG.debug(getBeanName() + ": query params = " + qMap.toString());
            }
        }
    }
    // a put with entity body arrived, so get the parameters from the
    // body and place them in the common bucket
    else if (putWithBodyPresent) {
        try {
            Map<String, String> putParams = null;
            // parseUrlEncoded will force keys to lower case
            putParams = Utils.parseUrlEncoded(request.getInputStream());
            if (putParams != null && !putParams.isEmpty()) {
                cParams.add(putParams);
            }
        } catch (Exception exc) {
            LOG.error(getBeanName() + ": ERROR, caught this "
                    + "exception while parsing urlencoded string: " + exc.toString());
            LOG.error(getBeanName() + ": exception stack trace follows:");
            dumpStackTrace(exc.getStackTrace());
            if (exc.getCause() != null) {
                LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
                LOG.error(getBeanName() + ": causing exception stack trace follows:");
                dumpStackTrace(exc.getCause().getStackTrace());
            }
            response.sendError(SC_BAD_REQUEST, "urlencoded string parsing error: " + exc.getMessage());
            return null;
        }
    }
    // ok, a json object arrived, so get the parameters defined in that
    // object and place them in the common bucket
    else {
        // it's a json object, so parse it to extract params from it
        try {
            List<Map<String, String>> jParams = null;
            // parseJson will ensure that all passed-in JSON objects have
            // the same set of identical keys
            jParams = Utils.parseJson(request.getInputStream());
            if (jParams != null && !jParams.isEmpty()) {
                // if we also got query params then ensure they have the
                // same set of keys as the json params. why anyone would
                // ever do this is beyond me, but I'll leave it in for now
                if (!cParams.isEmpty()) {
                    Map<String, String> cMap = cParams.get(0);
                    Map<String, String> jMap = jParams.get(0);
                    for (String key : cMap.keySet()) {
                        if (jMap.get(key) == null) {
                            String eStr = getBeanName() + ": ERROR, json "
                                    + "object key set does not match query param key set";
                            LOG.error(eStr);
                            response.sendError(SC_BAD_REQUEST, eStr);
                            return null;
                        }
                    }
                    // place the passed in query params in the jParams
                    // bucket
                    jParams.add(cMap);
                }
                // assign the jParams bucket to the common bucket
                cParams = jParams;
            }
        } catch (Exception exc) {
            LOG.error(getBeanName() + ": ERROR, caught this "
                    + "exception while parsing json object: " + exc.toString());
            LOG.error(getBeanName() + ": exception stack trace follows:");
            dumpStackTrace(exc.getStackTrace());
            if (exc.getCause() != null) {
                LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
                LOG.error(getBeanName() + ": causing exception stack trace follows:");
                dumpStackTrace(exc.getCause().getStackTrace());
            }
            response.sendError(SC_BAD_REQUEST, "json parsing error: " + exc.getMessage());
            return null;
        }
    }

    // if trace is on, dump the params (if any) to the log
    if (LOG.isDebugEnabled()) {
        if (!cParams.isEmpty()) {
            for (int i = 0; i < cParams.size(); i++) {
                LOG.debug(getBeanName() + ": handleRequestInternal - received these params: "
                        + cParams.get(i).toString());
            }
        } else {
            LOG.debug(getBeanName() + ": handleRequestInternal - did not receive any params");
        }
    }

    // ensure none of the params' values have been black listed
    if (!cParams.isEmpty() && getBlackList().length() > 0) {
        char[] bl = getBlackList().toCharArray();
        for (int i = 0; i < cParams.size(); i++) {
            for (String value : cParams.get(i).values()) {
                if (Utils.isOnBlackList(value, bl)) {
                    response.sendError(SC_BAD_REQUEST,
                            "encountered black listed character in this param value: " + value);
                    LOG.error(getBeanName() + ": handleRequestInternal - encountered black listed "
                            + "character in this param value: " + value);
                    return null;
                }
            }
        }
    }

    // find the proper SQL statement based on the incoming parameters' (if
    // any) keys
    SqlStmnt sqlStmnt = null;
    try {
        // getMatch will try and find a match, even if no params were
        // provided.
        // @formatter:off
        sqlStmnt = (cParams.isEmpty())
                ? SqlStmnt.getMatch(sqlStmnts, null)
                : SqlStmnt.getMatch(sqlStmnts, cParams.get(0).keySet());
        // @formatter:on
        if (sqlStmnt == null && !cParams.isEmpty()) {
            LOG.error(getBeanName() + ":ERROR, unable to find sql "
                    + "statement with this incoming param set: " + cParams.toString());
            response.sendError(SC_INTERNAL_SERVER_ERROR, "internal server error: mapping error");
            return null;
        } else if (sqlStmnt == null) {
            LOG.warn(getBeanName() + ": warning, unable to find sql "
                    + "statement on first pass, will use extra path info");
        } else {
            LOG.debug(getBeanName() + ": handleRequestInternal - matching sql stmt = " + sqlStmnt.toString());
        }
    } catch (Exception exc) {
        LOG.error(getBeanName() + ":ERROR, caught this exception "
                + "while mapping sql to params: " + exc.toString());
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
            LOG.error(getBeanName() + ": causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        response.sendError(SC_INTERNAL_SERVER_ERROR, "mapping error");
        return null;
    }

    // if getMatch could not find a match - perhaps input params were not
    // provided - then use the URI's 'extended path' information as an input
    // param
    if (sqlStmnt == null) {
        LOG.debug(getBeanName() + ": invoking getExtraPathInfo");
        String[] xtraPathInfo = Utils.getExtraPathInfo(request.getPathInfo());
        if (xtraPathInfo != null && xtraPathInfo.length >= 2) {
            LOG.debug(getBeanName() + ": extra path key:value = " + xtraPathInfo[0] + ":" + xtraPathInfo[1]);
        } else {
            LOG.error(getBeanName() + ":ERROR, getExtraPathInfo failed to find info");
            response.sendError(SC_INTERNAL_SERVER_ERROR, "internal server error: mapping error");
            return null;
        }
        // put the xtra path info in the common param bucket and try again
        cParams.clear();
        Map<String, String> xMap = new HashMap<String, String>();
        xMap.put(xtraPathInfo[0], xtraPathInfo[1]);
        cParams.add(xMap);
        // try again with the extra path info
        sqlStmnt = SqlStmnt.getMatch(sqlStmnts, xMap.keySet());
        if (sqlStmnt == null) {
            LOG.error(getBeanName() + ":ERROR, unable to find sql "
                    + "statement with this xtra path info: " + cParams.toString());
            response.sendError(SC_NOT_FOUND, "internal server error: mapping error");
            return null;
        }
    }

    // if we've gotten this far, we've gotten past the security gauntlet and
    // we have a SQL statement to work with.
    SqlResult sqlResult = null;
    try {
        // get the output stream
        OutputStream os = response.getOutputStream();
        // FIRE IN THE DB HOLE :)
        if ((sqlResult = sqlStmnt.execute(cParams)) == null) {
            // execute will have logged the necessary debug/error info
            response.sendError(SC_INTERNAL_SERVER_ERROR);
            return null;
        }
        // execute went through ok, lets see how to respond
        switch (method) {
        case GET:
            // if a resultset was returned, then set the content type,
            // convert it to json, and write it out
            List<Map<String, Object>> listMap = sqlResult.getResultSet();
            if (listMap != null) {
                // tell the client the content type
                response.setContentType(rspJsonContentType);
                String jsonOutput = Utils.generateJson(sqlResult.getResultSet());
                LOG.trace(getBeanName() + ": returning this payload - " + jsonOutput);
                os.write(jsonOutput.getBytes());
                // ensure that only the client can cache the data and tell
                // the client how long the data can remain active
                response.setHeader(CACHE_CNTRL_HDR,
                        (getCacheControl() != null) ? getCacheControl() : DFLT_CACHE_CNTRL_STR);
                response.setHeader(PRAGMA_HDR, PRAGMA_NO_CACHE_STR);
                response.setDateHeader(EXPIRES_HDR, currentTime + (getExpires() * 1000));
            } else {
                LOG.debug(getBeanName() + ": NOT returning json message");
            }
            response.setStatus(SC_OK);
            break;
        case DELETE:
            // a DELETE should not send back an entity body
            response.setStatus(SC_NO_CONTENT);
            break;
        case PUT:
            /*
             * PUTs are idempotent; therefore, they must provide ALL the
             * properties that pertain to the resource/entity that they are
             * creating or updating. Updates cannot be partial updates; they
             * must be full updates. A PUT is issued by a client that knows
             * the identifier (in our case, primary key) of the
             * resource/entity. Therefore, we do not have to send back a
             * Location header in response to a PUT that has created a
             * resource.
             */
            if (sqlStmnt.isInsert()) {
                response.setStatus(SC_CREATED);
            } else {
                response.setStatus(SC_OK);
            }
            break;
        case POST:
            /*
             * A POST is not idempotent; therefore, it can be used to
             * perform a 'partial' update, as well as a full create. When
             * creating a resource via POST, the client does not know the
             * primary key, and it assumes it will be auto-generated;
             * therefore, a Location header with the auto-generated key must
             * be returned to the client.
             */
            if (sqlStmnt.isInsert()) {
                response.setStatus(SC_CREATED);
                // we need to return the new key, but only if it was not a
                // batch insert. the new key should be returned via the
                // location header

                // check if a key holder exists; if not, then table was not
                // configured with auto-generated key.
                String locationPath = request.getRequestURL().toString();
                if (sqlResult.getKeyHolder() != null) {
                    // key holder exists, check and see if a key is
                    // present
                    if (sqlResult.getKeyHolder().getKey() != null) {
                        String id = sqlResult.getKeyHolder().getKey().toString();
                        LOG.debug(getBeanName() + ": getKey() returns " + id);
                        locationPath += ("/" + id);
                        LOG.debug(getBeanName() + ": locationPath = " + locationPath);
                        response.setHeader(LOCATION_HDR, locationPath);
                    }
                    // no key, check for multiple keys
                    // TODO: should we send back all keys?
                    else if (sqlResult.getKeyHolder().getKeys() != null) {
                        Map<String, Object> keyMap = sqlResult.getKeyHolder().getKeys();
                        LOG.debug(getBeanName() + ": getKeys() returns " + keyMap);
                    }
                    // maybe map of keys?
                    // TODO: should we send back all keys?
                    else if (sqlResult.getKeyHolder().getKeyList() != null) {
                        for (Map<String, Object> map : sqlResult.getKeyHolder().getKeyList()) {
                            LOG.debug(getBeanName() + ": Map from getKeyList(): " + map);
                        }
                    }
                } else {
                    LOG.debug(getBeanName() + ": key holder was not returned for the insert");
                }
            } else {
                // it was not an insert, so just send back an OK for the
                // update
                response.setStatus(SC_OK);
            }
            break;
        default:
            response.setStatus(SC_OK);
            break;
        }
    } catch (JsonProcessingException exc) {
        LOG.error(getBeanName() + ":ERROR, caught this "
                + "JsonProcessingException while trying to gen json message: " + exc.toString());
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
            LOG.error(getBeanName() + ": causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        response.sendError(SC_INTERNAL_SERVER_ERROR, "parsing error");
        return null;
    } catch (Exception exc) {
        LOG.error(getBeanName() + ":ERROR, caught this "
                + "Exception while trying to gen json message: " + exc.toString());
        LOG.error(getBeanName() + ": exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error(getBeanName() + ": Caused by " + exc.getCause().toString());
            LOG.error(getBeanName() + ": causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        response.sendError(SC_INTERNAL_SERVER_ERROR, "parsing error");
        return null;
    } finally {
        if (sqlResult != null) {
            SqlResult.enqueue(sqlResult);
        }
    }

    // must return null, because we're not using views!
    return null;
}
From source file:javazoom.jlgui.player.amp.Player.java
/**
 * Open callback, stream is ready to play.
 */
public void opened(Object stream, Map properties) {
    audioInfo = properties;
    log.debug(properties.toString());
}
From source file:org.apache.cloudstack.region.RegionManagerImpl.java
/**
 * {@inheritDoc}
 */
@Override
public Account updateAccount(UpdateAccountCmd cmd) {
    Long accountId = cmd.getId();
    Long domainId = cmd.getDomainId();
    DomainVO domain = _domainDao.findById(domainId);
    String accountName = cmd.getAccountName();
    String newAccountName = cmd.getNewName();
    String networkDomain = cmd.getNetworkDomain();
    // ToDo send details
    Map<String, String> details = cmd.getDetails();

    Account account = null;
    if (accountId != null) {
        account = _accountDao.findById(accountId);
    } else {
        account = _accountDao.findEnabledAccount(accountName, domainId);
    }

    // Check if account exists
    if (account == null || account.getType() == Account.ACCOUNT_TYPE_PROJECT) {
        s_logger.error("Unable to find account by accountId: " + accountId + " OR by name: " + accountName
                + " in domain " + domainId);
        throw new InvalidParameterValueException("Unable to find account by accountId: " + accountId
                + " OR by name: " + accountName + " in domain " + domainId);
    }

    String command = "updateAccount";
    List<NameValuePair> params = new ArrayList<NameValuePair>();
    params.add(new NameValuePair(ApiConstants.NEW_NAME, newAccountName));
    params.add(new NameValuePair(ApiConstants.ID, account.getUuid()));
    params.add(new NameValuePair(ApiConstants.ACCOUNT, accountName));
    params.add(new NameValuePair(ApiConstants.DOMAIN_ID, domain.getUuid()));
    params.add(new NameValuePair(ApiConstants.NETWORK_DOMAIN, networkDomain));
    // note: this repeats the NEW_NAME pair that was already added above
    params.add(new NameValuePair(ApiConstants.NEW_NAME, newAccountName));
    if (details != null) {
        // the details map is serialized with Map.toString(), i.e. {key=value, ...}
        params.add(new NameValuePair(ApiConstants.ACCOUNT_DETAILS, details.toString()));
    }

    int regionId = account.getRegionId();
    if (getId() == regionId) {
        Account updatedAccount = _accountMgr.updateAccount(cmd);
        if (updatedAccount != null) {
            List<RegionVO> regions = _regionDao.listAll();
            for (Region region : regions) {
                if (region.getId() == getId()) {
                    continue;
                }
                params.add(new NameValuePair(ApiConstants.IS_PROPAGATE, "true"));
                if (RegionsApiUtil.makeAPICall(region, command, params)) {
                    s_logger.debug("Successfully updated account :" + account.getUuid() + " in Region: "
                            + region.getId());
                } else {
                    s_logger.error("Error while updating account :" + account.getUuid() + " in Region: "
                            + region.getId());
                }
            }
        }
        return updatedAccount;
    } else {
        // First update in the Region where account is created
        Region region = _regionDao.findById(regionId);
        RegionAccount updatedAccount = RegionsApiUtil.makeAccountAPICall(region, command, params);
        if (updatedAccount != null) {
            Long id = _identityDao.getIdentityId("account", updatedAccount.getUuid());
            updatedAccount.setId(id);
            Long domainID = _identityDao.getIdentityId("domain", updatedAccount.getDomainUuid());
            updatedAccount.setDomainId(domainID);
            s_logger.debug("Successfully updated account :" + account.getUuid() + " in source Region: "
                    + region.getId());
            return updatedAccount;
        } else {
            throw new CloudRuntimeException("Error while updating account :" + account.getUuid()
                    + " in source Region: " + region.getId());
        }
    }
}
From source file:gtu._work.ui.ObnfInsertCreaterUI.java
private void processSqlBtnAction() {
    Map<String, String> wkDataObjectMapCopy = new LinkedHashMap<String, String>();
    Map<String, String> wkKeyMapCopy = new LinkedHashMap<String, String>();

    DefaultListModel dbFieldListModel = (DefaultListModel) dbFieldList.getModel();
    for (Enumeration<?> enu = dbFieldListModel.elements(); enu.hasMoreElements();) {
        KeyValue kv = (KeyValue) enu.nextElement();
        if (kv.pk == false) {
            wkDataObjectMapCopy.put(kv.key, kv.value);
        } else {
            wkKeyMapCopy.put(kv.key, kv.value);
        }
    }

    if (useDomainJarDefineChkBox.isSelected()) {
        loadJarConfig();
        StringBuffer sb = new StringBuffer();
        sb.append("PK=>\n");
        this.keepKey(wkKeyMapCopy, test.getPkColumns(), sb);
        sb.append("COLUMN=>\n");
        this.keepKey(wkDataObjectMapCopy, test.getColumns(), sb);
        JCommonUtil._jOptionPane_showMessageDialog_info(sb);

        for (String key : wkKeyMapCopy.keySet()) {
            if (StringUtils.isBlank(wkKeyMapCopy.get(key)) && //
                    wkDataObjectMapCopy.containsKey(key) && //
                    StringUtils.isNotBlank(wkDataObjectMapCopy.get(key))) {
                System.out.println("##pk = " + key + " = " + wkDataObjectMapCopy.get(key));
                wkKeyMapCopy.put(key, wkDataObjectMapCopy.get(key));
            }
        }
    }

    System.out.println("wkKeyMapCopy = " + wkKeyMapCopy);
    System.out.println("wkDataObjectMapCopy = " + wkDataObjectMapCopy);

    // build the WHERE clause by rewriting Map.toString() output,
    // e.g. {ID=1, NAME=x} -> ID='1' and NAME='x'
    String whereCondition = "";
    if (!wkKeyMapCopy.isEmpty()) {
        System.out.println("====> wkKeyMapCopy ? !!!!!!");
        whereCondition = wkKeyMapCopy.toString();
    } else {
        whereCondition = wkDataObjectMapCopy.toString();
    }
    whereCondition = whereCondition.replaceAll(",", "' and ");
    whereCondition = whereCondition.replaceAll("=", "='");
    whereCondition = whereCondition.substring(1);
    whereCondition = whereCondition.substring(0, whereCondition.length() - 1);
    whereCondition = whereCondition + "'";
    String selectSQL = "select * from " + tableName + " where " + whereCondition + ";\n\n";

    // build the SET clause the same way, e.g. {NAME=x} -> NAME='x'
    String updateSetStr = wkDataObjectMapCopy.toString();
    updateSetStr = updateSetStr.replaceAll(",", "' ,");
    updateSetStr = updateSetStr.replaceAll("=", "='");
    updateSetStr = updateSetStr.substring(1);
    updateSetStr = updateSetStr.substring(0, updateSetStr.length() - 1);
    updateSetStr = updateSetStr + "'";
    String updateSQL = "update " + tableName + " set " + updateSetStr + " where " + whereCondition + ";\n\n";

    List<String> insertFieldList = new ArrayList<String>();
    List<String> insertValueList = new ArrayList<String>();
    for (String key : wkDataObjectMapCopy.keySet()) {
        insertFieldList.add(key);
        insertValueList.add(wkDataObjectMapCopy.get(key));
    }
    // List.toString() gives "[a, b, c]"; trim the brackets and quote the values
    String inf = insertFieldList.toString();
    inf = inf.replaceAll(" ", "");
    inf = inf.substring(1, inf.length() - 1);
    String inv = insertValueList.toString();
    inv = inv.replaceAll(" ", "");
    inv = inv.replaceAll(",", "','");
    inv = inv.substring(1, inv.length() - 1);
    inv = "'" + inv + "'";
    String insertSQL = "insert into " + tableName + " (" + inf + ") values (" + inv + ");\n\n";

    insertSqlArea.setText(selectSQL + updateSQL + insertSQL);
}
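The WHERE and SET clauses above are produced by regex surgery on Map.toString(), which breaks as soon as a key or value itself contains ',' or '='. A hedged sketch of a helper (hypothetical name joinCondition) that joins the entries explicitly instead; like the original, it makes no attempt to escape single quotes in values:

// Hypothetical helper: builds "K1='v1' and K2='v2'" (or a comma-joined SET list)
// directly from the entries rather than rewriting Map.toString().
private static String joinCondition(Map<String, String> map, String separator) {
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, String> e : map.entrySet()) {
        if (sb.length() > 0) {
            sb.append(separator);
        }
        sb.append(e.getKey()).append("='").append(e.getValue()).append("'");
    }
    return sb.toString();
}

// usage: joinCondition(wkKeyMapCopy, " and ")        for the where clause,
//        joinCondition(wkDataObjectMapCopy, " , ")   for the update set clause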