List of usage examples for java.util Collections singletonMap
public static <K, V> Map<K, V> singletonMap(K key, V value)
From source file: api.QuizResource.java
/**
 * Accepts a quiz answer submission and reports whether it was correct.
 *
 * @param chapter chapter number of the quiz question
 * @param number  question number within the chapter
 * @param answers answer values supplied via the "answer" query parameter
 * @return a JSON string of the form {"isCorrect": true|false}, or a JSON
 *         error payload when the user is not logged in or serialization fails
 */
@GET
@Path("submit/{chapter}/{number}")
@Produces(MediaType.APPLICATION_JSON)
public String submitAnswer(@PathParam("chapter") Integer chapter, @PathParam("number") Integer number,
        @QueryParam("answer") List<String> answers) {
    // Reject anonymous callers before touching the quiz logic.
    if (!userBean.isLoggedIn()) {
        return jsonError("Gotta be logged in bruv.");
    }
    boolean isCorrect = quizBean.submitAnswer(chapter, number, answers, userBean);
    ObjectMapper jsonMapper = new ObjectMapper();
    try {
        return jsonMapper.writeValueAsString(Collections.singletonMap("isCorrect", isCorrect));
    } catch (JsonProcessingException ex) {
        ex.printStackTrace();
    }
    // Serialization failed; fall through to a generic error payload.
    return jsonError("Unknown json failure :(");
}
From source file: com.liveramp.cascading_ext.CascadingUtil.java
private Map<String, String> getSerializationsProperty() { // Get the existing serializations List<String> strings = new ArrayList<String>(); String existing = new JobConf().get("io.serializations"); if (existing != null) { strings.add(existing);//from w w w .j a va 2s .com } // Append our custom serializations for (Class<? extends Serialization> klass : serializations) { strings.add(klass.getName()); } return Collections.singletonMap("io.serializations", StringUtils.join(strings, ",")); }
From source file: com.insys.cfclient.nozzle.InfluxDBSender.java
/**
 * Delivers a batch of log messages to InfluxDB asynchronously, retrying
 * connection-level failures with back-off.
 *
 * @param messages the lines to send; they are concatenated newline-separated
 *                 into a single POST body
 */
@Async
public void sendBatch(List<String> messages) {
    log.debug("ENTER sendBatch");
    // Treat any status > 399 as an error; handleError is intentionally a
    // no-op so the explicit status check below drives failure handling
    // instead of an exception.
    httpClient.setErrorHandler(new ResponseErrorHandler() {
        @Override
        public boolean hasError(ClientHttpResponse clientHttpResponse) throws IOException {
            return clientHttpResponse.getRawStatusCode() > 399;
        }

        @Override
        public void handleError(ClientHttpResponse clientHttpResponse) throws IOException {
        }
    });
    // Retry only on ResourceAccessException (connection failures), up to the
    // configured maximum, with the configured back-off between attempts.
    RetryTemplate retryable = new RetryTemplate();
    retryable.setBackOffPolicy(getBackOffPolicy());
    retryable.setRetryPolicy(new SimpleRetryPolicy(properties.getMaxRetries(),
            Collections.singletonMap(ResourceAccessException.class, true)));
    final AtomicInteger counter = new AtomicInteger(0);
    retryable.execute(retryContext -> {
        int count = counter.incrementAndGet();
        log.trace("Attempt {} to deliver this batch", count);
        // Join all messages into one newline-delimited request body.
        final StringBuilder builder = new StringBuilder();
        messages.forEach(s -> builder.append(s).append("\n"));
        String body = builder.toString();
        RequestEntity<String> entity = new RequestEntity<>(body, HttpMethod.POST, getUri());
        ResponseEntity<String> response;
        response = httpClient.exchange(entity, String.class);
        // A successful InfluxDB write answers 204 No Content; anything else
        // is logged but not retried (only ResourceAccessException retries).
        if (response.getStatusCode() != HttpStatus.NO_CONTENT) {
            log.error("Failed to write logs to InfluxDB! Expected error code 204, got {}",
                    response.getStatusCodeValue());
            log.trace("Request Body: {}", body);
            log.trace("Response Body: {}", response.getBody());
        } else {
            log.debug("batch sent successfully!");
        }
        log.debug("EXIT sendBatch");
        return null;
    }, recoveryContext -> {
        // Recovery callback: all retries exhausted -- log and drop the batch.
        log.trace("Failed after {} attempts!", counter.get());
        return null;
    });
}
From source file: com.amazonaws.sample.entitlement.rs.JaxRsEntitlementService.java
/**
 * Requests the sessions belonging to the user identified by the
 * Authorization header.
 *
 * @param authorization string that is associated to the identity of a user
 * @return 200 with prettified JSON sessions on success; 401 (optionally with
 *         a WWW-Authenticate challenge) when authorization fails; 409 when
 *         the application is in a bad state
 */
@GET
@Path("/sessions/")
@Produces(MediaType.APPLICATION_JSON)
public Response requestSessions(@HeaderParam("Authorization") String authorization) {
    try {
        Item user = entitlementService.getUserFromAuthorization(authorization);
        return response(Status.OK, entitlementService.getUserSessions(user));
    } catch (AuthorizationException e) {
        // Propagate the server's challenge header when one was provided.
        String challenge = e.getAuthenticateHeader();
        if (challenge != null) {
            return response(Status.UNAUTHORIZED, e.getMessage(),
                    Collections.singletonMap("WWW-Authenticate", challenge));
        }
        return response(Status.UNAUTHORIZED, e.getMessage());
    } catch (ApplicationBadStateException e) {
        return response(Status.CONFLICT, e.getMessage());
    }
}
From source file: hudson.gridmaven.gridlayer.PluginImpl.java
/**
 * Launches Hadoop in a separate JVM.
 *
 * Installs the bundled Hadoop distribution under {@code rootDir/hadoop/dist}
 * if it is not already present, creates the log directory, and starts a new
 * JVM whose classpath holds the Hadoop core jar, its lib jars, and the conf
 * directory.
 *
 * @param rootDir
 *      The slave/master root.
 * @param listener
 *      Receives progress output from the install and launch steps.
 * @return a Channel connected to the newly launched Hadoop JVM
 */
public static /*package*/ Channel createHadoopVM(File rootDir, TaskListener listener)
        throws IOException, InterruptedException {
    // install Hadoop if it's not there
    rootDir = new File(rootDir, "hadoop");
    FilePath distDir = new FilePath(new File(rootDir, "dist"));
    URL u = PluginImpl.class.getResource("hadoop.tar.gz");
    distDir.installIfNecessaryFrom(u, listener, "Hadoop");

    File logDir = new File(rootDir, "logs");
    logDir.mkdirs();

    // "hadoop.log.dir" is handed to the new JVM so Hadoop writes its logs
    // under our managed directory.  NOTE(review): assumed to be passed as a
    // system property by Channels.newJVM -- confirm against that API.
    return Channels.newJVM("Hadoop", listener, null,
            new ClasspathBuilder().addAll(distDir, "hadoop-*-core.jar").addAll(distDir, "lib/**/*.jar")
                    .add(distDir.child("conf")),
            Collections.singletonMap("hadoop.log.dir", logDir.getAbsolutePath()));
}
From source file: io.spring.initializr.actuate.stat.ProjectGenerationStatPublisherTests.java
/**
 * When every delivery attempt fails with a server error, the publisher must
 * give up quietly (logging only) rather than propagate the exception.
 */
@Test
public void fatalErrorOnlyLogs() {
    ProjectRequest request = createProjectRequest();
    // Allow exactly 2 attempts, retrying on any Exception.
    this.retryTemplate
            .setRetryPolicy(new SimpleRetryPolicy(2, Collections.singletonMap(Exception.class, true)));
    // Both attempts receive a 500, exhausting the retry policy.
    this.mockServer.expect(requestTo("http://example.com/elastic/initializr/request"))
            .andExpect(method(HttpMethod.POST)).andRespond(withStatus(HttpStatus.INTERNAL_SERVER_ERROR));
    this.mockServer.expect(requestTo("http://example.com/elastic/initializr/request"))
            .andExpect(method(HttpMethod.POST)).andRespond(withStatus(HttpStatus.INTERNAL_SERVER_ERROR));
    // Must complete without throwing despite the failures.
    this.statPublisher.handleEvent(new ProjectGeneratedEvent(request));
    this.mockServer.verify();
}
From source file: com.sina.app.spout.ClkKafkaSpout.java
/**
 * Pulls up to {@code _bufSize} messages from the Kafka stream into the
 * in-progress map and queues their ids for emission.
 *
 * @return true if at least one message was buffered, false otherwise
 * @throws IllegalStateException if called while earlier messages are still
 *         buffered or pending acknowledgment
 */
protected boolean fillBuffer() {
    if (!_inProgress.isEmpty() || !_queue.isEmpty()) {
        throw new IllegalStateException("cannot fill buffer when buffer or pending messages are non-empty");
    }

    // Lazily create a single consumer stream for our topic on first use
    // (singletonMap(_topic, 1) requests exactly one stream).
    if (_iterator == null) {
        final Map<String, List<KafkaStream<byte[], byte[]>>> streams = _consumer
                .createMessageStreams(Collections.singletonMap(_topic, 1));
        _iterator = streams.get(_topic).get(0).iterator();
    }
    try {
        int size = 0;
        while (size < _bufSize && _iterator.hasNext()) {
            final MessageAndMetadata<byte[], byte[]> message = _iterator.next();
            // Partition + offset uniquely identify a message for later
            // ack/fail tracking.
            final KafkaMessageId id = new KafkaMessageId(message.partition(), message.offset());
            _inProgress.put(id, message.message());
            size++;
        }
    } catch (final ConsumerTimeoutException e) {
        // Deliberately ignored: a consumer timeout just means no further
        // messages are available right now; emit whatever was buffered.
    }

    if (_inProgress.size() > 0) {
        _queue.addAll(_inProgress.keySet());
        LOG.debug("buffer now has {} messages to be emitted", _queue.size());
        return true;
    } else {
        return false;
    }
}
From source file: com.cedac.security.oauth2.provider.client.MongoClientDetailsServiceTests.java
/**
 * Loading a client whose document carries an "additionalInformation"
 * sub-document must surface that data via getAdditionalInformation().
 */
@Test
public void testLoadingClientIdWithAdditionalInformation() {
    // Seed the collection with a client holding one additional-info entry.
    collection.insert(new BasicDBObject("clientId", "clientIdWithAddInfo").append("additionalInformation",
            new BasicDBObject("foo", "bar")));

    ClientDetails clientDetails = fixture.loadClientByClientId("clientIdWithAddInfo");

    assertEquals("clientIdWithAddInfo", clientDetails.getClientId());
    assertEquals(Collections.singletonMap("foo", "bar"), clientDetails.getAdditionalInformation());
}
From source file: org.zenoss.zep.dao.impl.EventDetailsConfigDaoImpl.java
@Override @TransactionalReadOnly// www . jav a 2s. c om public EventDetailItem findByName(String eventDetailName) throws ZepException { final Map<String, String> fields = Collections.singletonMap(COLUMN_DETAIL_ITEM_NAME, eventDetailName); final String sql = "SELECT proto_json FROM event_detail_index_config WHERE detail_item_name=:detail_item_name"; final List<EventDetailItem> items = this.template.query(sql, new RowMapper<EventDetailItem>() { @Override public EventDetailItem mapRow(ResultSet rs, int rowNum) throws SQLException { return DaoUtils.protobufFromJson(rs.getString(COLUMN_PROTO_JSON), EventDetailItem.getDefaultInstance()); } }, fields); return (items.isEmpty()) ? null : items.get(0); }