List of usage examples for java.util.Map.containsValue
boolean containsValue(Object value);
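Before the project examples below, here is a minimal standalone sketch of the containsValue contract: it returns true if at least one entry in the map has the given value (compared with equals), and for hash-based maps this is a linear scan over all entries. The map contents and class name here are made up purely for illustration.

import java.util.HashMap;
import java.util.Map;

public class ContainsValueDemo {
    public static void main(String[] args) {
        // Hypothetical data, loosely modeled on the segment-state example below
        Map<String, String> segmentStates = new HashMap<>();
        segmentStates.put("segment_0", "ONLINE");
        segmentStates.put("segment_1", "OFFLINE");

        // true: at least one entry maps to "ONLINE"
        System.out.println(segmentStates.containsValue("ONLINE"));

        // false: no entry has this value
        System.out.println(segmentStates.containsValue("CONSUMING"));
    }
}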
From source file:com.linkedin.pinot.integration.tests.HybridClusterIntegrationTest.java
@BeforeClass
public void setUp() throws Exception {
    // Clean up
    ensureDirectoryExistsAndIsEmpty(_tmpDir);
    ensureDirectoryExistsAndIsEmpty(_segmentDir);
    ensureDirectoryExistsAndIsEmpty(_tarDir);

    // Start Zk, Kafka and Pinot
    startHybridCluster();

    // Unpack the Avro files
    TarGzCompressionUtils.unTar(new File(TestUtils.getFileFromResourceUrl(OfflineClusterIntegrationTest.class
            .getClassLoader().getResource("On_Time_On_Time_Performance_2014_100k_subset_nonulls.tar.gz"))), _tmpDir);

    _tmpDir.mkdirs();

    final List<File> avroFiles = getAllAvroFiles();

    File schemaFile = getSchemaFile();
    schema = Schema.fromFile(schemaFile);
    addSchema(schemaFile, schema.getSchemaName());

    final List<String> invertedIndexColumns = makeInvertedIndexColumns();
    final String sortedColumn = makeSortedColumn();

    // Create Pinot table
    addHybridTable("mytable", "DaysSinceEpoch", "daysSinceEpoch", KafkaStarterUtils.DEFAULT_ZK_STR, KAFKA_TOPIC,
            schema.getSchemaName(), TENANT_NAME, TENANT_NAME, avroFiles.get(0), sortedColumn,
            invertedIndexColumns, null);
    LOGGER.info("Running with Sorted column=" + sortedColumn + " and inverted index columns = "
            + invertedIndexColumns);

    // Create a subset of the first 8 segments (for offline) and the last 6 segments (for realtime)
    final List<File> offlineAvroFiles = getOfflineAvroFiles(avroFiles);
    final List<File> realtimeAvroFiles = getRealtimeAvroFiles(avroFiles);

    // Load data into H2
    ExecutorService executor = Executors.newCachedThreadPool();
    setupH2AndInsertAvro(avroFiles, executor);

    // Create segments from Avro data
    LOGGER.info("Creating offline segments from avro files " + offlineAvroFiles);
    buildSegmentsFromAvro(offlineAvroFiles, executor, 0, _segmentDir, _tarDir, "mytable", false, null);

    // Initialize query generator
    setupQueryGenerator(avroFiles, executor);

    executor.shutdown();
    executor.awaitTermination(10, TimeUnit.MINUTES);

    // Set up a Helix spectator to count the number of segments that are uploaded and unlock the latch
    // once 12 segments are online
    final CountDownLatch latch = new CountDownLatch(1);
    HelixManager manager = HelixManagerFactory.getZKHelixManager(getHelixClusterName(), "test_instance",
            InstanceType.SPECTATOR, ZkStarter.DEFAULT_ZK_STR);
    manager.connect();
    manager.addExternalViewChangeListener(new ExternalViewChangeListener() {
        @Override
        public void onExternalViewChange(List<ExternalView> externalViewList, NotificationContext changeContext) {
            for (ExternalView externalView : externalViewList) {
                if (externalView.getId().contains("mytable")) {
                    Set<String> partitionSet = externalView.getPartitionSet();
                    if (partitionSet.size() == offlineSegmentCount) {
                        int onlinePartitionCount = 0;
                        for (String partitionId : partitionSet) {
                            Map<String, String> partitionStateMap = externalView.getStateMap(partitionId);
                            if (partitionStateMap.containsValue("ONLINE")) {
                                onlinePartitionCount++;
                            }
                        }
                        if (onlinePartitionCount == offlineSegmentCount) {
                            System.out.println("Got " + offlineSegmentCount
                                    + " online tables, unlatching the main thread");
                            latch.countDown();
                        }
                    }
                }
            }
        }
    });

    // Upload the segments
    int i = 0;
    for (String segmentName : _tarDir.list()) {
        System.out.println("Uploading segment " + (i++) + " : " + segmentName);
        File file = new File(_tarDir, segmentName);
        FileUploadUtils.sendSegmentFile("localhost", "8998", segmentName, new FileInputStream(file),
                file.length());
    }

    // Wait for all offline segments to be online
    latch.await();

    // Load realtime data into Kafka
    LOGGER.info("Pushing data from realtime avro files " + realtimeAvroFiles);
    pushAvroIntoKafka(realtimeAvroFiles, KafkaStarterUtils.DEFAULT_KAFKA_BROKER, KAFKA_TOPIC);

    // Wait until the Pinot event count matches with the number of events in the Avro files
    int pinotRecordCount, h2RecordCount;
    long timeInFiveMinutes = System.currentTimeMillis() + 5 * 60 * 1000L;

    Statement statement = _connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    statement.execute("select count(*) from mytable");
    ResultSet rs = statement.getResultSet();
    rs.first();
    h2RecordCount = rs.getInt(1);
    rs.close();

    waitForRecordCountToStabilizeToExpectedCount(h2RecordCount, timeInFiveMinutes);
}
From source file:com.hp.alm.ali.idea.content.taskboard.BacklogItemPanel.java
public void applyFilter() {
    Map<Entity, Boolean> matches = matches();
    if (matches.containsValue(true)) {
        setHighlight(this, matches.get(item), filter.getFilter());
        for (TaskPanel taskPanel : tasks.values()) {
            Entity task = taskPanel.getTask();
            setHighlight(taskPanel, matches.get(task), filter.getFilter());
        }
        getTaskContent().setVisible(true);
        setVisible(true);
    } else {
        getTaskContent().setVisible(false);
        setVisible(false);
    }
}
From source file:com.haulmont.idp.controllers.IdpController.java
@PostMapping(value = "/auth", produces = "application/json; charset=UTF-8")
@ResponseBody
public AuthResponse authenticate(@RequestBody AuthRequest auth,
        @CookieValue(value = CUBA_IDP_COOKIE_NAME, defaultValue = "") String idpSessionCookie,
        HttpServletResponse response) {
    String serviceProviderUrl = auth.getServiceProviderUrl();
    if (!Strings.isNullOrEmpty(serviceProviderUrl)
            && !idpConfig.getServiceProviderUrls().contains(serviceProviderUrl)) {
        log.warn("Incorrect serviceProviderUrl {} passed, will be used default", serviceProviderUrl);
        serviceProviderUrl = null;
    }

    if (Strings.isNullOrEmpty(serviceProviderUrl)) {
        if (!idpConfig.getServiceProviderUrls().isEmpty()) {
            serviceProviderUrl = idpConfig.getServiceProviderUrls().get(0);
        } else {
            log.error("IDP property cuba.idp.serviceProviderUrls is not set");
            response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value());
            return null;
        }
    }

    Locale sessionLocale = null;
    if (globalConfig.getLocaleSelectVisible() && auth.getLocale() != null) {
        Map<String, Locale> availableLocales = globalConfig.getAvailableLocales();
        Locale requestedLocale = Locale.forLanguageTag(auth.getLocale());

        if (availableLocales.containsValue(requestedLocale)) {
            sessionLocale = requestedLocale;
        }
    }
    if (sessionLocale == null) {
        sessionLocale = messageTools.getDefaultLocale();
    }

    if (!Strings.isNullOrEmpty(idpSessionCookie)) {
        boolean loggedOut = idpService.logout(idpSessionCookie);
        if (loggedOut) {
            log.info("Logged out IDP session {}", idpSessionCookie);
            logoutCallbackInvoker.performLogoutOnServiceProviders(idpSessionCookie);
        }
    }

    IdpService.IdpLoginResult loginResult;
    try {
        loginResult = idpService.login(auth.getUsername(), passwordEncryption.getPlainHash(auth.getPassword()),
                sessionLocale, ImmutableMap.of(ClientType.class.getName(), ClientType.WEB.name()));
    } catch (LoginException e) {
        // remove auth cookie
        Cookie cookie = new Cookie(CUBA_IDP_COOKIE_NAME, "");
        cookie.setMaxAge(0);
        response.addCookie(cookie);

        log.warn("Unable to login user {}", auth.getUsername());
        return AuthResponse.failed("invalid_credentials");
    }

    if (loginResult.getSessionId() != null) {
        Cookie idpCookie = new Cookie(CUBA_IDP_COOKIE_NAME, loginResult.getSessionId());
        idpCookie.setMaxAge(idpConfig.getIdpCookieMaxAge());
        idpCookie.setHttpOnly(idpConfig.getIdpCookieHttpOnly());
        response.addCookie(idpCookie);
    }

    String serviceProviderRedirectUrl;
    try {
        URIBuilder uriBuilder = new URIBuilder(serviceProviderUrl);
        if ("client-ticket".equals(auth.getResponseType())) {
            uriBuilder.setFragment(CUBA_IDP_TICKET_PARAMETER + "=" + loginResult.getServiceProviderTicket());
        } else {
            uriBuilder.setParameter(CUBA_IDP_TICKET_PARAMETER, loginResult.getServiceProviderTicket());
        }
        serviceProviderRedirectUrl = uriBuilder.build().toString();
    } catch (URISyntaxException e) {
        return AuthResponse.failed("invalid_params");
    }

    log.info("Logged in IDP session with ticket {}, user: {}", loginResult.getServiceProviderTicket(),
            auth.getUsername());

    return AuthResponse.authenticated(serviceProviderRedirectUrl);
}
From source file:org.egov.ptis.repository.dashboard.RevenueDashboardRepository.java
public List<Map<String, Object>> revenueTrendForTheWeek() {
    final Query qry = getQuery("revenue.ptis.collectiontrend");
    final DateTime currentDate = new DateTime();
    qry.setParameter("fromDate", startOfGivenDate(currentDate.minusDays(6)).toDate());
    qry.setParameter("toDate", endOfGivenDate(currentDate).toDate());
    final List<Object[]> revenueData = qry.list();
    final List<Map<String, Object>> currentYearTillDays = constructDayPlaceHolder(currentDate.minusDays(6),
            currentDate, "E-dd", "EEEE, dd MMM yyyy");
    for (final Object[] revnueObj : revenueData)
        for (final Map<String, Object> mapdata : currentYearTillDays)
            if (mapdata.containsValue(
                    org.apache.commons.lang.StringUtils.capitalize(String.valueOf(revnueObj[0]).toLowerCase())))
                mapdata.put("y", Double.valueOf(String.valueOf(revnueObj[1])));
    return currentYearTillDays;
}
From source file:org.polymap.kaps.ui.form.DefaultEntityFormEditorPage.java
@Override
public void createFormContent(final IFormEditorPageSite site) {
    super.createFormContent(site);

    String objectName = "";
    if (composite instanceof SchlNamed) {
        objectName = ((SchlNamed) composite).schl().get();
    }
    site.setEditorTitle(formattedTitle(editorTitle, objectName, null));
    site.setFormTitle(formattedTitle(editorTitle, objectName, getTitle()));

    Composite parent = site.getPageBody();
    Composite lastLine = null;

    EntityType<?> entityType = composite.getEntityType();

    // sort after labeling
    Map<String, String> labels = new TreeMap<String, String>();
    for (String propertyName : propertyNames) {
        labels.put(labelFor(propertyName), propertyName);
    }

    // special handling for the key schl
    if (composite instanceof SchlNamed && labels.containsValue("schl")) {
        lastLine = newFormField(labelFor("schl")).setEnabled(((SchlNamed) composite).schl().get() == null)
                .setProperty(new PropertyAdapter(((SchlNamed) composite).schl()))
                .setField(new StringFormField()).setValidator(new NotNullValidator())
                .setLayoutData(left().top(lastLine).create()).create();
    }

    for (String label : labels.keySet()) {
        String propertyName = labels.get(label);
        if ("schl".equals(propertyName)) {
            continue;
        }
        Property property = entityType.getProperty(propertyName);
        // if (!(property instanceof EntityType.ManyAssociation)) {
        Class propertyType = property.getType();

        Object delegate = null;
        try {
            Method m = composite.getClass().getMethod(property.getName(), new Class[0]);
            delegate = m.invoke(composite, new Object[0]);
        } catch (Exception e) {
            throw new IllegalStateException("this must never be thrown", e);
        }

        // lastLine = newFormField( "Schlüssel" ).setEnabled( composite.schl().get() == null )
        //         .setProperty( new PropertyAdapter( composite.schl() ) ).setField( new StringFormField() )
        //         .setValidator( new NotNullValidator() ).setLayoutData( left().top( lastLine ).create() ).create();
        //
        // lastLine = newFormField( "Bezeichung" ).setProperty( new PropertyAdapter( composite.name() ) )
        //         .setValidator( new NotNullValidator() ).setField( new StringFormField() )
        //         .setLayoutData( left().top( lastLine ).create() ).create();
        //
        // lastLine = lastLine;
        // lastLine = newFormField( "Gebäudeart" ).setToolTipText( "Gebäudeart entsprechend Statistischem Bundesamt" )
        //         .setProperty( new AssociationAdapter<GebaeudeArtStaBuComposite>( composite.gebaeudeArtStabu() ) )
        //         .setField( namedAssocationsPicklist( GebaeudeArtStaBuComposite.class ) )
        //         .setLayoutData( left().top( lastLine ).create() ).create();

        FormFieldBuilder fieldBuilder = newFormField(label).setToolTipText(tooltipFor(propertyName))
                .setLayoutData(left().top(lastLine).create());
        if (String.class.isAssignableFrom(propertyType)) {
            fieldBuilder.setProperty(new PropertyAdapter((org.qi4j.api.property.Property) delegate))
                    .setField(new StringFormField());
        } else if (Integer.class.isAssignableFrom(propertyType)) {
            fieldBuilder.setProperty(new PropertyAdapter((org.qi4j.api.property.Property) delegate))
                    .setField(new StringFormField()).setValidator(new MyNumberValidator(Integer.class));
        } else if (Double.class.isAssignableFrom(propertyType)) {
            fieldBuilder.setProperty(new PropertyAdapter((org.qi4j.api.property.Property) delegate))
                    .setField(new StringFormField()).setValidator(new MyNumberValidator(Double.class, 2));
        } else if (Date.class.isAssignableFrom(propertyType)) {
            fieldBuilder.setProperty(new PropertyAdapter((org.qi4j.api.property.Property) delegate))
                    .setField(new DateTimeFormField());
        } else if (Boolean.class.isAssignableFrom(propertyType)) {
            fieldBuilder.setProperty(new PropertyAdapter((org.qi4j.api.property.Property) delegate))
                    .setField(new CheckboxFormField());
        } else if (Named.class.isAssignableFrom(propertyType)) {
            fieldBuilder
                    .setProperty(new AssociationAdapter((org.qi4j.api.entity.association.Association) delegate))
                    .setField(namedAssocationsPicklist(propertyType));
        }
        lastLine = fieldBuilder.create();
    }
}
From source file:org.codice.ddf.persistence.internal.PersistentStoreImpl.java
@Override
// Input Map is expected to have the suffixes on the key names
public void add(String type, Map<String, Object> properties) throws PersistenceException {
    LOGGER.debug("type = {}", type);
    if (type == null || type.isEmpty()) {
        return;
    }
    if (properties == null || properties.isEmpty() || properties.containsValue("guest")) {
        return;
    }
    LOGGER.debug("Adding entry of type {}", type);

    // Set Solr Core name to type and create/connect to Solr Core
    SolrClient coreSolrClient = getSolrCore(type);
    if (coreSolrClient == null) {
        return;
    }
    Date now = new Date();
    //DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
    //String createdDate = df.format(now);
    SolrInputDocument solrInputDocument = new SolrInputDocument();
    solrInputDocument.addField("createddate_tdt", now);
    for (Map.Entry<String, Object> entry : properties.entrySet()) {
        solrInputDocument.addField(entry.getKey(), entry.getValue());
    }
    try {
        UpdateResponse response = coreSolrClient.add(solrInputDocument);
        LOGGER.debug("UpdateResponse from add of SolrInputDocument: {}", response);
    } catch (SolrServerException e) {
        LOGGER.info("SolrServerException while adding Solr index for persistent type {}", type, e);
        doRollback(coreSolrClient, type);
        throw new PersistenceException(
                "SolrServerException while adding Solr index for persistent type " + type, e);
    } catch (IOException e) {
        LOGGER.info("IOException while adding Solr index for persistent type {}", type, e);
        doRollback(coreSolrClient, type);
        throw new PersistenceException("IOException while adding Solr index for persistent type " + type, e);
    } catch (RuntimeException e) {
        LOGGER.info("RuntimeException while adding Solr index for persistent type {}", type, e);
        doRollback(coreSolrClient, type);
        throw new PersistenceException("RuntimeException while adding Solr index for persistent type " + type,
                e);
    }
}
From source file:edu.umd.ks.cm.util.spring.CmToSisExportAdvice.java
@Transactional(readOnly = false, noRollbackFor = { DoesNotExistException.class }, rollbackFor = { Throwable.class })
public void updateSisCourseInfoCluSetUpdate(ProceedingJoinPoint pjp, ContextInfo contextInfo) throws Throwable {
    if (true) {
        return;
    }
    // If the enablePushToSis environment variable is false, do not write course to SIS
    // (allows us to turn off push for public environment)
    if (!enablePushToSis) {
        return;
    }

    Object[] args = pjp.getArgs();
    String newCluSetId = (String) args[0];           // Modified cluSetId
    CluSetInfo newCluSetInfo = (CluSetInfo) args[1]; // Modified cluSetInfo

    // Make sure it's a CluSet we care about (Hardcoded)
    String cluSetName = newCluSetInfo.getName();

    // "cluSetName" will now be a long description name (was just the code before)
    // So, get and check the new map which contains hardcoded set description names.
    Map<String, String> CoreGenCluSetCodeToDescriptionMap = coreGenedClusetMapper
            .getCodeToDescriptionMap(contextInfo);
    Boolean weCare = CoreGenCluSetCodeToDescriptionMap.containsValue(cluSetName);

    if (weCare) {
        // Obtain new Ids
        Set<String> newCluIds = new HashSet<String>(newCluSetInfo.getCluIds());
        List<String> listNewCluIds = newCluSetInfo.getCluIds();

        // Obtain old ("current") Ids via luService call
        List<String> listOldCluIds = luService.getAllCluIdsInCluSet(newCluSetId, contextInfo);
        Set<String> oldCluIds = new HashSet<String>(listOldCluIds);

        // Removed Courses (old - new)
        Set<String> removedCluIds = new HashSet<String>(oldCluIds);
        removedCluIds.removeAll(newCluIds);
        System.out.println("Removed these clu IDs: " + removedCluIds);
        for (String cluId : removedCluIds) {
            // Translate from VerIndId to current Ver Id to get current courseInfo obj
            VersionDisplayInfo vdi = courseService
                    .getCurrentVersion(CourseServiceConstants.COURSE_NAMESPACE_URI, cluId, contextInfo);
            CourseInfo courseInfo = courseService.getCourse(vdi.getId(), contextInfo);
            //sisCmDao.updateSisCourseInfo(courseInfo, "P");//FIXME we should test to see if there is a pushed record before we update vs create
        }

        // Added Courses (new - old)
        Set<String> addedCluIds = new HashSet<String>(newCluIds);
        addedCluIds.removeAll(oldCluIds);
        System.out.println("Added these clu IDs: " + addedCluIds);
        for (String cluId : addedCluIds) {
            // Translate from VerIndId to current Ver Id to get current courseInfo obj
            VersionDisplayInfo vdi = courseService
                    .getCurrentVersion(CourseServiceConstants.COURSE_NAMESPACE_URI, cluId, contextInfo);
            CourseInfo courseInfo = courseService.getCourse(vdi.getId(), contextInfo);
            //sisCmDao.updateSisCourseInfo(courseInfo, "P");//FIXME we should test to see if there is a pushed record before we update vs create
        }
    } // end if weCare
}
From source file:org.egov.pgr.dashboard.service.DashboardService.java
public List<Map<String, Object>> getMonthlyAggregate() {
    DateTime currentDate = new DateTime();
    List<Map<String, Object>> dataHolder = constructMonthPlaceHolder(currentDate.minusMonths(6), currentDate,
            "MMM-yyyy");
    for (Object[] compCnt : dashboardRepository.fetchMonthlyAggregateBetween(
            startOfGivenDate(currentDate.minusMonths(6).withDayOfMonth(1)).toDate(),
            endOfGivenDate(currentDate).toDate()))
        for (Map<String, Object> mapdata : dataHolder)
            if (mapdata.containsValue(StringUtils.capitalize(String.valueOf(compCnt[0]).toLowerCase())))
                mapdata.put("y", Integer.valueOf(String.valueOf(compCnt[1])));
    return dataHolder;
}
From source file:io.cloudslang.content.amazon.factory.helpers.NetworkUtils.java
private void setSecondaryPrivateIpAddressCountQueryParams(Map<String, String> queryParamsMap,
        String inputString) {
    if (!queryParamsMap.containsKey(getQueryParamsSpecificString(NETWORK, ONE) + PRIMARY)
            && !queryParamsMap.containsValue(Boolean.FALSE.toString().toLowerCase())) {
        setOptionalMapEntry(queryParamsMap, SECONDARY_PRIVATE_IP_ADDRESS_COUNT, inputString,
                isNotBlank(inputString));
    }
}
From source file:com.flexive.core.search.cmis.impl.sql.generic.GenericInnerJoinConditionTableBuilder.java
/** {@inheritDoc} */
@Override
protected void onLeaveSubCondition(ConditionList.Connective type) {
    if (ConditionList.Connective.AND.equals(type)) {
        assert intersectTableAliases.peek()
                .size() > 1 : "AND subcondition must contain at least two conditions";

        // create select from subtables - need to do this after the conditions have been rendered,
        // because now we know which subcondition maps to which content table (for conditions
        // in joined tables)
        final Map<TableReference, String> selectedTables = selectFromConjunction();

        // append join conditions
        boolean hasPrevious = false;
        for (Map.Entry<String, List<TableReference>> entry : intersectTableReferences.peek().entrySet()) {
            final String alias = entry.getKey();
            if (selectedTables.containsValue(alias)) {
                // this table is already selected, thus it does not need to be joined
                continue;
            }
            for (TableReference reference : entry.getValue()) {
                if (hasPrevious) {
                    getOut().append(" AND ");
                } else {
                    getOut().append(" WHERE ");
                    hasPrevious = true;
                }

                // link subcondition tables through the first table
                final List<String> columns = new ArrayList<String>(joinedTables.getTableAliases().size() * 2);

                // link to selected table
                final String table = selectedTables.get(reference);
                if (table != null) {
                    // if table was null, the corresponding table reference is not present in this subcondition
                    // and does not need to / cannot be joined
                    // TODO: this breaks with nested subconditions, because their selected tables do not appear in intersectTableReferences
                    assert !table.equals(alias) : "Table cannot be joined to itself";
                    columns.add(joinCondition(table, alias, reference.getIdFilterColumn()));
                    columns.add(joinCondition(table, alias, reference.getVersionFilterColumn()));
                }

                getOut().append(StringUtils.join(columns, " AND "));
            }
        }
        getOut().append(')');

        intersectTableAliases.pop();
        intersectTableReferences.pop();
    } else {
        super.onLeaveSubCondition(type);
    }
}