List of usage examples for java.util.Collections.EMPTY_LIST
Field declaration: public static final List EMPTY_LIST
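Collections.EMPTY_LIST is a shared, immutable, raw-typed empty List. Because the field is raw, assigning it to a parameterized variable is an unchecked conversion, which is why several of the examples below carry @SuppressWarnings("unchecked"); the generic Collections.emptyList() factory avoids the warning. A minimal sketch of both forms (this class is illustrative and not taken from any of the projects below):

import java.util.Collections;
import java.util.List;

public class EmptyListSketch {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // Raw field: compiles, but the assignment is an unchecked conversion.
        List<String> raw = Collections.EMPTY_LIST;

        // Generic factory: an immutable empty list (typically the same shared instance),
        // with no unchecked warning because the element type is inferred.
        List<String> typed = Collections.emptyList();

        System.out.println(raw.isEmpty());   // true
        System.out.println(typed.isEmpty()); // true

        // Both are immutable; any mutation throws UnsupportedOperationException.
        try {
            typed.add("x");
        } catch (UnsupportedOperationException expected) {
            System.out.println("empty list is immutable");
        }
    }
}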
From source file: eionet.cr.web.action.TypeSearchActionBean.java

/**
 * Exports search result as a file.
 *
 * @return Resolution.
 * @throws Exception
 */
@SuppressWarnings("unchecked")
public Resolution export() throws Exception {
    logger.trace("************** START EXPORT REQUEST ***********");
    restoreStateFromSession();
    ExportFormat format = ExportFormat.fromName(exportFormat);
    Exporter exporter = Exporter.getExporter(format);
    exporter.setLanguages(getAcceptedLanguages() != null ? getAcceptedLanguages() : Collections.EMPTY_LIST);
    List<Pair<String, String>> columnPairs = new LinkedList<Pair<String, String>>();
    exporter.setExportResourceUri(uriResourceIdentifier);
    selectedColumns = exportColumns == null || exportColumns.isEmpty() ? selectedColumns : exportColumns;
    for (String selectedColumn : selectedColumns) {
        columnPairs.add(new Pair<String, String>(selectedColumn, getAvailableColumns().get(selectedColumn)));
    }
    exporter.setSelectedColumns(columnPairs);
    Map<String, String> filters = new HashMap<String, String>();
    if (selectedFilters != null) {
        filters.putAll(selectedFilters);
    }
    filters.put(Predicates.RDF_TYPE, type);
    exporter.setSelectedFilters(filters);
    getContext().getResponse().setHeader("Content-Disposition", "attachment;filename=" + format.getFilename());
    getContext().getResponse().setHeader("Cache-Control", "no-cache, must-revalidate");
    return new StreamingResolution(format.getContentType(), exporter.export());
}
From source file: pe.gob.mef.gescon.web.ui.PerfilMB.java

public void save(ActionEvent event) {
    try {
        if (CollectionUtils.isEmpty(this.getListaPerfils())) {
            this.setListaPerfils(Collections.EMPTY_LIST);
        }
        Perfil perfil = new Perfil();
        perfil.setVnombre(this.getNombre());
        perfil.setVdescripcion(this.getDescripcion());
        if (!errorValidation(perfil)) {
            LoginMB loginMB = (LoginMB) JSFUtils.getSessionAttribute("loginMB");
            User user = loginMB.getUser();
            PerfilService service = (PerfilService) ServiceFinder.findBean("PerfilService");
            perfil.setNperfilid(service.getNextPK());
            perfil.setVnombre(StringUtils.upperCase(this.getNombre().trim()));
            perfil.setVdescripcion(StringUtils.capitalize(this.getDescripcion().trim()));
            perfil.setNactivo(BigDecimal.ONE);
            perfil.setDfechacreacion(new Date());
            perfil.setVusuariocreacion(user.getVlogin());
            service.saveOrUpdate(perfil);
            this.setListaPerfils(service.getPerfils());
            this.cleanAttributes();
            RequestContext.getCurrentInstance().execute("PF('newDialog').hide();");
        }
    } catch (Exception e) {
        log.error(e.getMessage());
        e.printStackTrace();
    }
}
From source file: deployer.publishers.openshift.OpenShiftWebAppTest.java

private File runSingleInstanceTest(int existing, DeploymentArtifact deploymentArtifact) throws Exception {
    String strExisting = Integer.toString(existing);
    File remoteTestDir = Files.resolve(TestUtils.GIT_TEST_DIR, strExisting, "webappviaOpenshift.git");
    File testDir = Files.resolve(TestUtils.GIT_TEST_DIR, strExisting, "webappviaOpenshift");
    System.out.println(testDir);
    Files.createDirectories(testDir);
    ArtifactHelpers.addFilesToArtifact(deploymentArtifact,
            new JavaWebAppArtifactContentsPublisher().generateEntries(deploymentArtifact));
    TestUtils.AppDirs[] theGits = TestUtils.makeNumberGits(existing, existing + "/webappviaOpenshift");
    Git.init().setBare(true).setDirectory(remoteTestDir).call();
    Git git = Git.cloneRepository().setURI(remoteTestDir.toURI().toString()).setDirectory(testDir).call();

    Rhc rhc = createMock(Rhc.class);
    IApplication instance = createMock(IApplication.class);
    IDomain domain = createMock(IDomain.class);
    IUser mockUser = createMock(IUser.class);
    EzReverseProxy.Client ezReverseProxyClient = createMock(EzReverseProxy.Client.class);
    ThriftClientPool clientPool = createMockPool((existing >= 2 ? existing : 1), "EzBakeFrontend",
            EzReverseProxyConstants.SERVICE_NAME, ezReverseProxyClient);
    EzReverseProxyRegister ezReverseProxyRegister = new EzReverseProxyRegister(
            new EzDeployerConfiguration(new Properties()), clientPool);

    expect(instance.getName()).andReturn("UnitTestApplication").anyTimes();
    expect(instance.getApplicationUrl()).andReturn("http://unit-test.local/").anyTimes();
    expect(domain.getUser()).andReturn(mockUser).anyTimes();
    expect(mockUser.getRhlogin()).andReturn("UnitTestuser").anyTimes();

    if (existing == 0 || existing == 1) {
        expect(rhc.listApplicationInstances(TestUtils.getOpenShiftAppName(),
                TestUtils.getOpenShiftDomainName())).andReturn(Collections.EMPTY_LIST).once();
    } else {
        expect(rhc.listApplicationInstances(TestUtils.getOpenShiftAppName(), TestUtils.getOpenShiftDomainName()))
                .andReturn(TestUtils.getExistingInstances(theGits, instance, domain)).once();
        instance.stop();
        expectLastCall().times(existing - 1);
        instance.destroy();
        expectLastCall().times(existing - 1);
        ezReverseProxyClient.removeUpstreamServerRegistration(
                eqRegistration("example.local/" + TestUtils.SERVICE_NAME + "/", TestUtils.SERVICE_NAME,
                        "unit-test.local:443", ""));
        expectLastCall().times(existing - 1);
    }
    ezReverseProxyClient
            .addUpstreamServerRegistration(eqRegistration("example.local/" + TestUtils.SERVICE_NAME + "/",
                    TestUtils.SERVICE_NAME, "unit-test.local:443", ""));
    expect(rhc.getOrCreateApplication(TestUtils.buildOpenShiftAppName(0), TestUtils.getOpenShiftDomainName(),
            new StandaloneCartridge("jbossas"), ApplicationScale.NO_SCALE, GearProfile.SMALL))
                    .andReturn(new RhcApplication(git, instance, domain, testDir, null)).once();
    expect(instance.getEnvironmentVariable("OPENSHIFT_GEAR_DNS"))
            .andReturn(envVariableValue("OPENSHIFT_GEAR_DNS", "unit-test.local")).anyTimes();
    expect(instance.getEnvironmentVariable("OPENSHIFT_JAVA_THRIFTRUNNER_TCP_PROXY_PORT"))
            .andReturn(envVariableValue("OPENSHIFT_JAVA_THRIFTRUNNER_TCP_PROXY_PORT", "32456")).anyTimes();
    expect(instance.getEnvironmentVariables()).andReturn(new HashMap<String, IEnvironmentVariable>())
            .anyTimes();
    expect(instance.addEnvironmentVariable("EZBAKE_APPLICATION_NAME", TestUtils.APP_NAME))
            .andReturn(envVariableValue("EZBAKE_APPLICATION_NAME", TestUtils.APP_NAME)).anyTimes();
    expect(instance.addEnvironmentVariable("EZBAKE_SERVICE_NAME", TestUtils.SERVICE_NAME))
            .andReturn(envVariableValue("EZBAKE_SERVICE_NAME", TestUtils.SERVICE_NAME)).anyTimes();
    expect(instance.getEmbeddedCartridges()).andReturn(new ArrayList<IEmbeddedCartridge>()).anyTimes();
    IEmbeddedCartridge cart = createMock(IEmbeddedCartridge.class);
    expect(instance.addEmbeddableCartridge(new EmbeddableCartridge("logstash"))).andReturn(cart).anyTimes();
    expect(instance.addEmbeddableCartridge(new EmbeddableCartridge("cron"))).andReturn(cart).anyTimes();

    replay(instance, domain, mockUser, ezReverseProxyClient, rhc);

    EzOpenShiftPublisher publisher = new EzOpenShiftPublisherMock(rhc, ezReverseProxyRegister);
    publisher.publish(deploymentArtifact, ThriftTestUtils.generateTestSecurityToken("U"));

    verify(instance, domain, mockUser, ezReverseProxyClient, rhc, clientPool);
    return testDir;
}
From source file: com.adaptris.core.marshaller.xstream.AliasedElementReflectionConverter.java

protected void doMarshal(final Object source, final HierarchicalStreamWriter writer,
        final MarshallingContext context) {
    final List<FieldInfo> fields = new ArrayList<>();
    final Map<String, java.lang.reflect.Field> defaultFieldDefinition = new HashMap<>();

    // Attributes might be preferred to child elements ...
    reflectionProvider.visitSerializableFields(source, new ReflectionProvider.Visitor() {
        final Set<String> writtenAttributes = new HashSet<>();

        public void visit(String fieldName, Class type, Class definedIn, Object value) {
            if (!mapper.shouldSerializeMember(definedIn, fieldName)) {
                return;
            }
            if (!defaultFieldDefinition.containsKey(fieldName)) {
                Class lookupType = source.getClass();
                // See XSTR-457 and OmitFieldsTest
                if (definedIn != source.getClass() && !mapper.shouldSerializeMember(lookupType, fieldName)) {
                    lookupType = definedIn;
                }
                defaultFieldDefinition.put(fieldName, reflectionProvider.getField(lookupType, fieldName));
            }

            SingleValueConverter converter = mapper.getConverterFromItemType(fieldName, type, definedIn);
            if (converter != null) {
                final String attribute = mapper
                        .aliasForAttribute(mapper.serializedMember(definedIn, fieldName));
                if (value != null) {
                    if (writtenAttributes.contains(fieldName)) { // TODO: use attribute
                        throw new ConversionException("Cannot write field with name '" + fieldName
                                + "' twice as attribute for object of type " + source.getClass().getName());
                    }
                    final String str = converter.toString(value);
                    if (str != null) {
                        writer.addAttribute(attribute, str);
                    }
                }
                writtenAttributes.add(fieldName); // TODO: use attribute
            } else {
                fields.add(new FieldInfo(fieldName, type, definedIn, value));
            }
        }
    });

    new Object() {
        {
            for (Iterator<FieldInfo> fieldIter = fields.iterator(); fieldIter.hasNext();) {
                FieldInfo info = (FieldInfo) fieldIter.next();
                // Check if the field is not null, we don't output null fields
                if (info.value != null) {
                    Mapper.ImplicitCollectionMapping mapping = mapper
                            .getImplicitCollectionDefForFieldName(source.getClass(), info.fieldName);
                    if (mapping != null) {
                        if (context instanceof ReferencingMarshallingContext) {
                            if (info.value != Collections.EMPTY_LIST && info.value != Collections.EMPTY_SET
                                    && info.value != Collections.EMPTY_MAP) {
                                ReferencingMarshallingContext refContext = (ReferencingMarshallingContext) context;
                                refContext.registerImplicit(info.value);
                            }
                        }
                        final boolean isCollection = info.value instanceof Collection;
                        final boolean isMap = info.value instanceof Map;
                        final boolean isEntry = isMap && mapping.getKeyFieldName() == null;
                        final boolean isArray = info.value.getClass().isArray();
                        for (Iterator iter = isArray ? new ArrayIterator(info.value)
                                : isCollection ? ((Collection) info.value).iterator()
                                        : isEntry ? ((Map) info.value).entrySet().iterator()
                                                : ((Map) info.value).values().iterator(); iter.hasNext();) {
                            Object obj = iter.next();
                            final String itemName;
                            final Class itemType;
                            if (obj == null) {
                                itemType = Object.class;
                                itemName = mapper.serializedClass(null);
                            } else if (isEntry) {
                                final String entryName = mapping.getItemFieldName() != null
                                        ? mapping.getItemFieldName()
                                        : mapper.serializedClass(Map.Entry.class);
                                Map.Entry entry = (Map.Entry) obj;
                                ExtendedHierarchicalStreamWriterHelper.startNode(writer, entryName,
                                        entry.getClass());
                                writeItem(entry.getKey(), context, writer);
                                writeItem(entry.getValue(), context, writer);
                                writer.endNode();
                                continue;
                            } else if (mapping.getItemFieldName() != null) {
                                itemType = mapping.getItemType();
                                itemName = mapping.getItemFieldName();
                            } else {
                                itemType = obj.getClass();
                                itemName = mapper.serializedClass(itemType);
                            }
                            writeField(info.fieldName, itemName, itemType, info.definedIn, obj);
                        }
                    }
                    // Field is not an implicit collection
                    else {
                        writeField(info.fieldName, null, info.type, info.definedIn, info.value);
                    }
                }
            }
        }

        // void writeFieldStandard(String fieldName, String aliasName, Class fieldType, Class definedIn, Object newObj) {
        //     Class actualType = newObj != null ? newObj.getClass() : fieldType;
        //     ExtendedHierarchicalStreamWriterHelper.startNode(
        //         writer,
        //         aliasName != null ? aliasName : mapper.serializedMember(
        //             source.getClass(), fieldName), actualType);
        //
        //     // We don't process null fields (field values)
        //     if (newObj != null) {
        //         Class defaultType = mapper.defaultImplementationOf(fieldType);
        //         if (!actualType.equals(defaultType)) {
        //             String serializedClassName = mapper.serializedClass(actualType);
        //             if (!serializedClassName
        //                     .equals(mapper.serializedClass(defaultType))) {
        //                 String attributeName = mapper.aliasForSystemAttribute("class");
        //                 if (attributeName != null) {
        //                     writer.addAttribute(attributeName, serializedClassName);
        //                 }
        //             }
        //         }
        //
        //         final Field defaultField = (Field) defaultFieldDefinition
        //                 .get(fieldName);
        //         if (defaultField.getDeclaringClass() != definedIn) {
        //             String attributeName = mapper.aliasForSystemAttribute("defined-in");
        //             if (attributeName != null) {
        //                 writer.addAttribute(attributeName,
        //                         mapper.serializedClass(definedIn));
        //             }
        //         }
        //
        //         Field field = reflectionProvider.getField(definedIn, fieldName);
        //         marshallField(context, newObj, field);
        //     }
        //     writer.endNode();
        // }

        // Modified version of method from that super class
        void writeField(String fieldName, String aliasName, Class fieldType, Class definedIn, Object newObj) {
            Class<?> actualType = newObj != null ? newObj.getClass() : fieldType;
            String elementName = aliasName != null ? aliasName
                    : mapper.serializedMember(source.getClass(), fieldName);
            String classAttributeName = null;
            String definedAttributeName = null;
            // We don't process null fields (field values)
            if (newObj != null) {
                Class defaultType = mapper.defaultImplementationOf(fieldType);
                if (!actualType.equals(defaultType)) {
                    String serializedClassName = mapper.serializedClass(actualType);
                    if (!serializedClassName.equals(mapper.serializedClass(defaultType))) {
                        classAttributeName = mapper.aliasForSystemAttribute("class");
                    }
                }
                final Field defaultField = (Field) defaultFieldDefinition.get(fieldName);
                if (defaultField.getDeclaringClass() != definedIn) {
                    definedAttributeName = mapper.aliasForSystemAttribute("defined-in");
                }
            }
            writeOutElementBasedOnClassType(elementName, definedIn, actualType, classAttributeName,
                    definedAttributeName);
            if (newObj != null) {
                Field field = reflectionProvider.getField(definedIn, fieldName);
                marshallField(context, newObj, field);
            }
            writer.endNode();
        }

        // Now where the super class would have written out the field name
        // followed by a class attribute that contained the subclass name,
        // we will write out the subclass alias name as the element with no
        // class attribute at all.
        private void writeOutElementBasedOnClassType(String elementName, Class definedIn, Class<?> actualType,
                String classAttributeName, String definedAttributeName) {
            boolean isClassAttributeSet = !StringUtils.isBlank(classAttributeName);
            if (isClassAttributeSet) {
                String serializedClassName = mapper.serializedClass(actualType);
                ExtendedHierarchicalStreamWriterHelper.startNode(writer, serializedClassName, actualType);
            } else {
                String serializedClassName = mapper.serializedClass(actualType);
                ExtendedHierarchicalStreamWriterHelper.startNode(writer, elementName, actualType);
                if (classAttributeName != null) {
                    writer.addAttribute(classAttributeName, serializedClassName);
                }
                if (definedAttributeName != null) {
                    writer.addAttribute(definedAttributeName, mapper.serializedClass(definedIn));
                }
            }
        }

        void writeItem(Object item, MarshallingContext context, HierarchicalStreamWriter writer) {
            if (item == null) {
                String name = mapper.serializedClass(null);
                ExtendedHierarchicalStreamWriterHelper.startNode(writer, name, Mapper.Null.class);
                writer.endNode();
            } else {
                String name = mapper.serializedClass(item.getClass());
                ExtendedHierarchicalStreamWriterHelper.startNode(writer, name, item.getClass());
                context.convertAnother(item);
                writer.endNode();
            }
        }
    };
}
From source file: com.redhat.rhn.frontend.action.kickstart.ssm.SsmKSScheduleAction.java

/**
 * {@inheritDoc}
 */
public List getResult(RequestContext ctx) {
    if (isIP(ctx.getRequest())) {
        return Collections.EMPTY_LIST;
    }
    User user = ctx.getCurrentUser();
    List profiles = KickstartLister.getInstance().listProfilesForSsm(user);
    if (profiles.isEmpty()) {
        addMessage(ctx.getRequest(), "kickstart.schedule.noprofiles");
    } else {
        ctx.getRequest().setAttribute(ScheduleKickstartWizardAction.HAS_PROFILES, Boolean.TRUE);
    }
    return profiles;
}
From source file: hr.fer.zemris.vhdllab.service.workspace.WorkspaceTest.java

@SuppressWarnings("unchecked")
@Test(expected = IllegalArgumentException.class)
public void addFile() {
    workspace.addFile(null, new Hierarchy(new Project(), Collections.EMPTY_LIST));
}
From source file: org.syncope.core.rest.data.AbstractAttributableDataBinder.java

protected void fillAttribute(final List<String> values, final AttributableUtil attributableUtil,
        final AbstractSchema schema, final AbstractAttr attribute,
        final SyncopeClientException invalidValues) {

    // if the schema is multivalue, all values are considered for
    // addition, otherwise only the first one - if provided - is
    // considered
    List<String> valuesProvided = schema.isMultivalue() ? values
            : (values.isEmpty() ? Collections.EMPTY_LIST
                    : Collections.singletonList(values.iterator().next()));

    for (String value : valuesProvided) {
        if (value == null || value.isEmpty()) {
            LOG.debug("Null value for {}, ignoring", schema.getName());
        } else {
            try {
                attribute.addValue(value, attributableUtil);
            } catch (ValidationException e) {
                LOG.error("Invalid value for attribute " + schema.getName() + ": " + value, e);
                invalidValues.addElement(schema.getName() + ": " + value);
            }
        }
    }
}
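The example above picks between an empty list and a one-element list depending on whether the schema is multivalued. A small self-contained sketch of the same ternary pattern, written with the type-safe Collections.emptyList() instead of the raw EMPTY_LIST (class and method names here are illustrative, not from the project above):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

final class FirstValueOnly {

    // Keep every value when multivalued, otherwise at most the first one.
    static List<String> valuesToStore(boolean multivalue, List<String> values) {
        return multivalue ? values
                : values.isEmpty() ? Collections.<String>emptyList()
                        : Collections.singletonList(values.get(0));
    }

    public static void main(String[] args) {
        System.out.println(valuesToStore(false, Arrays.asList("a", "b")));        // [a]
        System.out.println(valuesToStore(false, Collections.<String>emptyList())); // []
        System.out.println(valuesToStore(true, Arrays.asList("a", "b")));          // [a, b]
    }
}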
From source file: com.cloudbees.jenkins.plugins.gogs.server.client.GogsServerAPIClient.java

/** {@inheritDoc} */
@Override
public List<GogsServerBranch> getBranches() {
    String url = String.format(API_BRANCHES_PATH, getOwner(), repositoryName, 0);
    try {
        String response = getRequest(url);
        List<GogsServerBranch> branches = parseCollection(response, GogsServerBranch.class);
        return branches;
    } catch (IOException e) {
        LOGGER.log(Level.SEVERE, "invalid branches response", e);
    }
    return Collections.EMPTY_LIST;
}
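Returning the raw EMPTY_LIST from a method declared to return List<GogsServerBranch>, as above, compiles only with an unchecked-conversion warning. A minimal sketch of the same fallback shape using Collections.emptyList(), whose element type is inferred from the return type (the class and the failing fetch method are hypothetical):

import java.io.IOException;
import java.util.Collections;
import java.util.List;

final class BranchLookup {

    // Illustrative stand-in for a remote call that may fail.
    static List<String> fetchBranchNames() throws IOException {
        throw new IOException("remote unavailable");
    }

    // Collections.emptyList() infers List<String> from the return type,
    // so the failure path needs no raw-type conversion or suppression.
    static List<String> branchNamesOrEmpty() {
        try {
            return fetchBranchNames();
        } catch (IOException e) {
            return Collections.emptyList();
        }
    }

    public static void main(String[] args) {
        System.out.println(branchNamesOrEmpty()); // []
    }
}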
From source file: org.syncope.core.persistence.dao.impl.UserDAOImpl.java

@Override
public List<SyncopeUser> findByAttrValue(final String schemaName, final UAttrValue attrValue) {
    USchema schema = schemaDAO.find(schemaName, USchema.class);
    if (schema == null) {
        LOG.error("Invalid schema name '{}'", schemaName);
        return Collections.EMPTY_LIST;
    }

    final String entityName = schema.isUniqueConstraint() ? UAttrUniqueValue.class.getName()
            : UAttrValue.class.getName();

    Query query = entityManager.createQuery("SELECT e FROM " + entityName + " e"
            + " WHERE e.attribute.schema.name = :schemaName "
            + " AND (e.stringValue IS NOT NULL"
            + " AND e.stringValue = :stringValue)"
            + " OR (e.booleanValue IS NOT NULL"
            + " AND e.booleanValue = :booleanValue)"
            + " OR (e.dateValue IS NOT NULL"
            + " AND e.dateValue = :dateValue)"
            + " OR (e.longValue IS NOT NULL"
            + " AND e.longValue = :longValue)"
            + " OR (e.doubleValue IS NOT NULL"
            + " AND e.doubleValue = :doubleValue)");
    query.setParameter("schemaName", schemaName);
    query.setParameter("stringValue", attrValue.getStringValue());
    query.setParameter("booleanValue", attrValue.getBooleanValue() == null ? null
            : attrValue.getBooleanAsInteger(attrValue.getBooleanValue()));
    if (attrValue.getDateValue() != null) {
        query.setParameter("dateValue", attrValue.getDateValue(), TemporalType.TIMESTAMP);
    } else {
        query.setParameter("dateValue", null);
    }
    query.setParameter("longValue", attrValue.getLongValue());
    query.setParameter("doubleValue", attrValue.getDoubleValue());

    List<SyncopeUser> result = new ArrayList<SyncopeUser>();
    SyncopeUser user;
    for (AbstractAttrValue value : (List<AbstractAttrValue>) query.getResultList()) {
        user = (SyncopeUser) value.getAttribute().getOwner();
        if (!result.contains(user)) {
            result.add(user);
        }
    }

    return result;
}
From source file: de.hybris.platform.mpintgproductcockpit.cmscockpit.services.impl.SynchronizationServiceImpl.java

public Collection<TypedObject> performSynchronization(Collection<? extends Object> items,
        List<String> syncJobPkList, CatalogVersionModel targetCatalogVersion, String qualifier) {
    if ((items == null) || (items.isEmpty())) {
        return Collections.EMPTY_LIST;
    }
    return null;
}