Example usage for org.apache.commons.lang StringUtils substringAfterLast

Introduction

This page lists example usages of org.apache.commons.lang StringUtils.substringAfterLast.

Prototype

public static String substringAfterLast(String str, String separator) 

Document

Gets the substring after the last occurrence of a separator. The separator itself is not returned: a null input returns null, and the empty string is returned when the input contains no occurrence of the separator.
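
Before the project examples, here is a minimal, self-contained sketch of the method's behavior, including its null-safe edge cases (the demo class and values are ours, not from any project below):

import org.apache.commons.lang.StringUtils;

public class SubstringAfterLastDemo {
    public static void main(String[] args) {
        // Typical use: keep everything after the last separator.
        System.out.println(StringUtils.substringAfterLast("a/b/c.txt", "/"));      // "c.txt"
        System.out.println(StringUtils.substringAfterLast("archive.tar.gz", ".")); // "gz"

        // Edge cases: null-safe input, empty string when the separator is absent.
        System.out.println(StringUtils.substringAfterLast(null, "/"));  // null
        System.out.println(StringUtils.substringAfterLast("abc", "/")); // ""
    }
}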

Usage

From source file: org.apache.archiva.webdav.ArchivaDavResourceFactory.java

@Override
public DavResource createResource(final DavResourceLocator locator, final DavServletRequest request,
        final DavServletResponse response) throws DavException {
    ArchivaDavResourceLocator archivaLocator = checkLocatorIsInstanceOfRepositoryLocator(locator);

    RepositoryGroupConfiguration repoGroupConfig = archivaConfiguration.getConfiguration()
            .getRepositoryGroupsAsMap().get(archivaLocator.getRepositoryId());

    String activePrincipal = getActivePrincipal(request);

    List<String> resourcesInAbsolutePath = new ArrayList<>();

    boolean readMethod = WebdavMethodUtil.isReadMethod(request.getMethod());
    DavResource resource;
    if (repoGroupConfig != null) {
        if (!readMethod) {
            throw new DavException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                    "Write method not allowed for repository groups.");
        }

        log.debug("Repository group '{}' accessed by '{}", repoGroupConfig.getId(), activePrincipal);

        // handle browse requests for virtual repos
        if (getLogicalResource(archivaLocator, null, true).endsWith("/")) {
            try {
                DavResource davResource = getResourceFromGroup(request, repoGroupConfig.getRepositories(),
                        archivaLocator, repoGroupConfig);

                setHeaders(response, locator, davResource, true);

                return davResource;

            } catch (RepositoryAdminException e) {
                throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
            }
        } else {
            // make a copy to avoid potential concurrent modifications (e.g. by configuration)
            // TODO: ultimately, locking might be more efficient than copying in this fashion since updates are
            //  infrequent
            List<String> repositories = new ArrayList<>(repoGroupConfig.getRepositories());
            resource = processRepositoryGroup(request, archivaLocator, repositories, activePrincipal,
                    resourcesInAbsolutePath, repoGroupConfig);
        }
    } else {

        try {
            RemoteRepository remoteRepository = remoteRepositoryAdmin
                    .getRemoteRepository(archivaLocator.getRepositoryId());

            if (remoteRepository != null) {
                String logicalResource = getLogicalResource(archivaLocator, null, false);
                IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext(remoteRepository);
                File resourceFile = StringUtils.equals(logicalResource, "/")
                        ? new File(indexingContext.getIndexDirectoryFile().getParent())
                        : new File(indexingContext.getIndexDirectoryFile().getParent(), logicalResource);
                resource = new ArchivaDavResource(resourceFile.getAbsolutePath(), //
                        locator.getResourcePath(), //
                        null, //
                        request.getRemoteAddr(), //
                        activePrincipal, //
                        request.getDavSession(), //
                        archivaLocator, //
                        this, //
                        mimeTypes, //
                        auditListeners, //
                        scheduler, //
                        fileLockManager);
                setHeaders(response, locator, resource, false);
                return resource;
            }
        } catch (RepositoryAdminException e) {
            log.debug("RepositoryException remote repository with d'{}' not found, msg: {}",
                    archivaLocator.getRepositoryId(), e.getMessage());
        }

        ManagedRepositoryContent managedRepositoryContent = null;

        try {
            managedRepositoryContent = repositoryFactory
                    .getManagedRepositoryContent(archivaLocator.getRepositoryId());
        } catch (RepositoryNotFoundException e) {
            throw new DavException(HttpServletResponse.SC_NOT_FOUND,
                    "Invalid repository: " + archivaLocator.getRepositoryId());
        } catch (RepositoryException e) {
            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
        }

        log.debug("Managed repository '{}' accessed by '{}'", managedRepositoryContent.getId(),
                activePrincipal);

        try {
            resource = processRepository(request, archivaLocator, activePrincipal, managedRepositoryContent,
                    managedRepositoryAdmin.getManagedRepository(archivaLocator.getRepositoryId()));

            String logicalResource = getLogicalResource(archivaLocator, null, false);
            resourcesInAbsolutePath
                    .add(new File(managedRepositoryContent.getRepoRoot(), logicalResource).getAbsolutePath());

        } catch (RepositoryAdminException e) {
            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
        }
    }

    String requestedResource = request.getRequestURI();

    // MRM-872 : merge all available metadata
    // merge metadata only when requested via the repo group
    if ((repositoryRequest.isMetadata(requestedResource)
            || repositoryRequest.isMetadataSupportFile(requestedResource)) && repoGroupConfig != null) {
        // this should only be at the project level, not the version level!
        if (isProjectReference(requestedResource)) {

            ArchivaDavResource res = (ArchivaDavResource) resource;
            String filePath = StringUtils
                    .substringBeforeLast(res.getLocalResource().getAbsolutePath().replace('\\', '/'), "/");
            filePath = filePath + "/maven-metadata-" + repoGroupConfig.getId() + ".xml";

            // for MRM-872 handle checksums of the merged metadata files
            if (repositoryRequest.isSupportFile(requestedResource)) {
                File metadataChecksum = new File(
                        filePath + "." + StringUtils.substringAfterLast(requestedResource, "."));

                if (metadataChecksum.exists()) {
                    LogicalResource logicalResource = new LogicalResource(
                            getLogicalResource(archivaLocator, null, false));

                    resource = new ArchivaDavResource(metadataChecksum.getAbsolutePath(),
                            logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal,
                            request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler,
                            fileLockManager);
                }
            } else {
                if (resourcesInAbsolutePath != null && resourcesInAbsolutePath.size() > 1) {
                    // merge the metadata of all repos under group
                    ArchivaRepositoryMetadata mergedMetadata = new ArchivaRepositoryMetadata();
                    for (String resourceAbsPath : resourcesInAbsolutePath) {
                        try {
                            File metadataFile = new File(resourceAbsPath);
                            ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read(metadataFile);
                            mergedMetadata = RepositoryMetadataMerge.merge(mergedMetadata, repoMetadata);
                        } catch (XMLException e) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                    "Error occurred while reading metadata file.");
                        } catch (RepositoryMetadataException r) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                    "Error occurred while merging metadata file.");
                        }
                    }

                    try {
                        File resourceFile = writeMergedMetadataToFile(mergedMetadata, filePath);

                        LogicalResource logicalResource = new LogicalResource(
                                getLogicalResource(archivaLocator, null, false));

                        resource = new ArchivaDavResource(resourceFile.getAbsolutePath(),
                                logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal,
                                request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
                                scheduler, fileLockManager);
                    } catch (RepositoryMetadataException r) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                "Error occurred while writing metadata file.");
                    } catch (IOException ie) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                "Error occurred while generating checksum files.");
                    } catch (DigesterException de) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                "Error occurred while generating checksum files." + de.getMessage());
                    }
                }
            }
        }
    }

    setHeaders(response, locator, resource, false);

    // compatibility with MRM-440 to ensure browsing the repository works ok
    if (resource.isCollection() && !request.getRequestURI().endsWith("/")) {
        throw new BrowserRedirectException(resource.getHref());
    }
    resource.addLockManager(lockManager);
    return resource;
}
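
The checksum handling above keys the merged metadata's checksum file off the requested file extension. A minimal sketch of that idiom, with illustrative paths that are not taken from a real Archiva deployment:

import org.apache.commons.lang.StringUtils;

public class ChecksumPathDemo {
    public static void main(String[] args) {
        // Illustrative values; the factory derives these from the request and resource.
        String requestedResource = "/repository/group/org/foo/maven-metadata.xml.sha1";
        String filePath = "/data/repositories/group/org/foo/maven-metadata-group.xml";

        // The checksum extension is everything after the last '.'.
        String checksumPath = filePath + "." + StringUtils.substringAfterLast(requestedResource, ".");
        System.out.println(checksumPath); // ...maven-metadata-group.xml.sha1
    }
}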

From source file: org.apache.archiva.webdav.ArchivaDavResourceFactory.java

private DavResource processRepositoryGroup(final DavServletRequest request,
        ArchivaDavResourceLocator archivaLocator, List<String> repositories, String activePrincipal,
        List<String> resourcesInAbsolutePath, RepositoryGroupConfiguration repoGroupConfig)
        throws DavException {
    DavResource resource = null;
    List<DavException> storedExceptions = new ArrayList<>();

    String pathInfo = StringUtils.removeEnd(request.getPathInfo(), "/");

    String rootPath = StringUtils.substringBeforeLast(pathInfo, "/");

    if (StringUtils.endsWith(rootPath, repoGroupConfig.getMergedIndexPath())) {
        // we are in the case of index file request
        String requestedFileName = StringUtils.substringAfterLast(pathInfo, "/");
        File temporaryIndexDirectory = buildMergedIndexDirectory(repositories, activePrincipal, request,
                repoGroupConfig);

        File resourceFile = new File(temporaryIndexDirectory, requestedFileName);
        resource = new ArchivaDavResource(resourceFile.getAbsolutePath(), requestedFileName, null,
                request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this,
                mimeTypes, auditListeners, scheduler, fileLockManager);

    } else {
        for (String repositoryId : repositories) {
            ManagedRepositoryContent managedRepositoryContent;
            try {
                managedRepositoryContent = repositoryFactory.getManagedRepositoryContent(repositoryId);
            } catch (RepositoryNotFoundException e) {
                throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
            } catch (RepositoryException e) {
                throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
            }

            try {
                ManagedRepository managedRepository = managedRepositoryAdmin.getManagedRepository(repositoryId);
                DavResource updatedResource = processRepository(request, archivaLocator, activePrincipal,
                        managedRepositoryContent, managedRepository);
                if (resource == null) {
                    resource = updatedResource;
                }

                String logicalResource = getLogicalResource(archivaLocator, null, false);
                if (logicalResource.endsWith("/")) {
                    logicalResource = logicalResource.substring(1);
                }
                resourcesInAbsolutePath.add(
                        new File(managedRepositoryContent.getRepoRoot(), logicalResource).getAbsolutePath());
            } catch (DavException e) {
                storedExceptions.add(e);
            } catch (RepositoryAdminException e) {
                storedExceptions.add(new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e));
            }
        }
    }
    if (resource == null) {
        if (!storedExceptions.isEmpty()) {
            // MRM-1232
            for (DavException e : storedExceptions) {
                if (401 == e.getErrorCode()) {
                    throw e;
                }
            }

            throw new DavException(HttpServletResponse.SC_NOT_FOUND);
        } else {
            throw new DavException(HttpServletResponse.SC_NOT_FOUND);
        }
    }
    return resource;
}
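
The index-file branch above splits the trimmed request path once with substringBeforeLast and once with substringAfterLast. A self-contained sketch of that split, using an assumed path rather than a real request:

import org.apache.commons.lang.StringUtils;

public class IndexPathSplitDemo {
    public static void main(String[] args) {
        // Assumed path; in the factory it comes from request.getPathInfo().
        String pathInfo = StringUtils.removeEnd("/repo-group/.indexer/nexus-maven-repository-index.gz/", "/");

        String rootPath = StringUtils.substringBeforeLast(pathInfo, "/");         // "/repo-group/.indexer"
        String requestedFileName = StringUtils.substringAfterLast(pathInfo, "/"); // "nexus-maven-repository-index.gz"

        System.out.println(rootPath + " -> " + requestedFileName);
    }
}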

From source file: org.apache.bookkeeper.replication.AuditorElector.java

/**
 * Performs the auditor election using a ZooKeeper ephemeral sequential
 * znode. The bookie that created the znode with the lowest sequence
 * number is elected as the Auditor.
 */
@VisibleForTesting
void submitElectionTask() {

    Runnable r = new Runnable() {
        public void run() {
            if (!running.get()) {
                return;
            }
            try {
                // creating my vote in zk. Vote format is 'V_numeric'
                createMyVote();
                List<String> children = zkc.getChildren(getVotePath(""), false);

                if (0 >= children.size()) {
                    throw new IllegalArgumentException(
                            "At least one bookie server should be present to elect the Auditor!");
                }

                // sorting in ascending order of sequential number
                Collections.sort(children, new ElectionComparator());
                String voteNode = StringUtils.substringAfterLast(myVote, PATH_SEPARATOR);

                // starting Auditing service
                if (children.get(AUDITOR_INDEX).equals(voteNode)) {
                    // update the auditor bookie id in the election path. This is
                    // done for debugging purposes
                    AuditorVoteFormat.Builder builder = AuditorVoteFormat.newBuilder().setBookieId(bookieId);

                    zkc.setData(getVotePath(""), TextFormat.printToString(builder.build()).getBytes(UTF_8), -1);
                    auditor = new Auditor(bookieId, conf, zkc, statsLogger);
                    auditor.start();
                } else {
                    // If not the auditor, watch the predecessor znode and
                    // wait for its deletion.
                    Watcher electionWatcher = new ElectionWatcher();
                    int myIndex = children.indexOf(voteNode);
                    int prevNodeIndex = myIndex - 1;
                    if (null == zkc.exists(getVotePath(PATH_SEPARATOR) + children.get(prevNodeIndex),
                            electionWatcher)) {
                        // The predecessor znode no longer exists;
                        // run the election again.
                        submitElectionTask();
                    }
                    electionAttempts.inc();
                }
            } catch (KeeperException e) {
                LOG.error("Exception while performing auditor election", e);
                submitShutdownTask();
            } catch (InterruptedException e) {
                LOG.error("Interrupted while performing auditor election", e);
                Thread.currentThread().interrupt();
                submitShutdownTask();
            } catch (UnavailableException e) {
                LOG.error("Ledger underreplication manager unavailable during election", e);
                submitShutdownTask();
            }
        }
    };
    executor.submit(r);
}
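
The election code reduces the full vote path to its znode name before comparing it with the sorted children. A sketch of that reduction; the path and the PATH_SEPARATOR value are assumptions for illustration:

import org.apache.commons.lang.StringUtils;

public class VoteNodeDemo {
    private static final String PATH_SEPARATOR = "/"; // assumed value for this sketch

    public static void main(String[] args) {
        // Illustrative vote path; AuditorElector builds the real one in createMyVote().
        String myVote = "/ledgers/underreplication/auditorelection/V_0000000003";
        String voteNode = StringUtils.substringAfterLast(myVote, PATH_SEPARATOR);
        System.out.println(voteNode); // "V_0000000003"
    }
}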

From source file: org.apache.bookkeeper.replication.TestLedgerUnderreplicationManager.java

/**
 * Tests enabling ledger re-replication. After enableLedgerReplication(),
 * the pending getLedgerToRereplicate() task should complete.
 */
@Test(timeout = 20000)
public void testEnableLedgerReplication() throws Exception {
    isLedgerReplicationDisabled = true;
    final LedgerUnderreplicationManager replicaMgr = lmf1.newLedgerUnderreplicationManager();

    // simulate a few urLedgers before disabling
    final Long ledgerA = 0xfeadeefdacL;
    final String missingReplica = "localhost:3181";
    try {
        replicaMgr.markLedgerUnderreplicated(ledgerA, missingReplica);
    } catch (UnavailableException e) {
        LOG.debug("Unexpected exception while marking urLedger", e);
        fail("Unexpected exception while marking urLedger" + e.getMessage());
    }

    // disabling replication
    replicaMgr.disableLedgerReplication();
    LOG.debug("Disabled Ledeger Replication");

    String znodeA = getUrLedgerZnode(ledgerA);
    final CountDownLatch znodeLatch = new CountDownLatch(2);
    String urledgerA = StringUtils.substringAfterLast(znodeA, "/");
    String urLockLedgerA = basePath + "/locks/" + urledgerA;
    zkc1.exists(urLockLedgerA, new Watcher() {
        @Override
        public void process(WatchedEvent event) {
            if (event.getType() == EventType.NodeCreated) {
                znodeLatch.countDown();
                LOG.debug("Recieved node creation event for the zNodePath:" + event.getPath());
            }

        }
    });
    // getLedgerToRereplicate blocks until re-replication is enabled
    Thread thread1 = new Thread() {
        @Override
        public void run() {
            try {
                Long lA = replicaMgr.getLedgerToRereplicate();
                assertEquals("Should be the ledger I just marked", lA, ledgerA);
                isLedgerReplicationDisabled = false;
                znodeLatch.countDown();
            } catch (UnavailableException e) {
                LOG.debug("Unexpected exception while marking urLedger", e);
                isLedgerReplicationDisabled = false;
            }
        }
    };
    thread1.start();

    try {
        assertFalse("shouldn't complete", znodeLatch.await(1, TimeUnit.SECONDS));
        assertTrue("Ledger replication is not disabled!", isLedgerReplicationDisabled);
        assertEquals("Failed to disable ledger replication!", 2, znodeLatch.getCount());

        replicaMgr.enableLedgerReplication();
        znodeLatch.await(5, TimeUnit.SECONDS);
        LOG.debug("Enabled Ledeger Replication");
        assertTrue("Ledger replication is not disabled!", !isLedgerReplicationDisabled);
        assertEquals("Failed to disable ledger replication!", 0, znodeLatch.getCount());
    } finally {
        thread1.interrupt();
    }
}

From source file: org.apache.camel.component.hdfs.HdfsConsumer.java

protected int doPoll() throws Exception {
    class ExcludePathFilter implements PathFilter {
        public boolean accept(Path path) {
            return !(path.toString().endsWith(config.getOpenedSuffix())
                    || path.toString().endsWith(config.getReadSuffix()));
        }
    }

    int numMessages = 0;

    HdfsInfo info = setupHdfs(false);
    FileStatus[] fileStatuses;
    if (info.getFileSystem().isFile(info.getPath())) {
        fileStatuses = info.getFileSystem().globStatus(info.getPath());
    } else {
        Path pattern = info.getPath().suffix("/" + this.config.getPattern());
        fileStatuses = info.getFileSystem().globStatus(pattern, new ExcludePathFilter());
    }

    for (FileStatus status : fileStatuses) {
        if (normalFileIsDirectoryNoSuccessFile(status, info)) {
            continue;
        }
        try {
            this.rwlock.writeLock().lock();
            this.istream = HdfsInputStream.createInputStream(status.getPath().toString(), this.config);
        } finally {
            this.rwlock.writeLock().unlock();
        }

        try {
            Holder<Object> key = new Holder<Object>();
            Holder<Object> value = new Holder<Object>();
            while (this.istream.next(key, value) != 0) {
                Exchange exchange = this.getEndpoint().createExchange();
                Message message = new DefaultMessage();
                String fileName = StringUtils.substringAfterLast(status.getPath().toString(), "/");
                message.setHeader(Exchange.FILE_NAME, fileName);
                if (key.value != null) {
                    message.setHeader(HdfsHeader.KEY.name(), key.value);
                }
                message.setBody(value.value);
                exchange.setIn(message);

                log.debug("Processing file {}", fileName);
                try {
                    processor.process(exchange);
                } catch (Exception e) {
                    exchange.setException(e);
                }

                // let the exception handler deal with any unhandled exceptions
                if (exchange.getException() != null) {
                    getExceptionHandler().handleException(exchange.getException());
                }

                numMessages++;
            }
        } finally {
            IOHelper.close(istream, "input stream", log);
        }
    }

    return numMessages;
}
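
Both HDFS consumers use the same one-liner to turn a fully qualified path into the FILE_NAME header. A standalone sketch with an invented URI:

import org.apache.commons.lang.StringUtils;

public class HdfsFileNameDemo {
    public static void main(String[] args) {
        // Invented HDFS URI; the consumer reads the real one from FileStatus.getPath().
        String path = "hdfs://namenode:8020/data/in/part-00000";
        String fileName = StringUtils.substringAfterLast(path, "/");
        System.out.println(fileName); // "part-00000"
    }
}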

From source file: org.apache.camel.component.hdfs2.HdfsConsumer.java

protected int doPoll() throws Exception {
    class ExcludePathFilter implements PathFilter {
        public boolean accept(Path path) {
            return !(path.toString().endsWith(config.getOpenedSuffix())
                    || path.toString().endsWith(config.getReadSuffix()));
        }
    }

    int numMessages = 0;

    HdfsInfo info = setupHdfs(false);
    FileStatus[] fileStatuses;
    if (info.getFileSystem().isFile(info.getPath())) {
        fileStatuses = info.getFileSystem().globStatus(info.getPath());
    } else {
        Path pattern = info.getPath().suffix("/" + this.config.getPattern());
        fileStatuses = info.getFileSystem().globStatus(pattern, new ExcludePathFilter());
    }

    for (FileStatus status : fileStatuses) {

        if (normalFileIsDirectoryNoSuccessFile(status, info)) {
            continue;
        }

        if (config.getOwner() != null) {
            // must match owner
            if (!config.getOwner().equals(status.getOwner())) {
                if (log.isDebugEnabled()) {
                    log.debug("Skipping file: {} as not matching owner: {}", status.getPath().toString(),
                            config.getOwner());
                }
                continue;
            }
        }

        try {
            this.rwlock.writeLock().lock();
            this.istream = HdfsInputStream.createInputStream(status.getPath().toString(), this.config);
        } finally {
            this.rwlock.writeLock().unlock();
        }

        try {
            Holder<Object> key = new Holder<Object>();
            Holder<Object> value = new Holder<Object>();
            while (this.istream.next(key, value) != 0) {
                Exchange exchange = this.getEndpoint().createExchange();
                Message message = new DefaultMessage();
                String fileName = StringUtils.substringAfterLast(status.getPath().toString(), "/");
                message.setHeader(Exchange.FILE_NAME, fileName);
                if (key.value != null) {
                    message.setHeader(HdfsHeader.KEY.name(), key.value);
                }
                message.setBody(value.value);
                exchange.setIn(message);

                log.debug("Processing file {}", fileName);
                try {
                    processor.process(exchange);
                } catch (Exception e) {
                    exchange.setException(e);
                }

                // let the exception handler deal with any unhandled exceptions
                if (exchange.getException() != null) {
                    getExceptionHandler().handleException(exchange.getException());
                }

                numMessages++;
            }
        } finally {
            IOHelper.close(istream, "input stream", log);
        }
    }

    return numMessages;
}

From source file: org.apache.jackrabbit.core.query.lucene.FacetHandler.java

private void extractFacetInfo(NamedList<Object> info, SolrParams solrParams) {
    // Parse the queries
    _facetQuery = new LinkedHashMap<String, Long>();
    NamedList<Long> fq = (NamedList<Long>) info.get("facet_queries");
    if (fq != null) {
        for (Map.Entry<String, Long> entry : fq) {
            _facetQuery.put(entry.getKey(), entry.getValue());
        }
    }

    // Parse the facet info into fields
    // TODO?? The list could be <int> or <long>? If always <long> then we can switch to <Long>
    NamedList<NamedList<Number>> ff = (NamedList<NamedList<Number>>) info.get("facet_fields");
    Map<String, FieldType> fieldTypeMap = new HashMap<>();
    if (ff != null) {
        _facetFields = new ArrayList<FacetField>(ff.size());
        _limitingFacets = new ArrayList<FacetField>(ff.size());
        long minsize = totalSize;
        for (Map.Entry<String, NamedList<Number>> facet : ff) {
            String key = StringUtils.substringBeforeLast(facet.getKey(),
                    SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
            String fieldInIndex = StringUtils.substringAfterLast(facet.getKey(),
                    SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
            FacetField f = new FacetField(key);
            if (!fieldTypeMap.containsKey(key)) {
                try {
                    // Find a key like f.field_name#<number>.facet.nodetype
                    Pattern facetNodetype = Pattern.compile("f\\." + key + "#[0-9]+\\.facet\\.nodetype");
                    String nodetypeName = null;
                    Iterator<String> parameterNamesIterator = solrParams.getParameterNamesIterator();
                    while (parameterNamesIterator.hasNext()) {
                        String next = parameterNamesIterator.next();
                        if (facetNodetype.matcher(next).matches()) {
                            nodetypeName = solrParams.get(next);
                            break;
                        }
                    }
                    ExtendedPropertyDefinition epd = NodeTypeRegistry.getInstance().getNodeType(nodetypeName)
                            .getPropertyDefinition(key);
                    fieldTypeMap.put(key, getType(epd));
                } catch (NoSuchNodeTypeException e) {
                    log.error(e.getMessage(), e);
                }
            }
            for (Map.Entry<String, Number> entry : facet.getValue()) {
                String facetValue = entry.getKey();
                String query = fieldTypeMap.get(key).toInternal(entry.getKey());
                Matcher matcher = valueWithQuery.matcher(facetValue);
                if (matcher.matches()) {
                    query = matcher.group(2);
                    facetValue = matcher.replaceFirst("$1");
                }
                f.add(facetValue, entry.getValue().longValue());
                f.getValues().get(f.getValueCount() - 1).setFilterQuery(
                        ClientUtils.escapeQueryChars(fieldInIndex) + ":" + ClientUtils.escapeQueryChars(query));
            }

            _facetFields.add(f);
            FacetField nl = f.getLimitingFields(minsize);
            if (nl.getValueCount() > 0) {
                _limitingFacets.add(nl);
            }
        }
    }

    // Parse date facets
    NamedList<NamedList<Object>> df = (NamedList<NamedList<Object>>) info.get("facet_dates");
    if (df != null) {
        // System.out.println(df);
        _facetDates = new ArrayList<FacetField>(df.size());
        for (Map.Entry<String, NamedList<Object>> facet : df) {
            // System.out.println("Key: " + facet.getKey() + " Value: " + facet.getValue());
            NamedList<Object> values = facet.getValue();
            String gap = (String) values.get("gap");
            Date end = (Date) values.get("end");
            FacetField f = new FacetField(StringUtils.substringBeforeLast(facet.getKey(),
                    SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR), gap, end);

            for (Map.Entry<String, Object> entry : values) {
                try {
                    String key = StringUtils.substringBeforeLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
                    String query = StringUtils.substringAfterLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
                    f.add(key, Long.parseLong(entry.getValue().toString()));
                    if (!StringUtils.isEmpty(query)) {
                        String rangePrefix = null;
                        if (query.contains(RANGEFROM_EXCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_EXCLUSIVE_PREFIX;
                        } else if (query.contains(RANGEFROM_INCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_INCLUSIVE_PREFIX;
                        }
                        if (!StringUtils.isEmpty(rangePrefix)) {
                            f.getValues().get(f.getValueCount() - 1)
                                    .setFilterQuery(ClientUtils
                                            .escapeQueryChars(StringUtils.substringBefore(query, rangePrefix))
                                            + rangePrefix + StringUtils.substringAfter(query, rangePrefix));
                        }
                    }
                } catch (NumberFormatException e) {
                    // Ignore for non-number responses which are already handled above
                }
            }

            _facetDates.add(f);
        }
    }

    // Parse range facets
    NamedList<NamedList<Object>> rf = (NamedList<NamedList<Object>>) info.get("facet_ranges");
    if (rf != null) {
        // System.out.println(df);
        _facetRanges = new ArrayList<RangeFacet>(rf.size());
        for (Map.Entry<String, NamedList<Object>> facet : rf) {
            NamedList<Object> values = facet.getValue();
            Object rawGap = values.get("gap");

            RangeFacet rangeFacet;
            if (rawGap instanceof Number) {
                Number gap = (Number) rawGap;
                Number start = (Number) values.get("start");
                Number end = (Number) values.get("end");

                Number before = (Number) values.get("before");
                Number after = (Number) values.get("after");

                rangeFacet = new RangeFacet.Numeric(StringUtils.substringBeforeLast(facet.getKey(),
                        SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR), start, end, gap, before, after);
            } else {
                String gap = (String) rawGap;
                Date start = (Date) values.get("start");
                Date end = (Date) values.get("end");

                Number before = (Number) values.get("before");
                Number after = (Number) values.get("after");

                rangeFacet = new RangeFacet.Date(StringUtils.substringBeforeLast(facet.getKey(),
                        SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR), start, end, gap, before, after);
            }

            NamedList<Integer> counts = (NamedList<Integer>) values.get("counts");
            for (Map.Entry<String, Integer> entry : counts) {
                try {
                    String key = StringUtils.substringBeforeLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
                    String query = StringUtils.substringAfterLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);

                    rangeFacet.addCount(key, entry.getValue());

                    if (!StringUtils.isEmpty(query)) {
                        String rangePrefix = null;
                        if (query.contains(RANGEFROM_EXCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_EXCLUSIVE_PREFIX;
                        } else if (query.contains(RANGEFROM_INCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_INCLUSIVE_PREFIX;
                        }
                        if (!StringUtils.isEmpty(rangePrefix)) {
                            ((RangeFacet.Count) rangeFacet.getCounts().get(rangeFacet.getCounts().size() - 1))
                                    .setFilterQuery(ClientUtils
                                            .escapeQueryChars(StringUtils.substringBefore(query, rangePrefix))
                                            + rangePrefix + StringUtils.substringAfter(query, rangePrefix));
                        }
                    }
                } catch (NumberFormatException e) {
                    // Ignore for non-number responses which are already handled above
                }
            }

            _facetRanges.add(rangeFacet);
        }
    }
}
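
Each facet key above carries a property name and an index field name joined by PROPNAME_INDEX_SEPARATOR, and the two substring calls split them back apart. A sketch of that split; the separator value below is an assumption, not necessarily what SimpleJahiaJcrFacets defines:

import org.apache.commons.lang.StringUtils;

public class FacetKeySplitDemo {
    // Assumed separator for illustration; the real constant lives in SimpleJahiaJcrFacets.
    private static final String PROPNAME_INDEX_SEPARATOR = "#";

    public static void main(String[] args) {
        String facetKey = "jcr:title" + PROPNAME_INDEX_SEPARATOR + "1:FACET:title";

        String key = StringUtils.substringBeforeLast(facetKey, PROPNAME_INDEX_SEPARATOR);
        String fieldInIndex = StringUtils.substringAfterLast(facetKey, PROPNAME_INDEX_SEPARATOR);

        System.out.println(key + " | " + fieldInIndex); // jcr:title | 1:FACET:title
    }
}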

From source file: org.apache.jackrabbit.core.query.lucene.JahiaLuceneQueryFactoryImpl.java

private boolean checkIndexedAcl(Map<String, Boolean> checkedAcls, IndexedNodeInfo infos)
        throws RepositoryException {
    boolean canRead = true;

    String[] acls = infos.getAclUuid() != null ? Patterns.SPACE.split(infos.getAclUuid())
            : ArrayUtils.EMPTY_STRING_ARRAY;
    ArrayUtils.reverse(acls);

    for (String acl : acls) {
        if (acl.contains("/")) {
            // The indexed ACL contains a single-user ACE; get the username
            String singleUser = StringUtils.substringAfter(acl, "/");
            acl = StringUtils.substringBefore(acl, "/");
            if (singleUser.contains("/")) {
                // Granted roles are specified in the indexed entry
                String roles = StringUtils.substringBeforeLast(singleUser, "/");
                singleUser = StringUtils.substringAfterLast(singleUser, "/");
                if (!singleUser.equals(session.getUserID())) {
                    // If user does not match, skip this ACL
                    continue;
                } else {
                    // If the user matches, check whether one of the roles grants read permission
                    for (String role : StringUtils.split(roles, '/')) {
                        if (((JahiaAccessManager) session.getAccessControlManager()).matchPermission(
                                Sets.newHashSet(Privilege.JCR_READ + "_" + session.getWorkspace().getName()),
                                role)) {
                            // User and role match; read is granted
                            return true;
                        }
                    }
                }
            } else {
                if (!singleUser.equals(session.getUserID())) {
                    // If user does not match, skip this ACL
                    continue;
                }
                // Otherwise, do normal ACL check.
            }
        }
        // Verify first if this acl has already been checked
        Boolean aclChecked = checkedAcls.get(acl);
        if (aclChecked == null) {
            try {
                canRead = session.getAccessManager().canRead(null, new NodeId(acl));
                checkedAcls.put(acl, canRead);
            } catch (RepositoryException e) {
            }
        } else {
            canRead = aclChecked;
        }
        break;
    }
    return canRead;
}
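
The single-user branch above implies an indexed ACE of roughly the form aclId/role1/.../user. A sketch of just the string slicing, on an invented entry:

import org.apache.commons.lang.StringUtils;

public class AclEntryParseDemo {
    public static void main(String[] args) {
        // Invented entry; real values come from the index's ACL field.
        String acl = "6b1f4c2e-acl-uuid/reader/editor/jdoe";

        String aclId = StringUtils.substringBefore(acl, "/");            // "6b1f4c2e-acl-uuid"
        String singleUser = StringUtils.substringAfter(acl, "/");        // "reader/editor/jdoe"
        String roles = StringUtils.substringBeforeLast(singleUser, "/"); // "reader/editor"
        String user = StringUtils.substringAfterLast(singleUser, "/");   // "jdoe"

        System.out.println(aclId + " | " + roles + " | " + user);
    }
}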

From source file: org.apache.jackrabbit.core.security.JahiaPrivilegeRegistry.java

/**
 * Returns the privilege with the specified <code>privilegeName</code>.
 *
 * @param privilegeName Name of the privilege.
 * @return the privilege with the specified <code>privilegeName</code>.
 * @throws AccessControlException If no privilege with the given name exists.
 * @throws RepositoryException If another error occurs.
 */
public Privilege getPrivilege(String privilegeName, String workspaceName)
        throws AccessControlException, RepositoryException {
    if (!privilegeName.contains("{") && privilegeName.contains("/")) {
        privilegeName = StringUtils.substringAfterLast(privilegeName, "/");
    }

    privilegeName = JCRContentUtils.getExpandedName(privilegeName, ns);

    String s = JahiaAccessManager.getPrivilegeName(privilegeName, workspaceName);
    Privilege privilege = getPrivilegeByName(s, privilegeName);
    if (privilege != null) {
        return privilege;
    }
    throw new AccessControlException("Unknown privilege " + privilegeName);
}

From source file: org.apache.kylin.storage.jdbc.ITJDBCResourceStoreTest.java

@Test
public void testResourceStoreBasic() throws Exception {
    Assume.assumeTrue(jdbcConnectable);
    ResourceStoreTest.testAStore(ResourceStoreTest.mockUrl(
            StringUtils.substringAfterLast(mainIdentifier + jdbcMetadataUrlNoIdentifier, "@"), kylinConfig),
            kylinConfig);
}
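
The test keeps only the part of the Kylin metadata URL after the last '@'. A sketch on an invented URL of the same general shape; the real one is built from mainIdentifier and jdbcMetadataUrlNoIdentifier:

import org.apache.commons.lang.StringUtils;

public class MetadataUrlDemo {
    public static void main(String[] args) {
        // Invented metadata URL, for illustration only.
        String metadataUrl = "kylin_metadata@jdbc,driverClassName=com.mysql.jdbc.Driver";
        String connectionPart = StringUtils.substringAfterLast(metadataUrl, "@");
        System.out.println(connectionPart); // "jdbc,driverClassName=com.mysql.jdbc.Driver"
    }
}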