Example usage for java.util.Collections.newSetFromMap

List of usage examples for java.util.Collections.newSetFromMap

Introduction

On this page you can find example usages of java.util.Collections.newSetFromMap, collected from open-source projects.

Prototype

public static <E> Set<E> newSetFromMap(Map<E, Boolean> map) 

Document

Returns a set backed by the specified map.
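
As a quick orientation before the project examples below, here is a minimal, self-contained sketch (not taken from any of those projects; class and variable names are illustrative only) of the two situations where newSetFromMap is most often reached for: getting a thread-safe Set out of a ConcurrentHashMap, and getting an identity-based Set out of an IdentityHashMap.

import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class NewSetFromMapDemo {
    public static void main(String[] args) {
        // There is no ConcurrentHashSet in the JDK, so this is the standard idiom for a concurrent set.
        Set<String> concurrentSet = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
        concurrentSet.add("a");
        concurrentSet.add("a");                 // duplicate by equals(), ignored

        // An identity set: membership is decided by ==, not by equals()/hashCode().
        Set<String> identitySet = Collections.newSetFromMap(new IdentityHashMap<String, Boolean>());
        identitySet.add(new String("a"));
        identitySet.add(new String("a"));       // equal but distinct instance, kept

        System.out.println(concurrentSet.size() + " " + identitySet.size());   // prints: 1 2
    }
}

The returned set is a live view of the map's key set and inherits its ordering, concurrency, and equality behaviour. The map must be empty when newSetFromMap is called (a non-empty map causes an IllegalArgumentException) and should not be touched directly afterwards. On Java 8 and later, ConcurrentHashMap.newKeySet() is an even shorter way to obtain the concurrent variant.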

Usage

From source file:org.elasticsoftware.elasticactors.rabbitmq.cpt.RabbitMQMessagingService.java

@Override
public void addChannelListener(final Channel channel, final ChannelListener channelListener) {
    Set<ChannelListener> listeners = this.channelListenerRegistry.get(channel);
    if (listeners == null) {
        listeners = Collections.newSetFromMap(new ConcurrentHashMap<ChannelListener, Boolean>());
        if (this.channelListenerRegistry.putIfAbsent(channel, listeners) != null) {
            // was already created
            listeners = this.channelListenerRegistry.get(channel);
        }
    }
    listeners.add(channelListener);
}
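
This example uses the classic lock-free populate-on-demand idiom for a ConcurrentMap: look up, create a candidate set, publish it with putIfAbsent, and fall back to the winner if another thread got there first. On Java 8 and later the same intent can be written more compactly with computeIfAbsent; the sketch below is only a hedged rewrite of the method above, reusing its names (channelListenerRegistry is assumed to be a ConcurrentMap<Channel, Set<ChannelListener>>).

@Override
public void addChannelListener(final Channel channel, final ChannelListener channelListener) {
    // Atomically create and publish the listener set on first use.
    Set<ChannelListener> listeners = channelListenerRegistry.computeIfAbsent(channel,
            c -> Collections.newSetFromMap(new ConcurrentHashMap<ChannelListener, Boolean>()));
    listeners.add(channelListener);
}

Either way, the listener set itself has to be concurrent, because several threads may add to it after it has been published in the registry.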

From source file:org.apache.tez.runtime.library.shuffle.common.impl.ShuffleManager.java

public ShuffleManager(InputContext inputContext, Configuration conf, int numInputs, int bufferSize,
        boolean ifileReadAheadEnabled, int ifileReadAheadLength, CompressionCodec codec,
        FetchedInputAllocator inputAllocator) throws IOException {
    this.inputContext = inputContext;
    this.numInputs = numInputs;

    this.shuffledInputsCounter = inputContext.getCounters().findCounter(TaskCounter.NUM_SHUFFLED_INPUTS);
    this.failedShufflesCounter = inputContext.getCounters().findCounter(TaskCounter.NUM_FAILED_SHUFFLE_INPUTS);
    this.bytesShuffledCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES);
    this.decompressedDataSizeCounter = inputContext.getCounters()
            .findCounter(TaskCounter.SHUFFLE_BYTES_DECOMPRESSED);
    this.bytesShuffledToDiskCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_TO_DISK);
    this.bytesShuffledToMemCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_TO_MEM);
    this.bytesShuffledDirectDiskCounter = inputContext.getCounters()
            .findCounter(TaskCounter.SHUFFLE_BYTES_DISK_DIRECT);

    this.ifileBufferSize = bufferSize;
    this.ifileReadAhead = ifileReadAheadEnabled;
    this.ifileReadAheadLength = ifileReadAheadLength;
    this.codec = codec;
    this.inputManager = inputAllocator;
    this.localDiskFetchEnabled = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH,
            TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH_DEFAULT);
    this.sharedFetchEnabled = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH,
            TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH_DEFAULT);

    this.srcNameTrimmed = TezUtilsInternal.cleanVertexName(inputContext.getSourceVertexName());

    completedInputSet = Collections.newSetFromMap(new ConcurrentHashMap<InputIdentifier, Boolean>(numInputs));
    completedInputs = new LinkedBlockingQueue<FetchedInput>(numInputs);
    knownSrcHosts = new ConcurrentHashMap<String, InputHost>();
    pendingHosts = new LinkedBlockingQueue<InputHost>();
    obsoletedInputs = Collections.newSetFromMap(new ConcurrentHashMap<InputAttemptIdentifier, Boolean>());
    runningFetchers = Collections.newSetFromMap(new ConcurrentHashMap<Fetcher, Boolean>());

    int maxConfiguredFetchers = conf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES,
            TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES_DEFAULT);

    this.numFetchers = Math.min(maxConfiguredFetchers, numInputs);

    ExecutorService fetcherRawExecutor = Executors.newFixedThreadPool(numFetchers, new ThreadFactoryBuilder()
            .setDaemon(true).setNameFormat("Fetcher [" + srcNameTrimmed + "] #%d " + localhostName).build());
    this.fetcherExecutor = MoreExecutors.listeningDecorator(fetcherRawExecutor);

    ExecutorService schedulerRawExecutor = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder()
            .setDaemon(true).setNameFormat("ShuffleRunner [" + srcNameTrimmed + "]").build());
    this.schedulerExecutor = MoreExecutors.listeningDecorator(schedulerRawExecutor);
    this.schedulerCallable = new RunShuffleCallable(conf);

    this.startTime = System.currentTimeMillis();
    this.lastProgressTime = startTime;

    this.shuffleSecret = ShuffleUtils.getJobTokenSecretFromTokenBytes(
            inputContext.getServiceConsumerMetaData(TezConstants.TEZ_SHUFFLE_HANDLER_SERVICE_ID));
    httpConnectionParams = ShuffleUtils.constructHttpShuffleConnectionParams(conf);

    this.localFs = (RawLocalFileSystem) FileSystem.getLocal(conf).getRaw();

    this.localDirAllocator = new LocalDirAllocator(TezRuntimeFrameworkConfigs.LOCAL_DIRS);

    this.localDisks = Iterables.toArray(localDirAllocator.getAllLocalPathsToRead(".", conf), Path.class);

    Arrays.sort(this.localDisks);

    LOG.info(this.getClass().getSimpleName() + " : numInputs=" + numInputs + ", compressionCodec="
            + (codec == null ? "NoCompressionCodec" : codec.getClass().getName()) + ", numFetchers="
            + numFetchers + ", ifileBufferSize=" + ifileBufferSize + ", ifileReadAheadEnabled=" + ifileReadAhead
            + ", ifileReadAheadLength=" + ifileReadAheadLength + ", " + "localDiskFetchEnabled="
            + localDiskFetchEnabled + ", " + "sharedFetchEnabled=" + sharedFetchEnabled + ", "
            + httpConnectionParams.toString());
}
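
A small design note on the collections set up here: completedInputSet passes numInputs to the ConcurrentHashMap constructor as an initial-capacity hint, so the backing table is pre-sized for the expected number of inputs and is less likely to need resizing, while obsoletedInputs and runningFetchers use the default capacity because their sizes are unpredictable. The same pattern appears in the second ShuffleManager variant further down this page.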

From source file:org.ow2.proactive_grid_cloud_portal.cli.cmd.sched.PackageDownloader.java

/**
 * This method browses a web directory and all of its subdirectories and returns a set containing the URLs of their contents.
 *
 * @param dirUrl
 * @param cummulativeRelativeUrl a string used for keeping track of a directory structure in the recursive context. MUST BE empty ("") in the first call.
 * @return
 * @throws IOException
 * @throws URISyntaxException
 */
private Set<String> listWebDirectoryContent(URL dirUrl, String cummulativeRelativeUrl)
        throws IOException, URISyntaxException {
    Set<String> result = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());

    //Load the directory listing page and extract all the links it contains.
    Document doc = Jsoup.connect(dirUrl.toString()).get();
    logger.info("Listing directories from: " + doc.location());
    for (Element file : doc.select("a[href]")) {
        String relativeURL = file.attr("href");
        // skip sort urls in Apache Tomcat
        if (relativeURL.startsWith("?")) {
            continue;
        }
        // skip parent directory url
        if (isRelativeParentDirectoryUrl(relativeURL)) {
            continue;
        }
        result.add(cummulativeRelativeUrl + relativeURL);
    }

    for (String relativeURL : result) {
        URL absoluteUrl = new URL(dirUrl, relativeURL);
        if (!isFileURL(relativeURL)) {
            result.addAll(listWebDirectoryContent(absoluteUrl, relativeURL));
        }
    }
    return result;
}
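
One detail worth calling out in this example: the second for loop iterates over result while the recursive addAll keeps inserting into the same set. That is only safe because a set built from a ConcurrentHashMap has weakly consistent iterators, which never throw ConcurrentModificationException (they may or may not observe elements added during the traversal); a plain HashSet in the same position would throw as soon as the iterator advanced past an insertion. A minimal standalone sketch of that property (not from the project, assuming the usual java.util and java.util.concurrent imports):

Set<String> urls = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
urls.add("dir1/");
urls.add("dir2/");
for (String url : urls) {
    if (!url.endsWith("index.html")) {
        urls.add(url + "index.html");   // no ConcurrentModificationException; may or may not be visited
    }
}
System.out.println(urls);               // the two directories plus their index.html entries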

From source file:org.apache.tez.runtime.library.common.shuffle.impl.ShuffleManager.java

public ShuffleManager(InputContext inputContext, Configuration conf, int numInputs, int bufferSize,
        boolean ifileReadAheadEnabled, int ifileReadAheadLength, CompressionCodec codec,
        FetchedInputAllocator inputAllocator) throws IOException {
    this.inputContext = inputContext;
    this.numInputs = numInputs;

    this.shuffledInputsCounter = inputContext.getCounters().findCounter(TaskCounter.NUM_SHUFFLED_INPUTS);
    this.failedShufflesCounter = inputContext.getCounters().findCounter(TaskCounter.NUM_FAILED_SHUFFLE_INPUTS);
    this.bytesShuffledCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES);
    this.decompressedDataSizeCounter = inputContext.getCounters()
            .findCounter(TaskCounter.SHUFFLE_BYTES_DECOMPRESSED);
    this.bytesShuffledToDiskCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_TO_DISK);
    this.bytesShuffledToMemCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_TO_MEM);
    this.bytesShuffledDirectDiskCounter = inputContext.getCounters()
            .findCounter(TaskCounter.SHUFFLE_BYTES_DISK_DIRECT);

    this.ifileBufferSize = bufferSize;
    this.ifileReadAhead = ifileReadAheadEnabled;
    this.ifileReadAheadLength = ifileReadAheadLength;
    this.codec = codec;
    this.inputManager = inputAllocator;
    this.localDiskFetchEnabled = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH,
            TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH_DEFAULT);
    this.sharedFetchEnabled = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH,
            TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH_DEFAULT);

    this.srcNameTrimmed = TezUtilsInternal.cleanVertexName(inputContext.getSourceVertexName());

    completedInputSet = Collections.newSetFromMap(new ConcurrentHashMap<InputIdentifier, Boolean>(numInputs));
    completedInputs = new LinkedBlockingQueue<FetchedInput>(numInputs);
    knownSrcHosts = new ConcurrentHashMap<String, InputHost>();
    pendingHosts = new LinkedBlockingQueue<InputHost>();
    obsoletedInputs = Collections.newSetFromMap(new ConcurrentHashMap<InputAttemptIdentifier, Boolean>());
    runningFetchers = Collections.newSetFromMap(new ConcurrentHashMap<Fetcher, Boolean>());

    int maxConfiguredFetchers = conf.getInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES,
            TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES_DEFAULT);

    this.numFetchers = Math.min(maxConfiguredFetchers, numInputs);

    ExecutorService fetcherRawExecutor = Executors.newFixedThreadPool(numFetchers, new ThreadFactoryBuilder()
            .setDaemon(true).setNameFormat("Fetcher [" + srcNameTrimmed + "] #%d").build());
    this.fetcherExecutor = MoreExecutors.listeningDecorator(fetcherRawExecutor);

    ExecutorService schedulerRawExecutor = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder()
            .setDaemon(true).setNameFormat("ShuffleRunner [" + srcNameTrimmed + "]").build());
    this.schedulerExecutor = MoreExecutors.listeningDecorator(schedulerRawExecutor);
    this.schedulerCallable = new RunShuffleCallable(conf);

    this.startTime = System.currentTimeMillis();
    this.lastProgressTime = startTime;

    SecretKey shuffleSecret = ShuffleUtils.getJobTokenSecretFromTokenBytes(
            inputContext.getServiceConsumerMetaData(TezConstants.TEZ_SHUFFLE_HANDLER_SERVICE_ID));
    this.jobTokenSecretMgr = new JobTokenSecretManager(shuffleSecret);
    httpConnectionParams = ShuffleUtils.constructHttpShuffleConnectionParams(conf);

    this.localFs = (RawLocalFileSystem) FileSystem.getLocal(conf).getRaw();

    this.localDirAllocator = new LocalDirAllocator(TezRuntimeFrameworkConfigs.LOCAL_DIRS);

    this.localDisks = Iterables.toArray(localDirAllocator.getAllLocalPathsToRead(".", conf), Path.class);

    Arrays.sort(this.localDisks);

    LOG.info(this.getClass().getSimpleName() + " : numInputs=" + numInputs + ", compressionCodec="
            + (codec == null ? "NoCompressionCodec" : codec.getClass().getName()) + ", numFetchers="
            + numFetchers + ", ifileBufferSize=" + ifileBufferSize + ", ifileReadAheadEnabled=" + ifileReadAhead
            + ", ifileReadAheadLength=" + ifileReadAheadLength + ", " + "localDiskFetchEnabled="
            + localDiskFetchEnabled + ", " + "sharedFetchEnabled=" + sharedFetchEnabled + ", "
            + httpConnectionParams.toString());
}

From source file:org.codice.ddf.spatial.ogc.wfs.v2_0_0.catalog.source.WfsFilterDelegate.java

private final void updateAllowedOperations(FilterCapabilities filterCapabilities) {
    comparisonOps = Collections.newSetFromMap(new ConcurrentHashMap<COMPARISON_OPERATORS, Boolean>(
            new EnumMap<COMPARISON_OPERATORS, Boolean>(COMPARISON_OPERATORS.class)));

    geometryOperands = new ArrayList<QName>();
    temporalOperands = new ArrayList<QName>();

    if (filterCapabilities == null) {
        LOGGER.error("WFS 2.0 Service doesn't support any filters");
        return;
    }

    // CONFORMANCE
    configureConformance(filterCapabilities.getConformance());

    ScalarCapabilitiesType scalarCapabilities = filterCapabilities.getScalarCapabilities();
    if (scalarCapabilities != null) {
        // LOGICAL OPERATORS
        if (scalarCapabilities.getLogicalOperators() != null) {
            logicalOps = true;
        }

        // COMPARISON OPERATORS
        ComparisonOperatorsType comparisonOperators = scalarCapabilities.getComparisonOperators();
        if (comparisonOperators != null) {
            for (ComparisonOperatorType comp : comparisonOperators.getComparisonOperator()) {
                if (null != comp) {
                    comparisonOps.add(COMPARISON_OPERATORS.valueOf(comp.getName()));
                }
            }
        }
    }

    // SPATIAL OPERATORS
    SpatialCapabilitiesType spatialCapabilities = filterCapabilities.getSpatialCapabilities();
    if (spatialCapabilities != null) {
        if (spatialCapabilities.getSpatialOperators() != null) {
            setSpatialOps(spatialCapabilities.getSpatialOperators());
        }

        // GEOMETRY OPERANDS
        GeometryOperandsType geometryOperandsType = spatialCapabilities.getGeometryOperands();
        if (geometryOperandsType != null) {
            for (GeometryOperandsType.GeometryOperand geoOperand : geometryOperandsType.getGeometryOperand()) {
                if (geoOperand.getName() != null) {
                    geometryOperands.add(geoOperand.getName());
                }
            }
            LOGGER.debug("geometryOperands: {}", geometryOperands);
        }
    }

    // TEMPORAL OPERATORS
    TemporalCapabilitiesType temporalCapabilitiesType = filterCapabilities.getTemporalCapabilities();
    if (temporalCapabilitiesType != null) {
        if (temporalCapabilitiesType.getTemporalOperators() != null) {
            setTemporalOps(temporalCapabilitiesType.getTemporalOperators());
        }

        // TEMPORAL OPERANDS
        TemporalOperandsType temporalOperandsType = temporalCapabilitiesType.getTemporalOperands();
        if (temporalOperandsType != null) {
            for (TemporalOperandsType.TemporalOperand temporalOperand : temporalOperandsType
                    .getTemporalOperand()) {
                if (temporalOperand.getName() != null) {
                    temporalOperands.add(temporalOperand.getName());
                }
            }
            LOGGER.debug("temporalOperands: {}", temporalOperands);
        }
    }
}
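
A note on the construction of comparisonOps above: the ConcurrentHashMap(Map) copy constructor only copies the entries of the (empty) EnumMap, so the resulting set is simply backed by an ordinary empty ConcurrentHashMap, and the EnumMap contributes neither its ordering nor its compactness. If thread safety is not required, an enum-keyed backing map (or EnumSet) is the more direct choice. A small sketch with a hypothetical enum standing in for COMPARISON_OPERATORS, assuming the usual java.util imports:

enum Op { EQUAL, LESS_THAN, GREATER_THAN }   // hypothetical stand-in

Set<Op> threadSafeOps = Collections.newSetFromMap(new ConcurrentHashMap<Op, Boolean>());
Set<Op> orderedOps = Collections.newSetFromMap(new EnumMap<Op, Boolean>(Op.class));   // declaration order, not thread-safe
Set<Op> simpleOps = EnumSet.noneOf(Op.class);                                         // the usual single-threaded idiom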

From source file:com.squid.kraken.v4.core.analysis.engine.index.DimensionStore.java

private void addCorrelations(DimensionMember member, Collection<DimensionMember> accu) {
    Set<DimensionMember> correlations = correlationMap.get(member.getID());
    if (correlations == null) {
        correlations = Collections.newSetFromMap(new ConcurrentHashMap<DimensionMember, Boolean>());
        Set<DimensionMember> previous = correlationMap.putIfAbsent(member.getID(), correlations);
        if (previous != null) {
            correlations = previous;
        }
    }
    correlations.addAll(accu);
}

From source file:com.google.uzaygezen.core.BoundedRollupTest.java

private static <V, K> Set<MapNode<K, V>> toIdentitySet(List<MapNode<K, V>> list) {
    Set<MapNode<K, V>> set = Collections.newSetFromMap(new IdentityHashMap<MapNode<K, V>, Boolean>());
    set.addAll(list);
    Assert.assertEquals(list.size(), set.size());
    return set;
}

From source file:org.talend.daikon.properties.PropertiesImpl.java

@Override
public void accept(AnyPropertyVisitor visitor, Properties parent) {
    // Use a set with reference-equality semantics (instead of equals()/hashCode()) to avoid a stack
    // overflow when hashCode() itself uses a visitor.
    Set<Properties> visited = Collections.newSetFromMap(new IdentityHashMap<Properties, Boolean>());
    acceptInternal(visitor, parent, visited);
}
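
As the comment explains, the visited set uses reference equality so that hashCode()/equals() of the visited Properties objects, which may themselves walk the object graph (and the graph may contain cycles), are never invoked during the traversal. A generic, minimal sketch of that cycle-safe visiting pattern, with Node as a purely hypothetical type exposing a children() method:

static void visit(Node node, Set<Node> visited) {
    if (node == null || !visited.add(node)) {
        return;                                   // null, or this exact instance was already visited
    }
    // ... process node ...
    for (Node child : node.children()) {
        visit(child, visited);
    }
}

// typical call:
// visit(root, Collections.<Node>newSetFromMap(new IdentityHashMap<Node, Boolean>()));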

From source file:org.apache.cassandra.db.index.SecondaryIndexManager.java

/**
 * @return all indexes which do *not* use a backing CFS internally
 */
public Set<SecondaryIndex> getIndexesNotBackedByCfs() {
    // use an identity map because per-row indexes use the same instance across many columns
    Set<SecondaryIndex> indexes = Collections.newSetFromMap(new IdentityHashMap<SecondaryIndex, Boolean>());
    for (SecondaryIndex index : allIndexes)
        if (index.getIndexCfs() == null)
            indexes.add(index);
    return indexes;
}
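
Using an identity-backed set here means each index instance is returned exactly once even though, as the comment notes, the same per-row index instance is registered for many columns, and it does so without ever invoking the indexes' own equals()/hashCode() implementations.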

From source file:org.opendaylight.groupbasedpolicy.renderer.opflex.EndpointManagerTest.java

@Test
public void testOnDataChangedRemoved() throws Exception {
    List<DataObject> daoList = new ArrayList<DataObject>();
    Endpoint mockEp = mock(Endpoint.class);
    EndpointGroupId mockEpgId = mock(EndpointGroupId.class);
    TenantId mockTid = mock(TenantId.class);
    L2BridgeDomainId mockBdId = mock(L2BridgeDomainId.class);
    MacAddress mockMac = mock(MacAddress.class);
    OpflexOverlayContext mockCtx = mock(OpflexOverlayContext.class);
    Map<InstanceIdentifier<?>, DataObject> emptyMap = new ConcurrentHashMap<InstanceIdentifier<?>, DataObject>();
    Map<InstanceIdentifier<?>, Boolean> dummyMap = new ConcurrentHashMap<InstanceIdentifier<?>, Boolean>();
    Set<InstanceIdentifier<?>> dummySet = Collections.newSetFromMap(dummyMap);
    dummySet.add(mockIid);
    daoList.add(mockEp);
    when(mockEp.getEndpointGroup()).thenReturn(mockEpgId);
    when(mockEp.getTenant()).thenReturn(mockTid);
    when(mockEp.getL2Context()).thenReturn(mockBdId);
    when(mockBdId.getValue()).thenReturn(TEST_CONTEXT);
    when(mockEp.getMacAddress()).thenReturn(mockMac);
    when(mockEp.getAugmentation(OpflexOverlayContext.class)).thenReturn(mockCtx);
    when(mockCtx.getAgentId()).thenReturn(TEST_AGENT_ID);
    when(mockMac.getValue()).thenReturn(TEST_IDENTIFIER);

    when(mockChange.getCreatedData()).thenReturn(emptyMap);
    when(mockChange.getRemovedPaths()).thenReturn(dummySet);
    when(mockChange.getOriginalData()).thenReturn(mockDaoMap);
    when(mockChange.getUpdatedData()).thenReturn(emptyMap);
    when(mockDaoMap.get(Matchers.<InstanceIdentifier<?>>any())).thenReturn(mockEp);

    epManager.onDataChanged(mockChange);
    verify(mockChange).getOriginalData();
}