Example usage for java.util.concurrent BlockingQueue size

List of usage examples for java.util.concurrent BlockingQueue size

Introduction

On this page you can find example usage for the java.util.concurrent BlockingQueue size() method.

Prototype

int size();

Document

Returns the number of elements in this collection.
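
Example

The following is a minimal, self-contained sketch (not taken from any of the projects listed under Usage) showing what size() reports as elements are added to and removed from a bounded queue; the class name BlockingQueueSizeExample is illustrative only.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class BlockingQueueSizeExample {
    public static void main(String[] args) throws InterruptedException {
        // Bounded queue with a capacity of 3.
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(3);
        System.out.println(queue.size());              // 0

        queue.put("a");
        queue.put("b");
        System.out.println(queue.size());              // 2

        queue.poll();
        System.out.println(queue.size());              // 1

        // size() is a point-in-time snapshot; in concurrent code the value can be
        // stale by the time it is read, so prefer take()/poll() over size()-based
        // checks when coordinating producers and consumers.
        System.out.println(queue.remainingCapacity()); // 2 (capacity minus size)
    }
}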

Usage

From source file:org.springframework.amqp.rabbit.connection.CachingConnectionFactoryTests.java

@Test
public void testWithConnectionFactoryCachedConnection() throws Exception {
    com.rabbitmq.client.ConnectionFactory mockConnectionFactory = mock(
            com.rabbitmq.client.ConnectionFactory.class);

    final List<com.rabbitmq.client.Connection> mockConnections = new ArrayList<com.rabbitmq.client.Connection>();
    final List<Channel> mockChannels = new ArrayList<Channel>();

    doAnswer(new Answer<com.rabbitmq.client.Connection>() {

        private int connectionNumber;

        @Override
        public com.rabbitmq.client.Connection answer(InvocationOnMock invocation) throws Throwable {
            com.rabbitmq.client.Connection connection = mock(com.rabbitmq.client.Connection.class);
            doAnswer(new Answer<Channel>() {

                private int channelNumber;

                @Override
                public Channel answer(InvocationOnMock invocation) throws Throwable {
                    Channel channel = mock(Channel.class);
                    when(channel.isOpen()).thenReturn(true);
                    int channelNumnber = ++this.channelNumber;
                    when(channel.toString())
                            .thenReturn("mockChannel" + connectionNumber + ":" + channelNumnber);
                    mockChannels.add(channel);
                    return channel;
                }
            }).when(connection).createChannel();
            int connectionNumber = ++this.connectionNumber;
            when(connection.toString()).thenReturn("mockConnection" + connectionNumber);
            when(connection.isOpen()).thenReturn(true);
            mockConnections.add(connection);
            return connection;
        }
    }).when(mockConnectionFactory).newConnection(any(ExecutorService.class), anyString());

    CachingConnectionFactory ccf = new CachingConnectionFactory(mockConnectionFactory);
    ccf.setExecutor(mock(ExecutorService.class));
    ccf.setCacheMode(CacheMode.CONNECTION);
    ccf.afterPropertiesSet();

    Set<?> allocatedConnections = TestUtils.getPropertyValue(ccf, "allocatedConnections", Set.class);
    assertEquals(0, allocatedConnections.size());
    BlockingQueue<?> idleConnections = TestUtils.getPropertyValue(ccf, "idleConnections", BlockingQueue.class);
    assertEquals(0, idleConnections.size());

    final AtomicReference<com.rabbitmq.client.Connection> createNotification = new AtomicReference<com.rabbitmq.client.Connection>();
    final AtomicReference<com.rabbitmq.client.Connection> closedNotification = new AtomicReference<com.rabbitmq.client.Connection>();
    ccf.setConnectionListeners(Collections.singletonList(new ConnectionListener() {

        @Override
        public void onCreate(Connection connection) {
            assertNull(createNotification.get());
            createNotification.set(targetDelegate(connection));
        }

        @Override
        public void onClose(Connection connection) {
            assertNull(closedNotification.get());
            closedNotification.set(targetDelegate(connection));
        }
    }));

    Connection con1 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(0), con1);
    assertEquals(1, allocatedConnections.size());
    assertEquals(0, idleConnections.size());
    assertNotNull(createNotification.get());
    assertSame(mockConnections.get(0), createNotification.getAndSet(null));

    Channel channel1 = con1.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel1);
    channel1.close();
    //AMQP-358
    verify(mockChannels.get(0), never()).close();

    con1.close(); // should be ignored, and placed into connection cache.
    verify(mockConnections.get(0), never()).close();
    assertEquals(1, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    assertNull(closedNotification.get());

    /*
     * will retrieve same connection that was just put into cache, and reuse single channel from cache as well
     */
    Connection con2 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(0), con2);
    Channel channel2 = con2.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel2);
    channel2.close();
    verify(mockChannels.get(0), never()).close();
    con2.close();
    verify(mockConnections.get(0), never()).close();
    assertEquals(1, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    assertNull(createNotification.get());

    /*
     * Now check for multiple connections/channels
     */
    con1 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(0), con1);
    con2 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(1), con2);
    channel1 = con1.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel1);
    channel2 = con2.createChannel(false);
    verifyChannelIs(mockChannels.get(1), channel2);
    assertEquals(2, allocatedConnections.size());
    assertEquals(0, idleConnections.size());
    assertNotNull(createNotification.get());
    assertSame(mockConnections.get(1), createNotification.getAndSet(null));

    // put mock1 in cache
    channel1.close();
    verify(mockChannels.get(1), never()).close();
    con1.close();
    verify(mockConnections.get(0), never()).close();
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    assertNull(closedNotification.get());

    Connection con3 = ccf.createConnection();
    assertNull(createNotification.get());
    verifyConnectionIs(mockConnections.get(0), con3);
    Channel channel3 = con3.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel3);

    assertEquals(2, allocatedConnections.size());
    assertEquals(0, idleConnections.size());

    channel2.close();
    con2.close();
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    channel3.close();
    con3.close();
    assertEquals(2, allocatedConnections.size());
    assertEquals(2, idleConnections.size());
    assertEquals("1", ccf.getCacheProperties().get("openConnections"));
    /*
     *  Cache size is 1; con3 (mock1) should have been a real close.
     *  con2 (mock2) should still be in the cache.
     */
    verify(mockConnections.get(0)).close(30000);
    assertNotNull(closedNotification.get());
    assertSame(mockConnections.get(0), closedNotification.getAndSet(null));
    verify(mockChannels.get(1), never()).close();
    verify(mockConnections.get(1), never()).close(30000);
    verify(mockChannels.get(1), never()).close();
    verifyConnectionIs(mockConnections.get(1), idleConnections.iterator().next());
    /*
     * Now a closed cached connection
     */
    when(mockConnections.get(1).isOpen()).thenReturn(false);
    when(mockChannels.get(1).isOpen()).thenReturn(false);
    con3 = ccf.createConnection();
    assertNotNull(closedNotification.get());
    assertSame(mockConnections.get(1), closedNotification.getAndSet(null));
    verifyConnectionIs(mockConnections.get(2), con3);
    assertNotNull(createNotification.get());
    assertSame(mockConnections.get(2), createNotification.getAndSet(null));
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    assertEquals("1", ccf.getCacheProperties().get("openConnections"));
    channel3 = con3.createChannel(false);
    verifyChannelIs(mockChannels.get(2), channel3);
    channel3.close();
    con3.close();
    assertNull(closedNotification.get());
    assertEquals(2, allocatedConnections.size());
    assertEquals(2, idleConnections.size());
    assertEquals("1", ccf.getCacheProperties().get("openConnections"));

    /*
     * Now a closed cached connection when creating a channel
     */
    con3 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(2), con3);
    assertNull(createNotification.get());
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    when(mockConnections.get(2).isOpen()).thenReturn(false);
    channel3 = con3.createChannel(false);
    assertNotNull(closedNotification.getAndSet(null));
    assertNotNull(createNotification.getAndSet(null));

    verifyChannelIs(mockChannels.get(3), channel3);
    channel3.close();
    con3.close();
    assertNull(closedNotification.get());
    assertEquals(2, allocatedConnections.size());
    assertEquals(2, idleConnections.size());
    assertEquals("1", ccf.getCacheProperties().get("openConnections"));

    // destroy
    ccf.destroy();
    assertNotNull(closedNotification.get());
    verify(mockConnections.get(3)).close(30000);
}

From source file:com.datatorrent.stram.engine.StreamingContainer.java

public void heartbeatLoop() throws Exception {
    umbilical.log(containerId, "[" + containerId + "] Entering heartbeat loop..");
    logger.debug("Entering heartbeat loop (interval is {} ms)", this.heartbeatIntervalMillis);
    final YarnConfiguration conf = new YarnConfiguration();
    long tokenLifeTime = (long) (containerContext.getValue(LogicalPlan.TOKEN_REFRESH_ANTICIPATORY_FACTOR)
            * containerContext.getValue(LogicalPlan.HDFS_TOKEN_LIFE_TIME));
    long expiryTime = System.currentTimeMillis();
    final Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
    Iterator<Token<?>> iter = credentials.getAllTokens().iterator();
    while (iter.hasNext()) {
        Token<?> token = iter.next();
        logger.debug("token: {}", token);
    }
    String hdfsKeyTabFile = containerContext.getValue(LogicalPlan.KEY_TAB_FILE);
    while (!exitHeartbeatLoop) {

        if (UserGroupInformation.isSecurityEnabled() && System.currentTimeMillis() >= expiryTime
                && hdfsKeyTabFile != null) {
            expiryTime = StramUserLogin.refreshTokens(tokenLifeTime, FileUtils.getTempDirectoryPath(),
                    containerId, conf, hdfsKeyTabFile, credentials, null, false);
        }
        synchronized (this.heartbeatTrigger) {
            try {
                this.heartbeatTrigger.wait(heartbeatIntervalMillis);
            } catch (InterruptedException e1) {
                logger.warn("Interrupted in heartbeat loop, exiting..");
                break;
            }
        }

        long currentTime = System.currentTimeMillis();
        ContainerHeartbeat msg = new ContainerHeartbeat();
        msg.jvmName = jvmName;
        if (this.bufferServerAddress != null) {
            msg.bufferServerHost = this.bufferServerAddress.getHostName();
            msg.bufferServerPort = this.bufferServerAddress.getPort();
            if (bufferServer != null && !eventloop.isActive()) {
                logger.warn("Requesting restart due to terminated event loop");
                msg.restartRequested = true;
            }
        }
        msg.memoryMBFree = ((int) (Runtime.getRuntime().freeMemory() / (1024 * 1024)));
        garbageCollectorMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
        for (GarbageCollectorMXBean bean : garbageCollectorMXBeans) {
            msg.gcCollectionTime += bean.getCollectionTime();
            msg.gcCollectionCount += bean.getCollectionCount();
        }

        ContainerHeartbeatResponse rsp;
        do {
            ContainerStats stats = new ContainerStats(containerId);
            // gather heartbeat info for all operators
            for (Map.Entry<Integer, Node<?>> e : nodes.entrySet()) {
                OperatorHeartbeat hb = new OperatorHeartbeat();
                hb.setNodeId(e.getKey());
                hb.setGeneratedTms(currentTime);
                hb.setIntervalMs(heartbeatIntervalMillis);
                if (e.getValue().commandResponse.size() > 0) {
                    BlockingQueue<StatsListener.OperatorResponse> commandResponse = e
                            .getValue().commandResponse;
                    ArrayList<StatsListener.OperatorResponse> response = new ArrayList<StatsListener.OperatorResponse>();
                    for (int i = 0; i < commandResponse.size(); i++) {
                        response.add(commandResponse.poll());
                    }
                    hb.requestResponse = response;
                }
                OperatorContext context = e.getValue().context;
                context.drainStats(hb.getOperatorStatsContainer());

                if (context.getThread() == null || context.getThread().getState() != Thread.State.TERMINATED) {
                    hb.setState(DeployState.ACTIVE);
                } else if (failedNodes.contains(hb.nodeId)) {
                    hb.setState(DeployState.FAILED);
                } else {
                    logger.debug("Reporting SHUTDOWN state because thread is {} and failedNodes is {}",
                            context.getThread(), failedNodes);
                    hb.setState(DeployState.SHUTDOWN);
                }

                stats.addNodeStats(hb);
            }

            /**
             * Container stats published for whoever is interested in listening.
             * Currently interested candidates are TupleRecorderCollection and BufferServerStatsSubscriber
             */
            eventBus.publish(new ContainerStatsEvent(stats));

            msg.setContainerStats(stats);

            // heartbeat call and follow-up processing
            //logger.debug("Sending heartbeat for {} operators.", msg.getContainerStats().size());
            msg.sentTms = System.currentTimeMillis();
            rsp = umbilical.processHeartbeat(msg);
            processHeartbeatResponse(rsp);
            if (rsp.hasPendingRequests) {
                logger.info("Waiting for pending request.");
                synchronized (this.heartbeatTrigger) {
                    try {
                        this.heartbeatTrigger.wait(500);
                    } catch (InterruptedException ie) {
                        logger.warn("Interrupted in heartbeat loop", ie);
                        break;
                    }
                }
            }
        } while (rsp.hasPendingRequests);

    }
    logger.debug("Exiting hearbeat loop");
    umbilical.log(containerId, "[" + containerId + "] Exiting heartbeat loop..");
}

From source file:org.springframework.amqp.rabbit.connection.CachingConnectionFactoryTests.java

@Test
public void testWithConnectionFactoryCachedConnectionAndChannels() throws Exception {
    com.rabbitmq.client.ConnectionFactory mockConnectionFactory = mock(
            com.rabbitmq.client.ConnectionFactory.class);

    final List<com.rabbitmq.client.Connection> mockConnections = new ArrayList<com.rabbitmq.client.Connection>();
    final List<Channel> mockChannels = new ArrayList<Channel>();

    doAnswer(new Answer<com.rabbitmq.client.Connection>() {

        private int connectionNumber;

        @Override
        public com.rabbitmq.client.Connection answer(InvocationOnMock invocation) throws Throwable {
            com.rabbitmq.client.Connection connection = mock(com.rabbitmq.client.Connection.class);
            doAnswer(new Answer<Channel>() {

                private int channelNumber;

                @Override
                public Channel answer(InvocationOnMock invocation) throws Throwable {
                    Channel channel = mock(Channel.class);
                    when(channel.isOpen()).thenReturn(true);
                    int channelNumnber = ++this.channelNumber;
                    when(channel.toString())
                            .thenReturn("mockChannel" + connectionNumber + ":" + channelNumnber);
                    mockChannels.add(channel);
                    return channel;
                }
            }).when(connection).createChannel();
            int connectionNumber = ++this.connectionNumber;
            when(connection.toString()).thenReturn("mockConnection" + connectionNumber);
            when(connection.isOpen()).thenReturn(true);
            mockConnections.add(connection);
            return connection;
        }
    }).when(mockConnectionFactory).newConnection(any(ExecutorService.class), anyString());

    CachingConnectionFactory ccf = new CachingConnectionFactory(mockConnectionFactory);
    ccf.setExecutor(mock(ExecutorService.class));
    ccf.setCacheMode(CacheMode.CONNECTION);
    ccf.setConnectionCacheSize(2);
    ccf.setChannelCacheSize(2);
    ccf.afterPropertiesSet();

    Set<?> allocatedConnections = TestUtils.getPropertyValue(ccf, "allocatedConnections", Set.class);
    assertEquals(0, allocatedConnections.size());
    BlockingQueue<?> idleConnections = TestUtils.getPropertyValue(ccf, "idleConnections", BlockingQueue.class);
    assertEquals(0, idleConnections.size());
    @SuppressWarnings("unchecked")
    Map<?, List<?>> cachedChannels = TestUtils.getPropertyValue(ccf,
            "allocatedConnectionNonTransactionalChannels", Map.class);

    final AtomicReference<com.rabbitmq.client.Connection> createNotification = new AtomicReference<com.rabbitmq.client.Connection>();
    final AtomicReference<com.rabbitmq.client.Connection> closedNotification = new AtomicReference<com.rabbitmq.client.Connection>();
    ccf.setConnectionListeners(Collections.singletonList(new ConnectionListener() {

        @Override
        public void onCreate(Connection connection) {
            assertNull(createNotification.get());
            createNotification.set(targetDelegate(connection));
        }

        @Override
        public void onClose(Connection connection) {
            assertNull(closedNotification.get());
            closedNotification.set(targetDelegate(connection));
        }
    }));

    Connection con1 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(0), con1);
    assertEquals(1, allocatedConnections.size());
    assertEquals(0, idleConnections.size());
    assertNotNull(createNotification.get());
    assertSame(mockConnections.get(0), createNotification.getAndSet(null));

    Channel channel1 = con1.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel1);
    channel1.close();
    //AMQP-358
    verify(mockChannels.get(0), never()).close();

    con1.close(); // should be ignored, and placed into connection cache.
    verify(mockConnections.get(0), never()).close();
    assertEquals(1, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    assertEquals(1, cachedChannels.get(con1).size());
    assertNull(closedNotification.get());

    /*
     * will retrieve same connection that was just put into cache, and reuse single channel from cache as well
     */
    Connection con2 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(0), con2);
    Channel channel2 = con2.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel2);
    channel2.close();
    verify(mockChannels.get(0), never()).close();
    con2.close();
    verify(mockConnections.get(0), never()).close();
    assertEquals(1, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    assertNull(createNotification.get());

    /*
     * Now check for multiple connections/channels
     */
    con1 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(0), con1);
    con2 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(1), con2);
    channel1 = con1.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel1);
    channel2 = con2.createChannel(false);
    verifyChannelIs(mockChannels.get(1), channel2);
    assertEquals(2, allocatedConnections.size());
    assertEquals(0, idleConnections.size());
    assertNotNull(createNotification.get());
    assertSame(mockConnections.get(1), createNotification.getAndSet(null));

    // put mock1 in cache
    channel1.close();
    verify(mockChannels.get(1), never()).close();
    con1.close();
    verify(mockConnections.get(0), never()).close();
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    assertNull(closedNotification.get());

    Connection con3 = ccf.createConnection();
    assertNull(createNotification.get());
    verifyConnectionIs(mockConnections.get(0), con3);
    Channel channel3 = con3.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel3);

    assertEquals(2, allocatedConnections.size());
    assertEquals(0, idleConnections.size());

    channel2.close();
    con2.close();
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    channel3.close();
    con3.close();
    assertEquals(2, allocatedConnections.size());
    assertEquals(2, idleConnections.size());
    assertEquals(1, cachedChannels.get(con1).size());
    assertEquals(1, cachedChannels.get(con2).size());
    /*
     *  Cache size is 2; neither should have been a real close.
     *  con2 (mock2) and con1 should still be in the cache.
     */
    verify(mockConnections.get(0), never()).close(30000);
    assertNull(closedNotification.get());
    verify(mockChannels.get(1), never()).close();
    verify(mockConnections.get(1), never()).close(30000);
    verify(mockChannels.get(1), never()).close();
    assertEquals(2, idleConnections.size());
    Iterator<?> iterator = idleConnections.iterator();
    verifyConnectionIs(mockConnections.get(1), iterator.next());
    verifyConnectionIs(mockConnections.get(0), iterator.next());
    /*
     * Now a closed cached connection
     */
    when(mockConnections.get(1).isOpen()).thenReturn(false);
    con3 = ccf.createConnection();
    assertNotNull(closedNotification.get());
    assertSame(mockConnections.get(1), closedNotification.getAndSet(null));
    verifyConnectionIs(mockConnections.get(0), con3);
    assertNull(createNotification.get());
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    channel3 = con3.createChannel(false);
    verifyChannelIs(mockChannels.get(0), channel3);
    channel3.close();
    con3.close();
    assertNull(closedNotification.get());
    assertEquals(2, allocatedConnections.size());
    assertEquals(2, idleConnections.size());

    /*
     * Now a closed cached connection when creating a channel
     */
    con3 = ccf.createConnection();
    verifyConnectionIs(mockConnections.get(0), con3);
    assertNull(createNotification.get());
    assertEquals(2, allocatedConnections.size());
    assertEquals(1, idleConnections.size());
    when(mockConnections.get(0).isOpen()).thenReturn(false);
    channel3 = con3.createChannel(false);
    assertNotNull(closedNotification.getAndSet(null));
    assertNotNull(createNotification.getAndSet(null));

    verifyChannelIs(mockChannels.get(2), channel3);
    channel3.close();
    con3.close();
    assertNull(closedNotification.get());
    assertEquals(2, allocatedConnections.size());
    assertEquals(2, idleConnections.size());

    Connection con4 = ccf.createConnection();
    assertSame(con3, con4);
    assertEquals(1, idleConnections.size());
    Channel channelA = con4.createChannel(false);
    Channel channelB = con4.createChannel(false);
    Channel channelC = con4.createChannel(false);
    channelA.close();
    assertEquals(1, cachedChannels.get(con4).size());
    channelB.close();
    assertEquals(2, cachedChannels.get(con4).size());
    channelC.close();
    assertEquals(2, cachedChannels.get(con4).size());

    // destroy
    ccf.destroy();
    assertNotNull(closedNotification.get());
    // the physical close wasn't invoked, because this mock connection was marked with 'false' for 'isOpen()'
    verify(mockConnections.get(0)).close(30000);
    verify(mockConnections.get(1)).close(30000);
    verify(mockConnections.get(2)).close(30000);
}

From source file:com.xorcode.andtweet.AndTweetService.java

private int saveQueue(BlockingQueue<CommandData> q, String prefsFileName) {
    Context context = MyPreferences.getContext();
    int count = 0;
    // Delete any existing saved queue
    SharedPreferencesUtil.delete(context, prefsFileName);
    if (q.size() > 0) {
        SharedPreferences sp = MyPreferences.getSharedPreferences(prefsFileName, MODE_PRIVATE);
        while (q.size() > 0) {
            CommandData cd = q.poll();
            cd.save(sp, count);
            MyLog.v(TAG, "Command saved: " + cd.toString());
            count += 1;
        }
        MyLog.d(TAG, "Queue saved to " + prefsFileName + ", " + count + " msgs");
    }
    return count;
}

From source file:ubic.gemma.loader.protein.StringProteinInteractionLoader.java

/**
 * Poll the queue to see if there are any Gene2GeneProteinAssociation objects to load into the database. If so,
 * first check that the genes are in the Gemma db, as these identifiers came from BioMart; if both genes are
 * found, load the association.
 *
 * @param gene2GeneProteinAssociationQueue queue of Gene2GeneProteinAssociation to load
 */
void doLoad(final BlockingQueue<Gene2GeneProteinAssociation> gene2GeneProteinAssociationQueue) {
    log.info("starting processing ");
    while (!(converterDone.get() && gene2GeneProteinAssociationQueue.isEmpty())) {

        try {
            Gene2GeneProteinAssociation gene2GeneProteinAssociation = gene2GeneProteinAssociationQueue.poll();
            if (gene2GeneProteinAssociation == null) {
                continue;
            }
            // check they are genes gemma knows about
            Gene geneOne = geneService.findByNCBIId(gene2GeneProteinAssociation.getFirstGene().getNcbiGeneId());
            Gene geneTwo = geneService
                    .findByNCBIId(gene2GeneProteinAssociation.getSecondGene().getNcbiGeneId());

            if (geneOne == null) {
                log.warn("Gene with NCBI id=" + gene2GeneProteinAssociation.getFirstGene().getNcbiGeneId()
                        + " not in Gemma");
                continue;
            }
            if (geneTwo == null) {
                log.warn("Gene with NCBI id=" + gene2GeneProteinAssociation.getSecondGene().getNcbiGeneId()
                        + " not in Gemma");
                continue;
            }

            gene2GeneProteinAssociation.setFirstGene(geneOne);
            gene2GeneProteinAssociation.setSecondGene(geneTwo);
            persisterHelper.persist(gene2GeneProteinAssociation);

            if (++loadedGeneCount % 1000 == 0) {
                log.info("Proceesed " + loadedGeneCount + " protein protein interactions. "
                        + "Current queue has " + gene2GeneProteinAssociationQueue.size() + " items.");
            }

        } catch (Exception e) {
            log.error(e, e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    log.info("Loaded " + loadedGeneCount + " protein protein interactions. ");
    loaderDone.set(true);
}

From source file:com.amazon.janusgraph.example.MarvelGraphFactory.java

public static void load(final JanusGraph graph, final int rowsToLoad, final boolean report) throws Exception {

    JanusGraphManagement mgmt = graph.openManagement();
    if (mgmt.getGraphIndex(CHARACTER) == null) {
        final PropertyKey characterKey = mgmt.makePropertyKey(CHARACTER).dataType(String.class).make();
        mgmt.buildIndex(CHARACTER, Vertex.class).addKey(characterKey).unique().buildCompositeIndex();
    }
    if (mgmt.getGraphIndex(COMIC_BOOK) == null) {
        final PropertyKey comicBookKey = mgmt.makePropertyKey(COMIC_BOOK).dataType(String.class).make();
        mgmt.buildIndex(COMIC_BOOK, Vertex.class).addKey(comicBookKey).unique().buildCompositeIndex();
        mgmt.makePropertyKey(WEAPON).dataType(String.class).make();
        mgmt.makeEdgeLabel(APPEARED).multiplicity(Multiplicity.MULTI).make();
    }
    mgmt.commit();

    ClassLoader classLoader = MarvelGraphFactory.class.getClassLoader();
    URL resource = classLoader.getResource("META-INF/marvel.csv");
    int line = 0;
    Map<String, Set<String>> comicToCharacter = new HashMap<>();
    Map<String, Set<String>> characterToComic = new HashMap<>();
    Set<String> characters = new HashSet<>();
    BlockingQueue<Runnable> creationQueue = new LinkedBlockingQueue<>();
    try (CSVReader reader = new CSVReader(new InputStreamReader(resource.openStream()))) {
        String[] nextLine;
        while ((nextLine = reader.readNext()) != null && line < rowsToLoad) {
            line++;
            String comicBook = nextLine[1];
            String[] characterNames = nextLine[0].split("/");
            if (!comicToCharacter.containsKey(comicBook)) {
                comicToCharacter.put(comicBook, new HashSet<String>());
            }
            List<String> comicCharacters = Arrays.asList(characterNames);
            comicToCharacter.get(comicBook).addAll(comicCharacters);
            characters.addAll(comicCharacters);

        }
    }

    for (String character : characters) {
        creationQueue.add(new CharacterCreationCommand(character, graph));
    }

    BlockingQueue<Runnable> appearedQueue = new LinkedBlockingQueue<>();
    for (String comicBook : comicToCharacter.keySet()) {
        creationQueue.add(new ComicBookCreationCommand(comicBook, graph));
        Set<String> comicCharacters = comicToCharacter.get(comicBook);
        for (String character : comicCharacters) {
            AppearedCommand lineCommand = new AppearedCommand(graph, new Appeared(character, comicBook));
            appearedQueue.add(lineCommand);
            if (!characterToComic.containsKey(character)) {
                characterToComic.put(character, new HashSet<String>());
            }
            characterToComic.get(character).add(comicBook);
        }
        REGISTRY.histogram("histogram.comic-to-character").update(comicCharacters.size());
    }

    int maxAppearances = 0;
    String maxCharacter = "";
    for (String character : characterToComic.keySet()) {
        Set<String> comicBookSet = characterToComic.get(character);
        int numberOfAppearances = comicBookSet.size();
        REGISTRY.histogram("histogram.character-to-comic").update(numberOfAppearances);
        if (numberOfAppearances > maxAppearances) {
            maxCharacter = character;
            maxAppearances = numberOfAppearances;
        }
    }
    LOG.info("Character {} has most appearances at {}", maxCharacter, maxAppearances);

    ExecutorService executor = Executors.newFixedThreadPool(POOL_SIZE);
    for (int i = 0; i < POOL_SIZE; i++) {
        executor.execute(new BatchCommand(graph, creationQueue));
    }
    executor.shutdown();
    while (!executor.awaitTermination(60, TimeUnit.SECONDS)) {
        LOG.info("Awaiting:" + creationQueue.size());
        if (report) {
            REPORTER.report();
        }
    }

    executor = Executors.newSingleThreadExecutor();
    executor.execute(new BatchCommand(graph, appearedQueue));

    executor.shutdown();
    while (!executor.awaitTermination(60, TimeUnit.SECONDS)) {
        LOG.info("Awaiting:" + appearedQueue.size());
        if (report) {
            REPORTER.report();
        }
    }
    LOG.info("MarvelGraphFactory.load complete");
}

From source file:ubic.gemma.core.loader.protein.StringProteinInteractionLoader.java

/**
 * Poll the queue to see if there are any Gene2GeneProteinAssociation objects to load into the database. If so,
 * first check that the genes are in the Gemma db, as these identifiers came from BioMart; if both genes are
 * found, load the association.
 *
 * @param gene2GeneProteinAssociationQueue queue of Gene2GeneProteinAssociation to load
 */
private void doLoad(final BlockingQueue<Gene2GeneProteinAssociation> gene2GeneProteinAssociationQueue) {
    StringProteinInteractionLoader.log.info("starting processing ");
    while (!(converterDone.get() && gene2GeneProteinAssociationQueue.isEmpty())) {

        try {
            Gene2GeneProteinAssociation gene2GeneProteinAssociation = gene2GeneProteinAssociationQueue.poll();
            if (gene2GeneProteinAssociation == null) {
                continue;
            }
            // check they are genes gemma knows about
            Gene geneOne = geneService.findByNCBIId(gene2GeneProteinAssociation.getFirstGene().getNcbiGeneId());
            Gene geneTwo = geneService
                    .findByNCBIId(gene2GeneProteinAssociation.getSecondGene().getNcbiGeneId());

            if (geneOne == null) {
                StringProteinInteractionLoader.log.warn("Gene with NCBI id="
                        + gene2GeneProteinAssociation.getFirstGene().getNcbiGeneId() + " not in Gemma");
                continue;
            }
            if (geneTwo == null) {
                StringProteinInteractionLoader.log.warn("Gene with NCBI id="
                        + gene2GeneProteinAssociation.getSecondGene().getNcbiGeneId() + " not in Gemma");
                continue;
            }

            FieldUtils.writeField(gene2GeneProteinAssociation, "firstGene", geneOne, true);
            FieldUtils.writeField(gene2GeneProteinAssociation, "secondGene", geneTwo, true);

            persisterHelper.persist(gene2GeneProteinAssociation);

            if (++loadedGeneCount % 1000 == 0) {
                StringProteinInteractionLoader.log
                        .info("Proceesed " + loadedGeneCount + " protein protein interactions. "
                                + "Current queue has " + gene2GeneProteinAssociationQueue.size() + " items.");
            }

        } catch (Exception e) {
            StringProteinInteractionLoader.log.error(e, e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    StringProteinInteractionLoader.log.info("Loaded " + loadedGeneCount + " protein protein interactions. ");
    loaderDone.set(true);
}

From source file:org.opennms.newts.gsod.ImportRunner.java

private Observable<Boolean> parMap(Observable<List<Sample>> samples, MetricRegistry metrics,
        Func1<List<Sample>, Boolean> insert) {

    final Timer waitTime = metrics.timer("wait-time");

    @SuppressWarnings("serial")
    final BlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<Runnable>(
            m_maxThreadQueueSize == 0 ? m_threadCount * 3 : m_maxThreadQueueSize) {

        @Override
        public boolean offer(Runnable r) {
            try (Context time = waitTime.time()) {
                this.put(r);
                return true;
            } catch (InterruptedException e) {
                throw Exceptions.propagate(e);
            }
        }

        @Override
        public boolean add(Runnable r) {
            try (Context time = waitTime.time()) {
                this.put(r);
                return true;
            } catch (InterruptedException e) {
                throw Exceptions.propagate(e);
            }
        }

    };
    final ThreadPoolExecutor executor = new ThreadPoolExecutor(m_threadCount, m_threadCount, 0L,
            TimeUnit.MILLISECONDS, workQueue);

    metrics.register("active-threads", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return executor.getActiveCount();
        }

    });

    metrics.register("pool-size", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return executor.getPoolSize();
        }

    });
    metrics.register("largest-pool-size", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return executor.getLargestPoolSize();
        }

    });

    metrics.register("work-queue-size", new Gauge<Integer>() {

        @Override
        public Integer getValue() {
            return workQueue.size();
        }

    });

    return parMap(samples, executor, metrics, insert);
}

From source file:ubic.gemma.loader.expression.arrayDesign.ArrayDesignProbeMapperServiceImpl.java

/**
 * @param queue
 * @param generatorDone
 * @param loaderDone
 */
void doLoad(final BlockingQueue<BlatAssociation> queue, AtomicBoolean generatorDone, AtomicBoolean loaderDone) {
    int loadedAssociationCount = 0;
    while (!(generatorDone.get() && queue.isEmpty())) {

        try {
            BlatAssociation ba = queue.poll();
            if (ba == null) {
                continue;
            }

            GeneProduct geneProduct = ba.getGeneProduct();
            if (geneProduct.getId() == null) {
                GeneProduct existing = geneProductService.find(geneProduct);

                if (existing == null) {

                    existing = checkForAlias(geneProduct);
                    if (existing == null) {
                        /*
                         * We have to be careful not to cruft up the gene table now that I so carefully cleaned it.
                         * But this is a problem if we aren't adding some other association to the gene at least.
                         * But generally the mRNAs that GP has that NCBI doesn't are "alternative" or "additional".
                         */
                        if (log.isDebugEnabled())
                            log.debug("New gene product from GoldenPath is not in Gemma: " + geneProduct
                                    + " skipping association to " + ba.getBioSequence()
                                    + " [skipping policy in place]");
                        continue;
                    }
                }
                ba.setGeneProduct(existing);
            }

            persisterHelper.persist(ba);

            if (++loadedAssociationCount % 1000 == 0) {
                log.info("Persisted " + loadedAssociationCount + " blat associations. " + "Current queue has "
                        + queue.size() + " items.");
            }

        } catch (Exception e) {
            log.error(e, e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    log.info("Load thread done: loaded " + loadedAssociationCount + " blat associations. ");
    loaderDone.set(true);
}

From source file:ubic.gemma.core.loader.expression.arrayDesign.ArrayDesignProbeMapperServiceImpl.java

private void doLoad(final BlockingQueue<BACS> queue, AtomicBoolean generatorDone, AtomicBoolean loaderDone,
        boolean persist) {
    int loadedAssociationCount = 0;
    while (!(generatorDone.get() && queue.isEmpty())) {

        try {
            BACS bacs = queue.poll();
            if (bacs == null) {
                continue;
            }

            GeneProduct geneProduct = bacs.ba.getGeneProduct();

            if (geneProduct.getId() == null) {
                GeneProduct existing = geneProductService.find(geneProduct);

                if (existing == null) {

                    existing = this.checkForAlias(geneProduct);
                    if (existing == null) {
                        /*
                         * We have to be careful not to cruft up the gene table now that I so carefully cleaned it.
                         * But this is a problem if we aren't adding some other association to the gene at least.
                         * But generally the mRNAs that GP has that NCBI doesn't are "alternative" or "additional".
                         */
                        if (ArrayDesignProbeMapperServiceImpl.log.isDebugEnabled())
                            ArrayDesignProbeMapperServiceImpl.log
                                    .debug("New gene product from GoldenPath is not in Gemma: " + geneProduct
                                            + " skipping association to " + bacs.ba.getBioSequence()
                                            + " [skipping policy in place]");
                        continue;
                    }
                }
                bacs.ba.setGeneProduct(existing);
            }

            if (persist) {
                persisterHelper.persist(bacs.ba);

                if (++loadedAssociationCount % 1000 == 0) {
                    ArrayDesignProbeMapperServiceImpl.log.info("Persisted " + loadedAssociationCount
                            + " blat associations. " + "Current queue has " + queue.size() + " items.");
                }
            } else {
                this.printResult(bacs.cs, bacs.ba);
            }

        } catch (Exception e) {
            ArrayDesignProbeMapperServiceImpl.log.error(e, e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    ArrayDesignProbeMapperServiceImpl.log
            .info("Load thread done: loaded " + loadedAssociationCount + " blat associations. ");
    loaderDone.set(true);
}