Example usage for java.util.concurrent TimeUnit MINUTES

Introduction

On this page you can find usage examples for java.util.concurrent.TimeUnit.MINUTES.

Prototype

TimeUnit MINUTES

Document

Time unit representing sixty seconds.
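
A minimal, self-contained sketch (not taken from the projects below) of the two ways TimeUnit.MINUTES typically appears: converting between time units and supplying the unit for a bounded wait:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class TimeUnitMinutesDemo {
    public static void main(String[] args) throws InterruptedException {
        // Convert 5 minutes to milliseconds: 300000.
        long millis = TimeUnit.MINUTES.toMillis(5);

        // Convert 90 seconds to minutes; the result truncates to 1.
        long minutes = TimeUnit.MINUTES.convert(90, TimeUnit.SECONDS);
        System.out.println(millis + " ms, " + minutes + " min");

        // MINUTES as the unit of a bounded wait, as in several examples below.
        CountDownLatch latch = new CountDownLatch(1);
        latch.countDown(); // release immediately so the demo does not block
        boolean completed = latch.await(1, TimeUnit.MINUTES);
        System.out.println("latch released in time: " + completed);
    }
}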

Usage

From source file:org.codelibs.robot.S2RobotTest.java

@Override
protected void setUp() throws Exception {
    super.setUp();

    final Map<String, String> featureMap = newHashMap();
    featureMap.put("http://xml.org/sax/features/namespaces", "false");
    final Map<String, String> propertyMap = newHashMap();
    final Map<String, String> childUrlRuleMap = newHashMap();
    childUrlRuleMap.put("//A", "href");
    childUrlRuleMap.put("//AREA", "href");
    childUrlRuleMap.put("//FRAME", "src");
    childUrlRuleMap.put("//IFRAME", "src");
    childUrlRuleMap.put("//IMG", "src");
    childUrlRuleMap.put("//LINK", "href");
    childUrlRuleMap.put("//SCRIPT", "src");

    container = new StandardRobotContainer();
    container.<HcHttpClient>prototype("internalHttpClient", HcHttpClient.class, client -> {
        client.setCookieSpec(CookieSpecs.BEST_MATCH);
        client.setClientConnectionManager(container.getComponent("clientConnectionManager"));
    }).prototype("httpClient", FaultTolerantClient.class, client -> {
        client.setRobotClient(container.getComponent("internalHttpClient"));
        client.setMaxRetryCount(5);
        client.setRetryInterval(500);
    }).prototype("fsClient", FileSystemClient.class)
            .prototype("ruleManager", RuleManagerImpl.class, manager -> {
                manager.addRule(container.getComponent("sitemapsRule"));
                manager.addRule(container.getComponent("fileRule"));
            }).prototype("accessResult", AccessResultImpl.class).prototype("urlQueue", UrlQueueImpl.class)
            .prototype("robotThread", S2RobotThread.class).prototype("s2Robot", S2Robot.class)
            .prototype("urlFilterService", UrlFilterServiceImpl.class)
            .prototype("urlQueueService", UrlQueueServiceImpl.class)
            .prototype("dataService", DataServiceImpl.class).prototype("urlFilter", UrlFilterImpl.class)
            .singleton("urlConvertHelper", UrlConvertHelper.class)
            .singleton("intervalController", DefaultIntervalController.class)
            .singleton("sitemapsHelper", SitemapsHelper.class).singleton("logHelper", LogHelperImpl.class)
            .singleton("encodingHelper", EncodingHelper.class)
            .singleton("contentLengthHelper", ContentLengthHelper.class)
            .singleton("mimeTypeHelper", MimeTypeHelperImpl.class)
            .<FileTransformer>singleton("fileTransformer", FileTransformer.class, transformer -> {
                transformer.setName("fileTransformer");
                transformer.setFeatureMap(featureMap);
                transformer.setPropertyMap(propertyMap);
                transformer.setChildUrlRuleMap(childUrlRuleMap);
            }).singleton("dataHelper", MemoryDataHelper.class)
            .singleton("robotsTxtHelper", RobotsTxtHelper.class)
            .<S2RobotClientFactory>singleton("clientFactory", S2RobotClientFactory.class, factory -> {
                factory.addClient("http:.*", container.getComponent("httpClient"));
                factory.addClient("file:.*", container.getComponent("fsClient"));
            }).singleton("tikaExtractor", TikaExtractor.class)
            .<ExtractorFactory>singleton("extractorFactory", ExtractorFactory.class, factory -> {
                TikaExtractor tikaExtractor = container.getComponent("tikaExtractor");
                factory.addExtractor("text/plain", tikaExtractor);
                factory.addExtractor("text/html", tikaExtractor);
            })//
            .singleton("httpClient", HcHttpClient.class)//
            .singleton("sitemapsResponseProcessor", SitemapsResponseProcessor.class)//
            .<SitemapsRule>singleton("sitemapsRule", SitemapsRule.class, rule -> {
                rule.setResponseProcessor(container.getComponent("sitemapsResponseProcessor"));
                rule.setRuleId("sitemapsRule");
                rule.addRule("url", ".*sitemap.*");
            })//
            .<DefaultResponseProcessor>singleton("defaultResponseProcessor", DefaultResponseProcessor.class,
                    processor -> {
                        processor.setTransformer(container.getComponent("fileTransformer"));
                        processor.setSuccessfulHttpCodes(new int[] { 200 });
                        processor.setNotModifiedHttpCodes(new int[] { 304 });
                    })//
            .<RegexRule>singleton("fileRule", RegexRule.class, rule -> {
                rule.setRuleId("fileRule");
                rule.setDefaultRule(true);
                rule.setResponseProcessor(container.getComponent("defaultResponseProcessor"));
            })//
            .<PoolingHttpClientConnectionManager>singleton("clientConnectionManager",
                    new PoolingHttpClientConnectionManager(5, TimeUnit.MINUTES), manager -> {
                        manager.setMaxTotal(200);
                        manager.setDefaultMaxPerRoute(20);
                    });

    s2Robot = container.getComponent("s2Robot");
    dataService = container.getComponent("dataService");
    urlQueueService = container.getComponent("urlQueueService");
    fileTransformer = container.getComponent("fileTransformer");

}

From source file:org.obm.opush.PingHandlerTest.java

@Test
@Ignore("OBMFULL-4125")
public void test3BlockingClient() throws Exception {
    prepareMockNoChange(Arrays.asList(users.jaures));

    opushServer.start();

    OPClient opClient = testUtils.buildWBXMLOpushClient(users.jaures, opushServer.getHttpPort(), httpClient);

    ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(20, 20, 1, TimeUnit.MINUTES,
            new LinkedBlockingQueue<Runnable>());

    Stopwatch stopwatch = Stopwatch.createStarted();

    List<Future<Document>> futures = new ArrayList<Future<Document>>();
    for (int i = 0; i < 4; ++i) {
        futures.add(queuePingCommand(opClient, users.jaures, threadPoolExecutor));
    }

    for (Future<Document> f : futures) {
        Document response = f.get();
        checkNoChangeResponse(response);
    }

    checkExecutionTime(2, 5, stopwatch);
}

From source file:com.enitalk.configs.DateCache.java

@Bean(name = "exchangeCache")
public LoadingCache<String, BigDecimal> exchange() {
    CacheBuilder<Object, Object> ccc = CacheBuilder.newBuilder();
    ccc.expireAfterWrite(30, TimeUnit.MINUTES);

    LoadingCache<String, BigDecimal> cache = ccc.build(new CacheLoader<String, BigDecimal>() {

        @Override
        public BigDecimal load(String key) throws Exception {
            try {
                return TinkoffController.exchangeRate(jackson);

            } catch (Exception e) {
                logger.error(ExceptionUtils.getFullStackTrace(e));
            }
            return null;
        }

    });

    return cache;
}

From source file:contestWebsite.AdminPanel.java

@Override
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    VelocityEngine ve = new VelocityEngine();
    ve.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, "html/pages, html/snippets");
    ve.init();
    VelocityContext context = new VelocityContext();
    Pair<Entity, UserCookie> infoAndCookie = init(context, req);

    UserCookie userCookie = infoAndCookie.y;
    boolean loggedIn = (boolean) context.get("loggedIn");

    String updated = req.getParameter("updated");
    if (updated != null && updated.equals("1") && !loggedIn) {
        resp.sendRedirect("/adminPanel?updated=1");
    }
    context.put("updated", req.getParameter("updated"));

    if (loggedIn && userCookie.isAdmin()) {
        Entity contestInfo = infoAndCookie.x;
        context.put("contestInfo", contestInfo);

        String confPassError = req.getParameter("confPassError");
        context.put("confPassError",
                confPassError != null && confPassError.equals("1") ? "Those passwords didn't match, try again."
                        : null);
        String passError = req.getParameter("passError");
        context.put("passError",
                passError != null && passError.equals("1") ? "That password is incorrect, try again." : null);

        context.put("middleSubjects", Test.getTests(Level.MIDDLE));
        context.put("Level", Level.class);
        context.put("subjects", Subject.values());

        String[] defaultEmails = { "forgotPass", "question", "registration" };
        for (String defaultEmail : defaultEmails) {
            String email;
            if (contestInfo.hasProperty(defaultEmail + "Email")) {
                email = ((Text) contestInfo.getProperty(defaultEmail + "Email")).getValue();
            } else {
                InputStream emailStream = getServletContext()
                        .getResourceAsStream("/html/email/" + defaultEmail + ".html");
                email = CharStreams.toString(new InputStreamReader(emailStream, Charsets.UTF_8));
                emailStream.close();
            }
            context.put(defaultEmail + "Email", email);
        }

        try {
            context.put("awardCriteria", Retrieve.awardCriteria(contestInfo));
            context.put("qualifyingCriteria", Retrieve.qualifyingCriteria(contestInfo));
            context.put("clientId", contestInfo.getProperty("OAuth2ClientId"));
        } catch (Exception e) {
            System.err.println("Surpressing exception while loading admin panel");
            e.printStackTrace();
        }

        SimpleDateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy");
        dateFormat.setTimeZone(TimeZone.getTimeZone("GMT+6"));

        try {
            Date endDate = dateFormat.parse((String) contestInfo.getProperty("editEndDate"));
            Date startDate = dateFormat.parse((String) contestInfo.getProperty("editStartDate"));
            if (new Date().after(endDate) || new Date().before(startDate)) {
                context.put("regEditClosed", true);
            }
        } catch (Exception e) {
            context.put("regEditClosed", true);
        }

        try {
            Date endDate = dateFormat.parse((String) contestInfo.getProperty("endDate"));
            Date startDate = dateFormat.parse((String) contestInfo.getProperty("startDate"));
            if (new Date().after(endDate) || new Date().before(startDate)) {
                context.put("regClosed", true);
            }
        } catch (Exception e) {
            context.put("regClosed", true);
        }

        MemcacheService memCache = MemcacheServiceFactory.getMemcacheService();
        memCache.setErrorHandler(ErrorHandlers.getConsistentLogAndContinue(java.util.logging.Level.INFO));
        byte[] tabulationTaskStatusBytes = (byte[]) memCache.get("tabulationTaskStatus");
        if (tabulationTaskStatusBytes != null) {
            String[] tabulationTaskStatus = new String(tabulationTaskStatusBytes).split("_");
            context.put("tabulationTaskStatus", tabulationTaskStatus[0]);
            List<String> tabulationTaskStatusTime = new ArrayList<String>();
            long timeAgo = new Date().getTime() - new Date(Long.parseLong(tabulationTaskStatus[1])).getTime();
            List<Pair<TimeUnit, String>> timeUnits = new ArrayList<Pair<TimeUnit, String>>() {
                {
                    add(new Pair<TimeUnit, String>(TimeUnit.DAYS, "day"));
                    add(new Pair<TimeUnit, String>(TimeUnit.HOURS, "hour"));
                    add(new Pair<TimeUnit, String>(TimeUnit.MINUTES, "minute"));
                    add(new Pair<TimeUnit, String>(TimeUnit.SECONDS, "second"));
                }
            };
            for (Pair<TimeUnit, String> entry : timeUnits) {
                if (entry.getX().convert(timeAgo, TimeUnit.MILLISECONDS) > 0) {
                    long numUnit = entry.getX().convert(timeAgo, TimeUnit.MILLISECONDS);
                    tabulationTaskStatusTime.add(numUnit + " " + entry.getY() + (numUnit == 1 ? "" : "s"));
                    timeAgo -= TimeUnit.MILLISECONDS.convert(numUnit, entry.getX());
                }
            }
            if (tabulationTaskStatusTime.size() >= 1) {
                context.put("tabulationTaskStatusTime", StringUtils.join(tabulationTaskStatusTime, ", "));
            } else {
                context.put("tabulationTaskStatusTime", timeAgo + " milliseconds");
            }
        }

        close(context, ve.getTemplate("adminPanel.html"), resp);
    } else {
        resp.sendError(HttpServletResponse.SC_FORBIDDEN,
                "Contest Administrator privileges required for that operation");
    }
}

From source file:com.zaxxer.hikari.benchmark.BenchBase.java

private void setupDBCP2() {
    org.apache.commons.pool2.impl.GenericObjectPool<org.apache.commons.dbcp2.PoolableConnection> connectionPool;
    DriverManagerConnectionFactory connectionFactory = new DriverManagerConnectionFactory(jdbcURL, "sa", "");

    // Wrap the connections and statements with pooled variants
    org.apache.commons.dbcp2.PoolableConnectionFactory poolableCF = null;
    poolableCF = new org.apache.commons.dbcp2.PoolableConnectionFactory(connectionFactory, null);

    poolableCF.setValidationQuery("VALUES 1");
    poolableCF.setDefaultTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
    poolableCF.setDefaultAutoCommit(false);
    poolableCF.setRollbackOnReturn(true);

    // Create the actual pool of connections, and apply any properties
    connectionPool = new org.apache.commons.pool2.impl.GenericObjectPool(poolableCF);
    connectionPool.setTestOnBorrow(true);
    connectionPool.setMaxIdle(maxPoolSize);

    connectionPool.setMinIdle(MIN_POOL_SIZE);
    connectionPool.setMaxTotal(maxPoolSize);
    connectionPool.setMaxWaitMillis(8000);
    connectionPool.setMinEvictableIdleTimeMillis((int) TimeUnit.MINUTES.toMillis(30));
    poolableCF.setPool(connectionPool);
    DS = new org.apache.commons.dbcp2.PoolingDataSource(connectionPool);
}

From source file:com.ibm.jaggr.core.impl.cache.CacheManagerImpl.java

/**
 * Starts up the cache. Attempts to de-serialize a previously serialized
 * cache from disk and starts the periodic serializer task.
 *
 * @param aggregator
 *            the aggregator instance this cache manager belongs to
 * @param stamp
 *            a time stamp used to determine if the cache should be cleared.
 *            The cache should be cleared if the time stamp is later than
 *            the one associated with the cached resources.
 * @throws IOException
 */
public CacheManagerImpl(IAggregator aggregator, long stamp) throws IOException {

    _directory = new File(aggregator.getWorkingDirectory(), CACHEDIR_NAME);
    _aggregator = aggregator;
    // Make sure the cache directory exists
    if (!_directory.exists()) {
        if (!_directory.mkdirs()) {
            throw new IOException(MessageFormat.format(Messages.CacheManagerImpl_0,
                    new Object[] { _directory.getAbsoluteFile() }));
        }
    }
    // Attempt to de-serialize the cache from disk
    CacheImpl cache = null;
    try {
        File file = new File(_directory, CACHE_META_FILENAME);
        ObjectInputStream is = new ObjectInputStream(new FileInputStream(file));
        try {
            cache = (CacheImpl) is.readObject();
        } finally {
            try {
                is.close();
            } catch (Exception ignore) {
            }
        }
    } catch (FileNotFoundException e) {
        if (log.isLoggable(Level.INFO))
            log.log(Level.INFO, Messages.CacheManagerImpl_1);
    } catch (InvalidClassException e) {
        if (log.isLoggable(Level.INFO))
            log.log(Level.INFO, Messages.CacheManagerImpl_2);
        // one or more of the serializable classes has changed.  Delete the stale
        // cache files
    } catch (Exception e) {
        if (log.isLoggable(Level.SEVERE))
            log.log(Level.SEVERE, e.getMessage(), e);
    }
    if (cache != null) {
        _control = (CacheControl) cache.getControlObj();
    }
    if (_control != null) {
        // stamp == 0 means no overrides.  Need to check for this explicitly
        // in case the overrides directory has been removed.
        if (stamp == 0 && _control.initStamp == 0 || stamp != 0 && stamp <= _control.initStamp) {
            // Use AggregatorProxy so that getCacheManager will return non-null
            // if called from within setAggregator.  Need to do this because
            // IAggregator.getCacheManager() is unable to return this object
            // since it is still being constructed.
            cache.setAggregator(AggregatorProxy.newInstance(_aggregator, this));
            _cache.set(cache);
        }
    } else {
        _control = new CacheControl();
        _control.initStamp = stamp;
    }

    // Start up the periodic serializer task.  Serializes the cache every 10 minutes.
    // This is done so that we can recover from an unexpected shutdown
    aggregator.getExecutors().getScheduledExecutor().scheduleAtFixedRate(new Runnable() {
        public void run() {
            try {
                File file = new File(_directory, CACHE_META_FILENAME);
                // Synchronize on the cache object to keep the scheduled cache sync thread and
                // the thread processing servlet destroy from colliding.
                synchronized (cacheSerializerSyncObj) {
                    ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(file));
                    try {
                        os.writeObject(_cache.get());
                    } finally {
                        try {
                            os.close();
                        } catch (Exception ignore) {
                        }
                    }
                }
            } catch (Exception e) {
                if (log.isLoggable(Level.SEVERE))
                    log.log(Level.SEVERE, e.getMessage(), e);
            }
        }
    }, 10, 10, TimeUnit.MINUTES);

    Dictionary<String, String> dict;

    if (_aggregator.getPlatformServices() != null) {
        dict = new Hashtable<String, String>();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _serviceRegistrations.add(_aggregator.getPlatformServices()
                .registerService(IShutdownListener.class.getName(), this, dict));

        dict = new Hashtable<String, String>();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _serviceRegistrations.add(
                _aggregator.getPlatformServices().registerService(IConfigListener.class.getName(), this, dict));

        dict = new Hashtable<String, String>();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _serviceRegistrations.add(_aggregator.getPlatformServices()
                .registerService(IDependenciesListener.class.getName(), this, dict));

        dict = new Hashtable<String, String>();
        dict.put("name", aggregator.getName()); //$NON-NLS-1$
        _serviceRegistrations.add(_aggregator.getPlatformServices()
                .registerService(IOptionsListener.class.getName(), this, dict));
    }

    optionsUpdated(aggregator.getOptions(), 1);
    configLoaded(aggregator.getConfig(), 1);
    dependenciesLoaded(aggregator.getDependencies(), 1);

    // Now invoke the listeners for objects that have already been initialized
    IOptions options = _aggregator.getOptions();
    if (options != null) {
        optionsUpdated(options, 1);
    }
    IConfig config = _aggregator.getConfig();
    if (config != null) {
        configLoaded(config, 1);
    }

    IDependencies deps = _aggregator.getDependencies();
    if (deps != null) {
        dependenciesLoaded(deps, 1);
    }
}

From source file:com.spotify.heroic.cluster.CoreClusterManager.java

AsyncFuture<Void> start() {
    final AsyncFuture<Void> startup;

    if (!options.isOneshot()) {
        startup = context.startedFuture().directTransform(result -> {
            scheduler.periodically("cluster-refresh", 1, TimeUnit.MINUTES, () -> refresh().get());

            return null;
        });
    } else {
        startup = context.startedFuture();
    }

    startup.lazyTransform(result -> refresh().catchFailed((Throwable e) -> {
        log.error("initial metadata refresh failed", e);
        return null;
    }));

    return async.resolved();
}

From source file:net.kmycode.javaspeechserver.cloud.StreamingRecognizeClient.java

/** Send streaming recognize requests to server. */
public void recognize() throws InterruptedException, IOException {
    final AudioRecorder recorder = AudioRecorder.getDefault();
    final StopWatch stopwatch = new StopWatch();

    final CountDownLatch finishLatch = new CountDownLatch(1);
    StreamObserver<StreamingRecognizeResponse> responseObserver = new StreamObserver<StreamingRecognizeResponse>() {
        private int sentenceLength = 1;

        /**
        * Prints the transcription results. Interim results are overwritten by subsequent
        * results, until a final one is returned, at which point we start a new line.
        *
        * Flags the program to exit when it hears "exit".
        */
        @Override
        public void onNext(StreamingRecognizeResponse response) {

            byteStringQueue.clear();
            stopwatch.reset();

            List<StreamingRecognitionResult> results = response.getResultsList();
            if (results.size() < 1) {
                return;
            }

            StreamingRecognitionResult result = results.get(0);
            String transcript = result.getAlternatives(0).getTranscript();

            // Print interim results with a line feed, so subsequent transcriptions will overwrite
            // it. Final result will print a newline.
            String format = "%-" + this.sentenceLength + 's';
            format += " (" + result.getAlternatives(0).getConfidence() + ") ";
            if (result.getIsFinal()) {
                format += '\n';
                this.sentenceLength = 1;
                finishLatch.countDown();
            } else {
                format += '\r';
                this.sentenceLength = transcript.length();
            }
            System.out.print(String.format(format, transcript));
        }

        @Override
        public void onError(Throwable error) {
            logger.log(Level.ERROR, "recognize failed: {0}", error);
            finishLatch.countDown();
        }

        @Override
        public void onCompleted() {
            logger.info("recognize completed.");
            finishLatch.countDown();
        }
    };

    this.requestObserver = this.speechClient.streamingRecognize(responseObserver);
    try {
        // Build and send a StreamingRecognizeRequest containing the parameters for
        // processing the audio.
        RecognitionConfig config = RecognitionConfig.newBuilder()
                .setEncoding(RecognitionConfig.AudioEncoding.LINEAR16).setSampleRate(recorder.getSamplingRate())
                .setLanguageCode("ja-JP").build();
        StreamingRecognitionConfig streamingConfig = StreamingRecognitionConfig.newBuilder().setConfig(config)
                .setInterimResults(true).setSingleUtterance(false).build();

        StreamingRecognizeRequest initial = StreamingRecognizeRequest.newBuilder()
                .setStreamingConfig(streamingConfig).build();
        requestObserver.onNext(initial);

        while (this.byteStringQueue.size() > 0) {
            ByteString data = this.byteStringQueue.poll();
            this.request(data);
        }

        // Read and send sequential buffers of audio as additional RecognizeRequests.
        while (finishLatch.getCount() > 0 && recorder.read()) {
            if (recorder.isSound()) {
                ByteString data = this.recorder.getBufferAsByteString();
                this.byteStringQueue.add(data);

                if (!stopwatch.isStarted()) {
                    stopwatch.start();
                } else if (stopwatch.getTime() > 2000) {
                    this.byteStringQueue.clear();
                    break;
                }

                this.request(data);
            } else {
                this.notSoundCount++;
                if (this.notSoundCount >= 3) {
                    // stop recognition
                    break;
                }
            }
        }
    } catch (RuntimeException e) {
        // Cancel RPC.
        requestObserver.onError(e);
        throw e;
    }
    // Mark the end of requests.
    requestObserver.onCompleted();

    // Receiving happens asynchronously.
    finishLatch.await(1, TimeUnit.MINUTES);
}

From source file:com.stimulus.archiva.index.VolumeIndex.java

public void indexMessage(Email message) throws MessageSearchException {

    long s = (new Date()).getTime();
    if (message == null)
        throw new MessageSearchException("assertion failure: null message", logger);
    logger.debug("indexing message {" + message + "}");

    Document doc = new Document();
    try {

        DocumentIndex docIndex = new DocumentIndex(indexer);
        String language = doc.get("lang");
        if (language == null)
            language = indexer.getIndexLanguage();
        IndexInfo indexInfo = new IndexInfo();

        docIndex.write(message, doc, indexInfo);
        try {
            boolean isLocked = indexLock.tryLock(10, TimeUnit.MINUTES);
            if (!isLocked) {
                throw new MessageSearchException(
                        "failed to write email to index. could not acquire lock on index.", logger);
            }
            openIndex();
            writer.addDocument(doc, AnalyzerFactory.getAnalyzer(language, AnalyzerFactory.Operation.INDEX));
        } catch (InterruptedException ie) {
            throw new MessageSearchException("failed to write email to index. interrupted.", logger);
        } finally {
            indexLock.unlock();
            indexInfo.cleanup();
        }
        doc = null;

        logger.debug("message indexed successfully {" + message + ",language='" + language + "'}");
    } catch (MessagingException me) {
        throw new MessageSearchException("failed to decode message during indexing", me, logger,
                ChainedException.Level.DEBUG);
    } catch (IOException me) {
        throw new MessageSearchException("failed to index message {" + message + "}", me, logger,
                ChainedException.Level.DEBUG);
    } catch (ExtractionException ee) {
        // we will want to continue indexing
        //throw new MessageSearchException("failed to decode attachments in message {"+message+"}",ee,logger, ChainedException.Level.DEBUG);
    } catch (AlreadyClosedException ace) {
        indexMessage(message);
    } catch (Throwable e) {
        throw new MessageSearchException("failed to index message", e, logger, ChainedException.Level.DEBUG);
    }
    logger.debug("indexing message end {" + message + "}");

    long e = (new Date()).getTime();
    logger.debug("indexing time {time='" + (e - s) + "'}");
}