Example usage for java.sql Timestamp getTime

Introduction

This page collects examples of how java.sql.Timestamp.getTime is used in open-source projects.

Prototype

public long getTime() 

Document

Returns the number of milliseconds since January 1, 1970, 00:00:00 GMT represented by this Timestamp object.
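
A minimal sketch of what this contract means in practice: the value is plain epoch milliseconds, so two Timestamp instances can be compared or subtracted directly. The class and variable names below are illustrative, not part of any of the projects quoted later.

import java.sql.Timestamp;

public class GetTimeDemo {
    public static void main(String[] args) {
        Timestamp start = new Timestamp(System.currentTimeMillis());
        Timestamp end = new Timestamp(start.getTime() + 1500); // pretend 1.5 s passed

        // getTime() returns milliseconds since January 1, 1970, 00:00:00 GMT,
        // so elapsed time is a plain subtraction of two longs.
        long elapsedMillis = end.getTime() - start.getTime();
        System.out.println("Elapsed: " + elapsedMillis + " ms"); // prints 1500
    }
}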

Usage

From source file:com.emc.smartcomm.UregApplication.java

/**
 * @param register A SmartRegister instance 
 */
public void uregApplication(SmartRegister register) {

    logger.info("Processing UREG Tranasctional Flow:");
    GrsiRequest greq = PopulateData.setGrsiRequest();
    GrsiResponse grsiResponse = PopulateData.setGrsiResponse();
    BabProtocolRequest breq = PopulateData.setBabRequest();
    BabProtocolResponse bres = PopulateData.setBabResponse();
    ValidateRegRequest vreg = PopulateData.setValidateRegRequest();
    ValidateRegResponse vres = PopulateData.setValidateRegResponse();
    greq.setSvcFlag(register.getChannel());
    String msg = register.toString();
    String path = "/appl/LogTransaction.txt";

    Timestamp txStartTime = PrepareLog.getCurrentTimeStamp();
    StopWatch sw = new StopWatch("UREG Transaction");
    sw.start("UREG Transaction");
    logger.debug("UREG Transaction Initiated:{}", txStartTime);
    StopWatch sw0 = new StopWatch("UREG REQUEST");
    sw0.start("UREG REQUEST");
    uregTemplate.convertAndSend(msg);
    sw0.stop();
    logger.debug(sw0.prettyPrint());
    logger.debug(sw0.shortSummary());

    StopWatch sw1 = new StopWatch("GRSI Request");
    sw1.start("GRSI Request");
    grsiTemplate.convertAndSend(greq);
    sw1.stop();
    logger.debug(sw1.prettyPrint());
    logger.debug(sw1.shortSummary());

    if ("PVS".equals(grsiResponse.getsx12())) // || "BAB".equals(grsiResponse.getsx13()))
    {
        StopWatch sw2 = new StopWatch("Validate Request:");
        sw2.start("Validate Request:");
        String validateRegText = vreg.toString();
        validateRegTemplate.convertAndSend(validateRegText);
        sw2.stop();
        logger.debug(sw2.prettyPrint());
        logger.debug(sw2.shortSummary());
    }

    if ("PPC".equals(grsiResponse.getsx03())) {

        StopWatch sw3 = new StopWatch("BAB Request");
        sw3.start("BAB Request:");
        babTemplate.convertAndSend("bab.Request", breq.toString());
        sw3.stop();
        logger.debug(sw3.prettyPrint());
        logger.debug(sw3.shortSummary());
    }

    grsiResponse.setsx03("NSN");
    if ("NSN".equals(grsiResponse.getsx03())) {

        InputStream is = getClass().getResourceAsStream("/mock/SOAPProtocolRecharge.txt");
        String message = FileReader.readFile(is);
        StopWatch sw4 = new StopWatch("SOAP Recharge Request: ");
        sw4.start("SOAP Recharge Request:");
        soapRechargeTemplate.convertAndSend(message);
        sw4.stop();
        logger.debug(sw4.prettyPrint());
        logger.debug(sw4.shortSummary());

    }

    Timestamp txEndTime = PrepareLog.getCurrentTimeStamp();
    logger.debug("Persisting Transaction log in gemxd and oracle");
    LogTransaction logTransaction = PrepareLog.prepareLog(greq, grsiResponse, breq, bres, vreg, vres);
    logTransaction.setTxnStartTime(txStartTime);
    logTransaction.setTxnEndTime(txEndTime);
    StopWatch sw5 = new StopWatch("Transaction Persistence: ");
    sw5.start("Transaction Persistence:");
    logTransactionService.logTransaction(logTransaction);
    sw5.stop();
    logger.debug(sw5.prettyPrint());
    logger.debug(sw5.shortSummary());
    ExternalFileWriter.writeToFile(path, PopulateData.populateLog());

    sw.stop();
    logger.debug(sw.prettyPrint());
    logger.debug(sw.shortSummary());
    logger.debug("UREG Transaction is Completed:{}", txEndTime);
    logger.info("UREG Transaction TimeSpan:{}", (txEndTime.getTime() - txStartTime.getTime()));

}

From source file:com.jkoolcloud.tnt4j.core.UsecTimestamp.java

/**
 * @see #UsecTimestamp(Timestamp, long)
 */
private void initFromTimestamp(Timestamp timestamp, long usecs) {
    if (timestamp == null)
        throw new NullPointerException("timestamp must be non-null");
    if (usecs < 0 || usecs > 999999)
        throw new IllegalArgumentException("usecs must be in the range [0,999999], inclusive");

    this.msecs = timestamp.getTime();
    if (usecs > 999) {
        // extract milliseconds portion from usecs and add to msecs
        long msecs = usecs / 1000;
        this.msecs += msecs;
        usecs -= msecs * 1000;
    }
    this.usecs = usecs;
}
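
The normalization above folds whole milliseconds out of the microsecond remainder so that usecs always ends up below 1000. A standalone worked sketch of that arithmetic (plain Java, not the tnt4j class itself):

import java.sql.Timestamp;

public class UsecSplitDemo {
    public static void main(String[] args) {
        Timestamp timestamp = new Timestamp(1000L); // 1000 ms after the epoch
        long usecs = 1500;                          // 1.5 ms expressed in microseconds

        long msecs = timestamp.getTime() + usecs / 1000; // 1000 + 1 = 1001
        usecs = usecs % 1000;                            // 500

        System.out.println(msecs + " ms + " + usecs + " us"); // 1001 ms + 500 us
    }
}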

From source file:uk.ac.soton.itinnovation.sad.service.controllers.JobsController.java

/**
 * Returns data for job with job ID, sample url mapping: /jobs/1/data
 *
 * @param jobId ID of the job whose data is returned
 * @return SAD job data as JSON
 */
@RequestMapping(method = RequestMethod.GET, value = "/{jobId}/data")
@ResponseBody
public JsonResponse getDataForJobWithId(@PathVariable String jobId) {

    logger.debug("Returning data for job with ID: " + jobId);

    schedulingService.pushMethodCalledName("/jobs/" + jobId + "/data");

    long startTime = System.currentTimeMillis();

    try {

        JSONObject response = new JSONObject();
        JSONObject series = new JSONObject();

        SADJob theJob = schedulingService.getJob(jobId);
        ArrayList<SADJobData> jobData = schedulingService.getDataForJobWithId(jobId);
        logger.debug("Found: " + jobData.size() + " job data entries");

        // return data as is
        JSONObject dataEntryAsJson, rawJsonData;
        Timestamp tempTimestamp;
        int counter = 0;
        for (SADJobData dataEntry : jobData) {
            dataEntryAsJson = new JSONObject();

            rawJsonData = JSONObject.fromObject(dataEntry.getJsonData());
            dataEntryAsJson.put("jsonData", rawJsonData);
            dataEntryAsJson.put("type", dataEntry.getDataType());

            tempTimestamp = dataEntry.getWhenCollected();
            if (tempTimestamp == null) {
                dataEntryAsJson.put("WhenCreated_as_string", "");
                dataEntryAsJson.put("WhenCreated_in_msec", "");
            } else {
                dataEntryAsJson.put("WhenCreated_as_string", Util.timestampToString(tempTimestamp));
                dataEntryAsJson.put("WhenCreated_in_msec", tempTimestamp.getTime());
            }
            series.put(counter, dataEntryAsJson);
            counter++;
        }
        response.put("series", series);

        response.put("num", jobData.size());
        response.put("jobstatus", theJob.getStatus());

        return new JsonResponse("ok", response);

    } catch (Throwable ex) {
        return new JsonResponse("error",
                Util.dealWithException("Failed to return data for job with requested ID", ex, logger));
    } finally {
        schedulingService.pushTimeSpent(Long.toString(System.currentTimeMillis() - startTime));
    }
}

From source file:org.kuali.kfs.module.tem.service.impl.PerDiemServiceImpl.java

/**
 * Uses travelDocumentDao to look up per diem records (TravelDocumentDao handles the date weirdness of per diem dates)
 * @see org.kuali.kfs.module.tem.service.PerDiemService#getPerDiem(int, java.sql.Timestamp, java.sql.Date)
 */
@Override
public PerDiem getPerDiem(int primaryDestinationId, java.sql.Timestamp perDiemDate,
        java.sql.Date effectiveDate) {
    final List<PerDiem> possiblePerDiems = getTravelDocumentDao().findEffectivePerDiems(primaryDestinationId,
            effectiveDate);
    Date date = KfsDateUtils.clearTimeFields(new Date(perDiemDate.getTime()));

    if (possiblePerDiems.isEmpty()) {
        return null;
    }
    if (possiblePerDiems.size() == 1) {
        return possiblePerDiems.get(0);
    }

    Collections.sort(possiblePerDiems, new PerDiemComparator());
    PerDiem foundPerDiem = null;
    for (PerDiem perDiem : possiblePerDiems) {
        if (isOnOrAfterSeasonBegin(perDiem.getSeasonBeginMonthAndDay(), perDiemDate)) {
            foundPerDiem = perDiem;
        }
    }
    if (foundPerDiem == null) {
        // no found per diem, so let's take the *last* one of the list (because years are circular and so the last of the list represents the beginning of the year; see KFSTP-926 for a discussion of this)
        foundPerDiem = possiblePerDiems.get(possiblePerDiems.size() - 1);
    }

    return foundPerDiem;
}
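
The wrap-around fallback at the end of the method is easier to see with a small standalone model: with season boundaries at March 1 and October 1, a mid-January date is before both, so the last entry wins because its season is still running from the previous year. A hedged sketch using java.time, not the Kuali API:

import java.time.MonthDay;
import java.util.Arrays;
import java.util.List;

public class SeasonPickDemo {
    public static void main(String[] args) {
        List<MonthDay> seasonBegins = Arrays.asList(MonthDay.of(3, 1), MonthDay.of(10, 1)); // sorted
        MonthDay query = MonthDay.of(1, 15); // mid-January

        MonthDay found = null;
        for (MonthDay begin : seasonBegins) {
            if (!query.isBefore(begin)) { // on or after this season's begin
                found = begin;
            }
        }
        if (found == null) {
            // Years are circular: the latest season of the previous year still applies.
            found = seasonBegins.get(seasonBegins.size() - 1);
        }
        System.out.println("Applicable season begins " + found); // --10-01
    }
}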

From source file:org.eevolution.form.VSCRP.java

public CategoryDataset createWeightDataset(Timestamp start, MResource r) {

    GregorianCalendar gc1 = new GregorianCalendar();
    gc1.setTimeInMillis(start.getTime());
    gc1.clear(Calendar.MILLISECOND);
    gc1.clear(Calendar.SECOND);
    gc1.clear(Calendar.MINUTE);
    gc1.clear(Calendar.HOUR_OF_DAY);

    String namecapacity = Msg.translate(Env.getCtx(), "Capacity");
    String nameload = Msg.translate(Env.getCtx(), "Load");
    String namesummary = Msg.translate(Env.getCtx(), "Summary");
    String namepossiblecapacity = "Possible Capacity";

    MResourceType t = new MResourceType(Env.getCtx(), r.getS_ResourceType_ID(), null);

    DefaultCategoryDataset dataset = new DefaultCategoryDataset();

    double currentweight = DB.getSQLValue(null,
            "Select SUM( (mo.qtyordered-mo.qtydelivered)*(Select mp.weight From m_product mp Where mo.m_product_id=mp.m_product_id )  )From mpc_order mo Where ad_client_id=?",
            r.getAD_Client_ID());
    // fjviejo e-evolution machineqty: capacity multiplied by the number of machines
    // double dailyCapacity = DB.getSQLValue(null,"Select dailycapacity From s_resource Where s_resource_id=?",r.getS_Resource_ID());
    double dailyCapacity = DB.getSQLValue(null,
            "Select dailycapacity*MachineQty From s_resource Where s_resource_id=?", r.getS_Resource_ID());
    System.out.println("***** Daily capacity " + dailyCapacity);
    // e-evolution end
    double utilization = DB.getSQLValue(null,
            "Select percentutillization From s_resource Where s_resource_id=?", r.getS_Resource_ID());

    double summary = 0;

    int day = 0;
    while (day < 32) {

        day++;

        // Determine whether the resource type works on the current day of the week.
        boolean available;
        switch (gc1.get(Calendar.DAY_OF_WEEK)) {
        case Calendar.SUNDAY:
            available = t.isOnSunday();
            break;
        case Calendar.MONDAY:
            available = t.isOnMonday();
            break;
        case Calendar.TUESDAY:
            available = t.isOnTuesday();
            break;
        case Calendar.WEDNESDAY:
            available = t.isOnWednesday();
            break;
        case Calendar.THURSDAY:
            available = t.isOnThursday();
            break;
        case Calendar.FRIDAY:
            available = t.isOnFriday();
            break;
        case Calendar.SATURDAY:
            available = t.isOnSaturday();
            break;
        default:
            available = false;
        }

        if (available) {
            // Working day: consume capacity from the outstanding load and accumulate the summary.
            currentweight -= (dailyCapacity * utilization) / 100;
            summary += (dailyCapacity * utilization) / 100;

            dataset.addValue(dailyCapacity, namepossiblecapacity, Integer.valueOf(day));
            dataset.addValue((dailyCapacity * utilization) / 100, namecapacity, Integer.valueOf(day));
        } else {
            dataset.addValue(0, namepossiblecapacity, Integer.valueOf(day));
            dataset.addValue(0, namecapacity, Integer.valueOf(day));
        }

        dataset.addValue(currentweight, nameload, Integer.valueOf(day));
        dataset.addValue(summary, namesummary, Integer.valueOf(day));

        gc1.add(Calendar.DATE, 1);
    }

    return dataset;
}

From source file:org.kuali.rice.kew.impl.document.lookup.DocumentLookupGeneratorImpl.java

protected DocumentLookupResult.Builder processRow(DocumentLookupCriteria criteria,
        Statement searchAttributeStatement, ResultSet rs) throws SQLException {

    String documentId = rs.getString("DOC_HDR_ID");
    String initiatorPrincipalId = rs.getString("INITR_PRNCPL_ID");
    String documentTypeName = rs.getString("DOC_TYP_NM");
    org.kuali.rice.kew.api.doctype.DocumentType documentType = KewApiServiceLocator.getDocumentTypeService()
            .getDocumentTypeByName(documentTypeName);
    if (documentType == null) {
        throw new IllegalStateException(
                "Failed to locate a document type with the given name: " + documentTypeName);
    }
    String documentTypeId = documentType.getId();

    Document.Builder documentBuilder = Document.Builder.create(documentId, initiatorPrincipalId,
            documentTypeName, documentTypeId);
    DocumentLookupResult.Builder resultBuilder = DocumentLookupResult.Builder.create(documentBuilder);

    String statusCode = rs.getString("DOC_HDR_STAT_CD");
    Timestamp createTimestamp = rs.getTimestamp("CRTE_DT");
    String title = rs.getString("TTL");
    String applicationDocumentStatus = rs.getString("APP_DOC_STAT");

    documentBuilder.setStatus(DocumentStatus.fromCode(statusCode));
    documentBuilder.setDateCreated(new DateTime(createTimestamp.getTime()));
    documentBuilder.setTitle(title);
    documentBuilder.setApplicationDocumentStatus(applicationDocumentStatus);

    // TODO - Rice 2.0 - should probably set as many properties on the document as we can

    if (isUsingAtLeastOneSearchAttribute(criteria)) {
        populateDocumentAttributesValues(resultBuilder, searchAttributeStatement);
    }

    return resultBuilder;
}
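
The new DateTime(createTimestamp.getTime()) call above is the usual bridge from java.sql.Timestamp into Joda-Time: DateTime's long constructor expects milliseconds since the epoch, which is exactly what getTime() returns. A minimal sketch, assuming Joda-Time is on the classpath:

import java.sql.Timestamp;
import org.joda.time.DateTime;

public class TimestampToJoda {
    public static void main(String[] args) {
        Timestamp created = new Timestamp(System.currentTimeMillis());
        // DateTime(long) interprets its argument as epoch milliseconds.
        DateTime dateCreated = new DateTime(created.getTime());
        System.out.println(dateCreated);
    }
}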

From source file:org.dcache.chimera.PgSQLFsSqlDriver.java

@Override
protected FsInode createInodeInParent(FsInode parent, String name, String id, int owner, int group, int mode,
        int type, int nlink, long size) {
    Timestamp now = new Timestamp(System.currentTimeMillis());

    Long inumber = _jdbc.query("SELECT f_create_inode(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", cs -> {
        cs.setLong(1, parent.ino());
        cs.setString(2, name);
        cs.setString(3, id);
        cs.setInt(4, type);
        cs.setInt(5, mode & UnixPermission.S_PERMS);
        cs.setInt(6, nlink);
        cs.setInt(7, owner);
        cs.setInt(8, group);
        cs.setLong(9, size);
        cs.setInt(10, _ioMode);
        cs.setTimestamp(11, now);
    }, rs -> rs.next() ? rs.getLong(1) : null);
    if (inumber == null) {
        throw new IncorrectUpdateSemanticsDataAccessException("f_create_inode failed to return an inumber.");
    }

    Stat stat = new Stat();
    stat.setIno(inumber);
    stat.setId(id);
    stat.setCrTime(now.getTime());
    stat.setGeneration(0);
    stat.setSize(size);
    stat.setATime(now.getTime());
    stat.setCTime(now.getTime());
    stat.setMTime(now.getTime());
    stat.setUid(owner);
    stat.setGid(group);
    stat.setMode(mode & UnixPermission.S_PERMS | type);
    stat.setNlink(nlink);
    stat.setDev(17);
    stat.setRdev(13);

    return new FsInode(parent.getFs(), inumber, FsInodeType.INODE, 0, stat);
}

From source file:DateUtils.java

/**
 * Copy constructor which creates an exact copy of the timestamp, including
 * the nanosecond portion.
 * 
 * @param original - original timestamp to copy
 */
public TimestampCopy(Timestamp original) {
    // Pass the time portion here
    super(original.getTime());
    // And now set the correct nanoseconds since it is required.
    setNanos(original.getNanos());
}
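
The setNanos call matters because getTime() alone carries only millisecond precision: building a new Timestamp from the long value rounds the nanosecond field down to whole milliseconds. A short demonstration of the difference:

import java.sql.Timestamp;

public class TimestampCopyDemo {
    public static void main(String[] args) {
        Timestamp original = new Timestamp(1000L);
        original.setNanos(123456789); // 0.123456789 s into the current second

        // Copying through getTime() alone keeps only the millisecond part:
        Timestamp lossy = new Timestamp(original.getTime());
        System.out.println(lossy.getNanos());       // 123000000

        // Restoring the nanos completes the copy, as the constructor above does:
        lossy.setNanos(original.getNanos());
        System.out.println(original.equals(lossy)); // true
    }
}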

From source file:uk.ac.soton.itinnovation.sad.service.controllers.ExecutionsController.java

/**
 * Returns all executions on the service, url mapping: /executions
 *
 * @return all executions as JSON
 */
@RequestMapping(method = RequestMethod.GET)
@ResponseBody
public JsonResponse getExecutions() {

    logger.debug("Returning all executions");

    schedulingService.pushMethodCalledName("executions");

    long startTime = System.currentTimeMillis();

    try {
        JSONObject response = new JSONObject();

        ArrayList<SADJobExecution> allExecutions = schedulingService.getExecutions();

        if (allExecutions.isEmpty()) {
            logger.debug("No executions were found.");
            response.put("message", "No executions were found.");
            return new JsonResponse("error", response);
        } else {
            int allExecutionsSize = allExecutions.size();
            response.put("num", allExecutionsSize);
            JSONArray allExecutionsAsJsonArray = new JSONArray();

            JSONObject executionAsJson;
            Timestamp tempTimestamp;
            for (SADJobExecution execution : allExecutions) {
                executionAsJson = new JSONObject();

                executionAsJson.put("DatabaseId", execution.getCountID());
                executionAsJson.put("ID", execution.getId());
                executionAsJson.put("SADJobID", execution.getSADJobID());
                executionAsJson.put("Description", execution.getDescription());
                executionAsJson.put("Status", execution.getStatus());

                tempTimestamp = execution.getWhenStarted();
                if (tempTimestamp == null) {
                    executionAsJson.put("WhenStarted_as_string", "");
                    executionAsJson.put("WhenStarted_in_msec", "");
                } else {
                    executionAsJson.put("WhenStarted_as_string", tempTimestamp.toString());
                    executionAsJson.put("WhenStarted_in_msec", tempTimestamp.getTime());
                }

                tempTimestamp = execution.getWhenFinished();
                if (tempTimestamp == null) {
                    executionAsJson.put("WhenFinished_as_string", "");
                    executionAsJson.put("WhenFinished_in_msec", "");
                } else {
                    executionAsJson.put("WhenFinished_as_string", tempTimestamp.toString());
                    executionAsJson.put("WhenFinished_in_msec", tempTimestamp.getTime());
                }

                allExecutionsAsJsonArray.add(executionAsJson);
            }

            if (allExecutionsSize < 2) {
                logger.debug("Returning " + allExecutions.size() + " execution");
            } else {
                logger.debug("Returning " + allExecutions.size() + " executions");
            }
            response.put("list", allExecutionsAsJsonArray);
            return new JsonResponse("ok", response);
        }
    } catch (Throwable ex) {
        return new JsonResponse("error", Util.dealWithException("Failed to return all executions", ex, logger));
    } finally {
        schedulingService.pushTimeSpent(Long.toString(System.currentTimeMillis() - startTime));
    }
}

From source file:fr.ericlab.sondy.core.DataManipulation.java

public void prepareStream(String datasetName, int intervalDuration, int ngram, String stemLanguage,
        boolean lemmatization, AppVariables appVariables) {
    try {
        Connection connection;
        Class.forName("com.mysql.jdbc.Driver").newInstance();
        connection = DriverManager.getConnection("jdbc:mysql://" + appVariables.configuration.getHost(),
                appVariables.configuration.getUsername(), appVariables.configuration.getPassword());
        Statement statement = connection.createStatement();
        Statement statement2 = connection.createStatement();

        String lemStr = (lemmatization) ? "_lem1" : "_lem0";
        statement.executeUpdate("CREATE TABLE " + appVariables.configuration.getSchema() + "." + datasetName
                + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                + "gram ( id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, msg_author VARCHAR(100), msg_post_time TIMESTAMP, msg_text VARCHAR(600), time_slice INT)ENGINE=myisam;");
        //            statement.executeUpdate("CREATE INDEX index_time ON "+appVariables.configuration.getSchema()+"."+datasetName+"_messages (msg_post_time)");

        ResultSet rsTMin = statement.executeQuery("select min(msg_post_time) from "
                + appVariables.configuration.getSchema() + "." + datasetName + "_messages;");
        rsTMin.next();
        Timestamp tMin = rsTMin.getTimestamp(1);
        ResultSet rsTMax = statement.executeQuery("select max(msg_post_time) from "
                + appVariables.configuration.getSchema() + "." + datasetName + "_messages;");
        rsTMax.next();
        Timestamp tMax = rsTMax.getTimestamp(1);
        Timestamp tRef = new Timestamp(0);
        long base = (tMin.getTime() - tRef.getTime()) * 1L;
        long streamDuration = (tMax.getTime() - tMin.getTime()) * 1L;
        long streamDurationMin = (streamDuration / 1000) / 60;

        String path = appVariables.configuration.getWorkspace() + "/datasets/" + datasetName + "/"
                + intervalDuration + "min-" + stemLanguage;
        path += (lemmatization) ? "-lem1" : "-lem0";
        path += "-" + ngram + "gram";
        String pathMention = path + "-m";

        FSDirectory indexGlobal = FSDirectory.open(new File(path));
        FSDirectory indexMention = FSDirectory.open(new File(pathMention));
        Analyzer analyzer;
        Properties props = new Properties();
        props.put("annotators", "tokenize,ssplit,parse,lemma");
        StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
        Annotation annotation;
        if (stemLanguage.equalsIgnoreCase("Standard")) {
            analyzer = new StandardAnalyzer(Version.LUCENE_36);
        } else {
            Class cl;
            if (stemLanguage.equals("Chinese")) {
                analyzer = new SmartChineseAnalyzer(Version.LUCENE_36);
            } else {
                String packageName = stemLanguage.substring(0, 2).toLowerCase();
                cl = Class
                        .forName("org.apache.lucene.analysis." + packageName + "." + stemLanguage + "Analyzer");
                Class[] types = new Class[] { Version.class, Set.class };
                Constructor ct = cl.getConstructor(types);
                analyzer = (Analyzer) ct.newInstance(Version.LUCENE_36, appVariables.currentStopWords.getSet());
            }
        }
        IndexWriterConfig configGlobal;
        IndexWriterConfig configMention;
        ShingleAnalyzerWrapper shingleAnalyzer = null;
        if (ngram > 1) {
            shingleAnalyzer = new ShingleAnalyzerWrapper(analyzer, ngram, ngram, " ", false, false);
            WhitespaceAnalyzer whitespaceAnalyzer = new WhitespaceAnalyzer(Version.LUCENE_36);
            configGlobal = new IndexWriterConfig(Version.LUCENE_36, whitespaceAnalyzer);
            configMention = new IndexWriterConfig(Version.LUCENE_36, whitespaceAnalyzer);
        } else {
            configGlobal = new IndexWriterConfig(Version.LUCENE_36, analyzer);
            configMention = new IndexWriterConfig(Version.LUCENE_36, analyzer);
        }
        IndexWriter wGlobal = new IndexWriter(indexGlobal, configGlobal);
        IndexWriter wMention = new IndexWriter(indexMention, configMention);

        int docId = 0;
        for (int i = 0; i < streamDurationMin; i += intervalDuration) {
            statement = connection.createStatement();
            long infBound = base + i * 60 * 1000L;
            long supBound = base + (i + intervalDuration) * 60 * 1000L;
            Timestamp infTime = new Timestamp(infBound);
            Timestamp supTime = new Timestamp(supBound);
            ResultSet rs = statement.executeQuery("SELECT msg_text, msg_post_time, msg_author FROM "
                    + appVariables.configuration.getSchema() + "." + datasetName
                    + "_messages WHERE msg_post_time>'" + infTime + "' AND msg_post_time< '" + supTime + "'");
            String globalContent = "";
            String mentionContent = "";
            String timestamps = "";
            NumberFormat formatter = new DecimalFormat("00000000");
            int bulk = 0;
            String bulkString = "";
            boolean mention;
            while (rs.next()) {
                String message = rs.getString(1).toLowerCase();
                mention = message.contains("@");
                if (lemmatization) {
                    annotation = new Annotation(message);
                    message = "";
                    pipeline.annotate(annotation);
                    List<CoreMap> lem = annotation.get(SentencesAnnotation.class);
                    for (CoreMap l : lem) {
                        for (CoreLabel token : l.get(TokensAnnotation.class)) {
                            message += token.get(LemmaAnnotation.class) + " ";
                        }
                    }
                }
                if (ngram > 1) {
                    String processedMessage = "";
                    TokenStream tokenStream = shingleAnalyzer.tokenStream("text", new StringReader(message));
                    CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
                    while (tokenStream.incrementToken()) {
                        String termToken = charTermAttribute.toString();
                        if (!termToken.contains("_")) {
                            processedMessage += termToken.replace(" ", "=") + " ";
                        }
                    }
                    message = processedMessage;
                }
                bulk++;
                if (bulk < _BULK_SIZE_) {
                    bulkString += " (" + docId + ",'" + rs.getString(2) + "',\"" + message + "\",\""
                            + rs.getString(3) + "\"),";
                } else {
                    bulk = 0;
                    bulkString += " (" + docId + ",'" + rs.getString(2) + "',\"" + message + "\",\""
                            + rs.getString(3) + "\");";
                    statement2.executeUpdate("INSERT INTO " + appVariables.configuration.getSchema() + "."
                            + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_"
                            + ngram + "gram (time_slice,msg_post_time,msg_text,msg_author) VALUES"
                            + bulkString);
                    bulkString = "";
                }
                globalContent += message + "\n";
                if (mention) {
                    mentionContent += message + "\n";
                }
                timestamps += rs.getString(2) + "\n";
            }
            if (bulk > 0 && bulkString.length() > 0) {
                statement2.executeUpdate("INSERT INTO " + appVariables.configuration.getSchema() + "."
                        + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                        + "gram (time_slice,msg_post_time,msg_text,msg_author) VALUES"
                        + bulkString.substring(0, bulkString.length() - 1) + ";");
            }
            Document docGlobal = new Document();
            docGlobal.add(new Field("content", globalContent, Field.Store.YES, Field.Index.ANALYZED,
                    Field.TermVector.YES));
            docGlobal.add(new Field("id", Integer.toString(docId), Field.Store.YES, Field.Index.NOT_ANALYZED));
            wGlobal.addDocument(docGlobal);
            wGlobal.commit();
            Document docMention = new Document();
            docMention.add(new Field("content", mentionContent, Field.Store.YES, Field.Index.ANALYZED,
                    Field.TermVector.YES));
            docMention.add(new Field("id", Integer.toString(docId), Field.Store.YES, Field.Index.NOT_ANALYZED));
            wMention.addDocument(docMention);
            wMention.commit();

            File textFile = new File(path + "/input/" + formatter.format(docId) + ".text");
            FileUtils.writeStringToFile(textFile, globalContent);
            File timeFile = new File(path + "/input/" + formatter.format(docId) + ".time");
            FileUtils.writeStringToFile(timeFile, timestamps);

            docId++;
            statement.close();
        }
        statement2.executeUpdate("CREATE INDEX index_time_slice ON " + appVariables.configuration.getSchema()
                + "." + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                + "gram (time_slice);");
        statement2.executeUpdate("CREATE FULLTEXT INDEX index_text ON " + appVariables.configuration.getSchema()
                + "." + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                + "gram (msg_text);");
        statement2.close();
        connection.close();
        wGlobal.close();
        wMention.close();
    } catch (IOException ex) {
        Logger.getLogger(DataManipulation.class.getName()).log(Level.SEVERE, null, ex);
    } catch (SQLException | InstantiationException | IllegalAccessException | ClassNotFoundException ex) {
        Logger.getLogger(DataManipulation.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchMethodException | SecurityException | IllegalArgumentException
            | InvocationTargetException ex) {
        Logger.getLogger(DataManipulation.class.getName()).log(Level.SEVERE, null, ex);
    }
}
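
The slicing logic in this example is plain epoch-millisecond arithmetic on getTime(): the stream is cut into fixed windows of intervalDuration minutes starting at the earliest message time. A minimal standalone sketch of that windowing, with illustrative values:

import java.sql.Timestamp;

public class TimeSliceDemo {
    public static void main(String[] args) {
        int intervalDuration = 30; // minutes per slice
        Timestamp tMin = Timestamp.valueOf("2024-01-01 00:00:00");
        Timestamp tMax = Timestamp.valueOf("2024-01-01 02:00:00");

        long base = tMin.getTime(); // epoch millis of the first message
        long streamDurationMin = (tMax.getTime() - tMin.getTime()) / 1000 / 60;

        for (int i = 0; i < streamDurationMin; i += intervalDuration) {
            Timestamp infTime = new Timestamp(base + i * 60 * 1000L);
            Timestamp supTime = new Timestamp(base + (i + intervalDuration) * 60 * 1000L);
            System.out.println("slice [" + infTime + ", " + supTime + ")");
        }
    }
}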