Example usage for java.text SimpleDateFormat applyPattern

List of usage examples for java.text SimpleDateFormat applyPattern

Introduction

On this page you can find example usage for java.text SimpleDateFormat applyPattern.

Prototype

public void applyPattern(String pattern) 

Document

Applies the given pattern string to this date format.
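
Before the project examples below, here is a minimal standalone sketch of the call. Class and variable names are illustrative only; the point is that one formatter instance can be reused and its pattern swapped in place.

import java.text.SimpleDateFormat;
import java.util.Date;

public class ApplyPatternDemo {
    public static void main(String[] args) {
        // One formatter instance, reused with different patterns
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
        Date now = new Date();
        System.out.println(format.format(now));   // e.g. 2024-01-31

        // Switch the pattern in place instead of creating a new formatter
        format.applyPattern("HH:mm:ss");
        System.out.println(format.format(now));   // e.g. 14:05:09
    }
}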

Usage

From source file: org.sakaiproject.contentreview.compilatio.CompilatioReviewServiceImpl.java

@SuppressWarnings({ "deprecation" })
@Override
public void checkForReports() {
    SimpleDateFormat dform = ((SimpleDateFormat) DateFormat.getDateInstance());
    dform.applyPattern(COMPILATIO_DATETIME_FORMAT);

    log.info("Fetching reports from Compilatio");

    // get the list of all items that are waiting for reports
    List<ContentReviewItem> awaitingReport = crqs.getAwaitingReports(getProviderId());

    Iterator<ContentReviewItem> listIterator = awaitingReport.iterator();
    HashMap<String, Integer> reportTable = new HashMap<>();

    log.debug("There are " + awaitingReport.size() + " submissions awaiting reports");

    int errors = 0;
    int success = 0;
    int inprogress = 0;
    ContentReviewItem currentItem;
    while (listIterator.hasNext()) {
        currentItem = (ContentReviewItem) listIterator.next();

        // has the item reached its next retry time?
        if (currentItem.getNextRetryTime() == null) {
            currentItem.setNextRetryTime(new Date());
        }

        if (currentItem.getNextRetryTime().after(new Date())) {
            // we haven't reached the next retry time
            log.info("checkForReports :: next retry time not yet reached for item: " + currentItem.getId());
            crqs.update(currentItem);
            continue;
        }

        if (currentItem.getRetryCount() == null) {
            currentItem.setRetryCount(Long.valueOf(0));
            currentItem.setNextRetryTime(this.getNextRetryTime(0));
        } else if (currentItem.getRetryCount().intValue() > maxRetry) {
            processError(currentItem,
                    ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_RETRY_EXCEEDED_CODE, null, null);
            errors++;
            continue;
        } else {
            long l = currentItem.getRetryCount().longValue();
            log.debug("Still have retries left (" + l + " <= " + maxRetry + "), continuing. ItemID: "
                    + currentItem.getId());
            l++;
            currentItem.setRetryCount(Long.valueOf(l));
            currentItem.setNextRetryTime(this.getNextRetryTime(Long.valueOf(l)));
            crqs.update(currentItem);
        }

        //back to analysis (this should not happen)
        if (StringUtils.isBlank(currentItem.getExternalId())) {
            currentItem
                    .setStatus(Long.valueOf(ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_RETRY_CODE));
            crqs.update(currentItem);
            errors++;
            continue;
        }

        if (!reportTable.containsKey(currentItem.getExternalId())) {
            // get the list from compilatio and see if the review is
            // available

            log.debug("Attempting to update hashtable with reports for site " + currentItem.getSiteId());

            Map<String, String> params = CompilatioAPIUtil.packMap("action", "getDocument", "idDocument",
                    currentItem.getExternalId());

            Document document = null;
            try {
                document = compilatioConn.callCompilatioReturnDocument(params);
            } catch (TransientSubmissionException | SubmissionException e) {
                log.warn("Update failed : " + e.toString(), e);
                processError(currentItem, ContentReviewConstants.CONTENT_REVIEW_REPORT_ERROR_RETRY_CODE,
                        e.getMessage(), null);
                errors++;
                continue;
            }

            Element root = document.getDocumentElement();
            if (root.getElementsByTagName("documentStatus").item(0) != null) {
                log.debug("Report list returned successfully");

                NodeList objects = root.getElementsByTagName("documentStatus");
                log.debug(objects.getLength() + " objects in the returned list");

                String status = getNodeValue("status", root);

                if ("ANALYSE_NOT_STARTED".equals(status)) {
                    // send it back to the process queue; it needs to be analyzed again
                    processError(currentItem, ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_RETRY_CODE,
                            "ANALYSE_NOT_STARTED", null);
                    errors++;
                    continue;
                } else if ("ANALYSE_COMPLETE".equals(status)) {
                    String reportVal = getNodeValue("indice", root);
                    currentItem.setReviewScore((int) Math.round(Double.parseDouble(reportVal)));
                    currentItem
                            .setStatus(ContentReviewConstants.CONTENT_REVIEW_SUBMITTED_REPORT_AVAILABLE_CODE);
                    success++;
                } else {
                    String progression = getNodeValue("progression", root);
                    if (StringUtils.isNotBlank(progression)) {
                        currentItem.setReviewScore((int) Double.parseDouble(progression));
                        inprogress++;
                    }
                }
                currentItem.setDateReportReceived(new Date());
                crqs.update(currentItem);
                log.debug("new report received: " + currentItem.getExternalId() + " -> "
                        + currentItem.getReviewScore());

            } else {
                log.debug("Report list request not successful");
                log.debug(document.getTextContent());
                errors++;
            }
        }
    }

    log.info("Finished fetching reports from Compilatio : " + success + " success items, " + inprogress
            + " in progress, " + errors + " errors");
}
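
The snippet above obtains its formatter by casting DateFormat.getDateInstance() to SimpleDateFormat before calling applyPattern with the project's COMPILATIO_DATETIME_FORMAT constant. A minimal sketch of that idiom follows; the literal pattern below is only a stand-in for that constant, and note that the JDK does not guarantee the factory method returns a SimpleDateFormat for every locale.

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

public class LocaleFormatterDemo {
    public static void main(String[] args) {
        // getDateInstance() commonly returns a SimpleDateFormat, but the cast
        // is not guaranteed to succeed for every locale.
        SimpleDateFormat dform = (SimpleDateFormat) DateFormat.getDateInstance();
        dform.applyPattern("yyyy-MM-dd HH:mm:ss"); // stand-in for COMPILATIO_DATETIME_FORMAT
        System.out.println(dform.format(new Date()));
    }
}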

From source file: org.pentaho.di.job.entries.mssqlbulkload.JobEntryMssqlBulkLoad.java

public Result execute(Result previousResult, int nr) {
    String TakeFirstNbrLines = "";
    String LineTerminatedby = "";
    String FieldTerminatedby = "";
    boolean useFieldSeparator = false;
    String UseCodepage = "";
    String ErrorfileName = "";

    Result result = previousResult;
    result.setResult(false);

    String vfsFilename = environmentSubstitute(filename);
    FileObject fileObject = null;
    // Let's check the filename ...
    if (!Const.isEmpty(vfsFilename)) {
        try {
            // User has specified a file, We can continue ...
            //
            // This is running over VFS but we need a normal file.
            // As such, we're going to verify that it's a local file...
            // We're also going to convert VFS FileObject to File
            //
            fileObject = KettleVFS.getFileObject(vfsFilename, this);
            if (!(fileObject instanceof LocalFile)) {
                // MSSQL BULK INSERT can only use local files, so that's what we limit ourselves to.
                //
                throw new KettleException(BaseMessages.getString(PKG,
                        "JobMssqlBulkLoad.Error.OnlyLocalFileSupported", vfsFilename));
            }

            // Convert it to a regular platform specific file name
            //
            String realFilename = KettleVFS.getFilename(fileObject);

            // Here we go... back to the regular scheduled program...
            //
            File file = new File(realFilename);
            if (file.exists() && file.canRead()) {
                // User has specified an existing file, We can continue ...
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.FileExists.Label", realFilename));
                }

                if (connection != null) {
                    // User has specified a connection, We can continue ...
                    Database db = new Database(this, connection);

                    if (!(db.getDatabaseMeta().getDatabaseInterface() instanceof MSSQLServerDatabaseMeta)) {
                        logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.DbNotMSSQL",
                                connection.getDatabaseName()));
                        return result;
                    }
                    db.shareVariablesWith(this);
                    try {
                        db.connect(parentJob.getTransactionId(), null);
                        // Get schemaname
                        String realSchemaname = environmentSubstitute(schemaname);
                        // Get tablename
                        String realTablename = environmentSubstitute(tablename);

                        // Add schemaname (most of the time Schemaname.Tablename)
                        if (schemaname != null) {
                            realTablename = realSchemaname + "." + realTablename;
                        }

                        if (db.checkTableExists(realTablename)) {
                            // The table exists, we can continue ...
                            if (log.isDetailed()) {
                                logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.TableExists.Label",
                                        realTablename));
                            }

                            // FIELDTERMINATOR
                            String Fieldterminator = getRealFieldTerminator();
                            if (Const.isEmpty(Fieldterminator)
                                    && (datafiletype.equals("char") || datafiletype.equals("widechar"))) {
                                logError(BaseMessages.getString(PKG,
                                        "JobMssqlBulkLoad.Error.FieldTerminatorMissing"));
                                return result;
                            } else {
                                if (datafiletype.equals("char") || datafiletype.equals("widechar")) {
                                    useFieldSeparator = true;
                                    FieldTerminatedby = "FIELDTERMINATOR='" + Fieldterminator + "'";
                                }
                            }
                            // Check Specific Code page
                            if (codepage.equals("Specific")) {
                                String realCodePage = environmentSubstitute(specificcodepage);
                                if (specificcodepage.length() == 0) {
                                    logError(BaseMessages.getString(PKG,
                                            "JobMssqlBulkLoad.Error.SpecificCodePageMissing"));
                                    return result;

                                } else {
                                    UseCodepage = "CODEPAGE = '" + realCodePage + "'";
                                }
                            } else {
                                UseCodepage = "CODEPAGE = '" + codepage + "'";
                            }

                            // Check Error file
                            String realErrorFile = environmentSubstitute(errorfilename);
                            if (realErrorFile != null) {
                                File errorfile = new File(realErrorFile);
                                if (errorfile.exists() && !adddatetime) {
                                    // The error file is created when the command is executed. An error occurs if the file already
                                    // exists.
                                    logError(BaseMessages.getString(PKG,
                                            "JobMssqlBulkLoad.Error.ErrorFileExists"));
                                    return result;
                                }
                                if (adddatetime) {
                                    // Add date time to filename...
                                    SimpleDateFormat daf = new SimpleDateFormat();
                                    Date now = new Date();
                                    daf.applyPattern("yyyMMdd_HHmmss");
                                    String d = daf.format(now);

                                    ErrorfileName = "ERRORFILE ='" + realErrorFile + "_" + d + "'";
                                } else {
                                    ErrorfileName = "ERRORFILE ='" + realErrorFile + "'";
                                }
                            }

                            // ROWTERMINATOR
                            String Rowterminator = getRealLineterminated();
                            if (!Const.isEmpty(Rowterminator)) {
                                LineTerminatedby = "ROWTERMINATOR='" + Rowterminator + "'";
                            }

                            // Start file at
                            if (startfile > 0) {
                                TakeFirstNbrLines = "FIRSTROW=" + startfile;
                            }

                            // End file at
                            if (endfile > 0) {
                                TakeFirstNbrLines = "LASTROW=" + endfile;
                            }

                            // Truncate table?
                            String SQLBULKLOAD = "";
                            if (truncate) {
                                SQLBULKLOAD = "TRUNCATE TABLE " + realTablename + ";";
                            }

                            // Build BULK Command
                            SQLBULKLOAD = SQLBULKLOAD + "BULK INSERT " + realTablename + " FROM " + "'"
                                    + realFilename.replace('\\', '/') + "'";
                            SQLBULKLOAD = SQLBULKLOAD + " WITH (";
                            if (useFieldSeparator) {
                                SQLBULKLOAD = SQLBULKLOAD + FieldTerminatedby;
                            } else {
                                SQLBULKLOAD = SQLBULKLOAD + "DATAFILETYPE ='" + datafiletype + "'";
                            }

                            if (LineTerminatedby.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + LineTerminatedby;
                            }
                            if (TakeFirstNbrLines.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + TakeFirstNbrLines;
                            }
                            if (UseCodepage.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + UseCodepage;
                            }
                            String realFormatFile = environmentSubstitute(formatfilename);
                            if (realFormatFile != null) {
                                SQLBULKLOAD = SQLBULKLOAD + ", FORMATFILE='" + realFormatFile + "'";
                            }
                            if (firetriggers) {
                                SQLBULKLOAD = SQLBULKLOAD + ",FIRE_TRIGGERS";
                            }
                            if (keepnulls) {
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPNULLS";
                            }
                            if (keepidentity) {
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPIDENTITY";
                            }
                            if (checkconstraints) {
                                SQLBULKLOAD = SQLBULKLOAD + ",CHECK_CONSTRAINTS";
                            }
                            if (tablock) {
                                SQLBULKLOAD = SQLBULKLOAD + ",TABLOCK";
                            }
                            if (orderby != null) {
                                SQLBULKLOAD = SQLBULKLOAD + ",ORDER ( " + orderby + " " + orderdirection + ")";
                            }
                            if (ErrorfileName.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", " + ErrorfileName;
                            }
                            if (maxerrors > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", MAXERRORS=" + maxerrors;
                            }
                            if (batchsize > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", BATCHSIZE=" + batchsize;
                            }
                            if (rowsperbatch > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", ROWS_PER_BATCH=" + rowsperbatch;
                            }
                            // End of Bulk command
                            SQLBULKLOAD = SQLBULKLOAD + ")";

                            try {
                                // Run the SQL
                                db.execStatement(SQLBULKLOAD);

                                // Everything is OK...we can disconnect now
                                db.disconnect();

                                if (isAddFileToResult()) {
                                    // Add filename to output files
                                    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
                                            KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(),
                                            toString());
                                    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                                }

                                result.setResult(true);
                            } catch (KettleDatabaseException je) {
                                result.setNrErrors(1);
                                logError("An error occurred executing this job entry : " + je.getMessage(), je);
                            } catch (KettleFileException e) {
                                logError("An error occurred executing this job entry : " + e.getMessage(), e);
                                result.setNrErrors(1);
                            } finally {
                                if (db != null) {
                                    db.disconnect();
                                    db = null;
                                }
                            }
                        } else {
                            // Of course, the table should have been created already before the bulk load operation
                            db.disconnect();
                            result.setNrErrors(1);
                            if (log.isDetailed()) {
                                logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.TableNotExists",
                                        realTablename));
                            }
                        }
                    } catch (KettleDatabaseException dbe) {
                        db.disconnect();
                        result.setNrErrors(1);
                        logError("An error occurred executing this entry: " + dbe.getMessage());
                    }
                } else {
                    // No database connection is defined
                    result.setNrErrors(1);
                    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nodatabase.Label"));
                }
            } else {
                // the file doesn't exist
                result.setNrErrors(1);
                logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.FileNotExists", realFilename));
            }
        } catch (Exception e) {
            // An unexpected error occurred
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.UnexpectedError.Label"), e);
        } finally {
            try {
                if (fileObject != null) {
                    fileObject.close();
                }
            } catch (Exception e) {
                // Ignore errors
            }
        }
    } else {
        // No file was specified
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nofilename.Label"));
    }
    return result;
}
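
The adddatetime branch above switches a default-constructed SimpleDateFormat to a timestamp pattern in order to build the ERRORFILE clause of the BULK INSERT statement. A standalone sketch of just that clause-building step (the path below is illustrative, not from the source project):

import java.text.SimpleDateFormat;
import java.util.Date;

public class ErrorFileNameDemo {
    public static void main(String[] args) {
        String realErrorFile = "/tmp/bulkload_errors"; // illustrative path
        SimpleDateFormat daf = new SimpleDateFormat(); // default pattern, replaced below
        daf.applyPattern("yyyyMMdd_HHmmss");
        String stamp = daf.format(new Date());
        String errorFileClause = "ERRORFILE ='" + realErrorFile + "_" + stamp + "'";
        System.out.println(errorFileClause); // e.g. ERRORFILE ='/tmp/bulkload_errors_20240131_140509'
    }
}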

From source file: com.mjhram.geodata.GpsMainActivity.java

public void captureAndShareGMap() {
    GoogleMap.SnapshotReadyCallback callback = new GoogleMap.SnapshotReadyCallback() {
        @Override
        public void onSnapshotReady(Bitmap snapshot1) {
            // TODO Auto-generated method stub
            SimpleDateFormat sdf = new SimpleDateFormat("HH:mm");
            Calendar calendar = Calendar.getInstance();
            calendar.setTimeInMillis(lastMarkerTime);
            String eTime = sdf.format(calendar.getTime());
            calendar.setTimeInMillis(firstMarkerTime);
            String fTime = sdf.format(calendar.getTime());
            sdf.applyPattern("dd MMM,yyyy");
            String fDate = sdf.format(calendar.getTime());
            String dateString = fDate + " " + fTime + "-" + eTime;

            Bitmap snapshot = drawMultilineTextToBitmap(GpsMainActivity.this, snapshot1, dateString);
            //bitmap = snapshot;
            String filePath = System.currentTimeMillis() + ".jpeg";
            File folder = new File(Environment.getExternalStorageDirectory() + "/geodatatmp");
            boolean success = true;
            if (!folder.exists()) {
                success = folder.mkdir();
            }
            if (!success) {
                return;
            }
            //filePath = Environment.getExternalStorageDirectory().toString() + "/" + filePath;
            try {
                File imageFile = new File(folder, filePath);

                FileOutputStream fout = new FileOutputStream(imageFile);
                // Write the string to the file
                snapshot.compress(Bitmap.CompressFormat.JPEG, 90, fout);
                fout.flush();
                fout.close();
                //Toast.makeText(GpsMainActivity.this, "Stored in: " + filePath, Toast.LENGTH_LONG).show();
                Intent shareIntent = new Intent();
                shareIntent.setAction(Intent.ACTION_SEND);
                shareIntent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(imageFile));
                shareIntent.setType("image/jpeg");
                startActivity(Intent.createChooser(shareIntent, "Share..."));

            } catch (FileNotFoundException e) {
                // TODO Auto-generated catch block
                Log.d("ImageCapture", "FileNotFoundException");
                Log.d("ImageCapture", e.getMessage());
                filePath = "";
            } catch (IOException e) {
                // TODO Auto-generated catch block
                Log.d("ImageCapture", "IOException");
                Log.d("ImageCapture", e.getMessage());
                filePath = "";
            }

            //openShareImageDialog(filePath);
        }
    };

    googleMap.snapshot(callback);
}
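
The snapshot callback above formats both endpoints of a time range and the date with a single SimpleDateFormat, switching from a time pattern to a date pattern via applyPattern. A trimmed, standalone sketch of that caption-building logic (marker times here are illustrative values):

import java.text.SimpleDateFormat;
import java.util.Calendar;

public class SnapshotCaptionDemo {
    public static void main(String[] args) {
        long firstMarkerTime = System.currentTimeMillis() - 3_600_000L; // illustrative
        long lastMarkerTime = System.currentTimeMillis();               // illustrative

        SimpleDateFormat sdf = new SimpleDateFormat("HH:mm");
        Calendar calendar = Calendar.getInstance();

        calendar.setTimeInMillis(lastMarkerTime);
        String endTime = sdf.format(calendar.getTime());

        calendar.setTimeInMillis(firstMarkerTime);
        String startTime = sdf.format(calendar.getTime());

        // Reuse the same formatter for the date portion by swapping the pattern
        sdf.applyPattern("dd MMM,yyyy");
        String date = sdf.format(calendar.getTime());

        System.out.println(date + " " + startTime + "-" + endTime);
    }
}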

From source file: com.panet.imeta.job.entries.mssqlbulkload.JobEntryMssqlBulkLoad.java

public Result execute(Result previousResult, int nr, Repository rep, Job parentJob) {
    String TakeFirstNbrLines = "";
    String LineTerminatedby = "";
    String FieldTerminatedby = "";
    boolean useFieldSeparator = false;
    String UseCodepage = "";
    String ErrorfileName = "";

    LogWriter log = LogWriter.getInstance();

    Result result = previousResult;
    result.setResult(false);

    String vfsFilename = environmentSubstitute(filename);
    FileObject fileObject = null;
    // Let's check the filename ...
    if (!Const.isEmpty(vfsFilename)) {
        try {
            // User has specified a file, We can continue ...
            //
            // This is running over VFS but we need a normal file.
            // As such, we're going to verify that it's a local file...
            // We're also going to convert VFS FileObject to File
            //
            fileObject = KettleVFS.getFileObject(vfsFilename);
            if (!(fileObject instanceof LocalFile)) {
                // MSSQL BULK INSERT can only use local files, so that's
                // what we limit ourselves to.
                //
                throw new KettleException(
                        Messages.getString("JobMssqlBulkLoad.Error.OnlyLocalFileSupported", vfsFilename));
            }

            // Convert it to a regular platform specific file name
            //
            String realFilename = KettleVFS.getFilename(fileObject);

            // Here we go... back to the regular scheduled program...
            //
            File file = new File(realFilename);
            if (file.exists() && file.canRead()) {
                // User has specified an existing file, We can continue ...
                if (log.isDetailed())
                    log.logDetailed(toString(),
                            Messages.getString("JobMssqlBulkLoad.FileExists.Label", realFilename));

                if (connection != null) {
                    // User has specified a connection, We can continue ...
                    Database db = new Database(connection);

                    if (db.getDatabaseMeta().getDatabaseType() != DatabaseMeta.TYPE_DATABASE_MSSQL) {
                        log.logError(toString(), Messages.getString("JobMssqlBulkLoad.Error.DbNotMSSQL",
                                connection.getDatabaseName()));
                        return result;
                    }
                    db.shareVariablesWith(this);
                    try {
                        db.connect();
                        // Get schemaname
                        String realSchemaname = environmentSubstitute(schemaname);
                        // Get tablename
                        String realTablename = environmentSubstitute(tablename);

                        if (db.checkTableExists(realTablename)) {
                            // The table exists, we can continue ...
                            if (log.isDetailed())
                                log.logDetailed(toString(), Messages
                                        .getString("JobMssqlBulkLoad.TableExists.Label", realTablename));

                            // Add schemaname (most of the time
                            // Schemaname.Tablename)
                            if (schemaname != null)
                                realTablename = realSchemaname + "." + realTablename;

                            // FIELDTERMINATOR
                            String Fieldterminator = getRealFieldTerminator();
                            if (Const.isEmpty(Fieldterminator)
                                    && (datafiletype.equals("char") || datafiletype.equals("widechar"))) {
                                log.logError(toString(),
                                        Messages.getString("JobMssqlBulkLoad.Error.FieldTerminatorMissing"));
                                return result;
                            } else {
                                if (datafiletype.equals("char") || datafiletype.equals("widechar")) {
                                    useFieldSeparator = true;
                                    FieldTerminatedby = "FIELDTERMINATOR='" + Fieldterminator + "'";
                                }
                            }
                            // Check Specific Code page
                            if (codepage.equals("Specific")) {
                                if (specificcodepage.length() == 0) {
                                    log.logError(toString(), Messages
                                            .getString("JobMssqlBulkLoad.Error.SpecificCodePageMissing"));
                                    return result;

                                } else
                                    UseCodepage = "CODEPAGE = '" + specificcodepage + "'";
                            } else {
                                UseCodepage = "CODEPAGE = '" + codepage + "'";
                            }

                            // Check Error file
                            if (errorfilename != null) {
                                File errorfile = new File(errorfilename);
                                if (errorfile.exists() && !adddatetime) {
                                    // The error file is created when the
                                    // command is executed. An error occurs
                                    // if the file already exists.
                                    log.logError(toString(),
                                            Messages.getString("JobMssqlBulkLoad.Error.ErrorFileExists"));
                                    return result;
                                }
                                if (adddatetime) {
                                    // Add date time to filename...
                                    SimpleDateFormat daf = new SimpleDateFormat();
                                    Date now = new Date();
                                    daf.applyPattern("yyyMMdd_HHmmss");
                                    String d = daf.format(now);

                                    ErrorfileName = "ERRORFILE ='" + errorfilename + "_" + d + "'";
                                } else
                                    ErrorfileName = "ERRORFILE ='" + errorfilename + "'";
                            }

                            // ROWTERMINATOR
                            String Rowterminator = getRealLineterminated();
                            if (!Const.isEmpty(Rowterminator))
                                LineTerminatedby = "ROWTERMINATOR='" + Rowterminator + "'";

                            // Start file at
                            if (startfile > 0)
                                TakeFirstNbrLines = "FIRSTROW=" + startfile;

                            // End file at
                            if (endfile > 0)
                                TakeFirstNbrLines = "LASTROW=" + endfile;

                            // Truncate table?
                            String SQLBULKLOAD = "";
                            if (truncate)
                                SQLBULKLOAD = "TRUNCATE " + realTablename + ";";

                            // Build BULK Command
                            SQLBULKLOAD = SQLBULKLOAD + "BULK INSERT " + realTablename + " FROM " + "'"
                                    + realFilename.replace('\\', '/') + "'";
                            SQLBULKLOAD = SQLBULKLOAD + " WITH (";
                            if (useFieldSeparator)
                                SQLBULKLOAD = SQLBULKLOAD + FieldTerminatedby;
                            else
                                SQLBULKLOAD = SQLBULKLOAD + "DATAFILETYPE ='" + datafiletype + "'";

                            if (LineTerminatedby.length() > 0)
                                SQLBULKLOAD = SQLBULKLOAD + "," + LineTerminatedby;
                            if (TakeFirstNbrLines.length() > 0)
                                SQLBULKLOAD = SQLBULKLOAD + "," + TakeFirstNbrLines;
                            if (UseCodepage.length() > 0)
                                SQLBULKLOAD = SQLBULKLOAD + "," + UseCodepage;
                            if (formatfilename != null)
                                SQLBULKLOAD = SQLBULKLOAD + ", FORMATFILE='" + formatfilename + "'";
                            if (firetriggers)
                                SQLBULKLOAD = SQLBULKLOAD + ",FIRE_TRIGGERS";
                            if (keepnulls)
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPNULLS";
                            if (keepidentity)
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPIDENTITY";
                            if (checkconstraints)
                                SQLBULKLOAD = SQLBULKLOAD + ",CHECK_CONSTRAINTS";
                            if (tablock)
                                SQLBULKLOAD = SQLBULKLOAD + ",TABLOCK";
                            if (orderby != null)
                                SQLBULKLOAD = SQLBULKLOAD + ",ORDER ( " + orderby + " " + orderdirection + ")";
                            if (ErrorfileName.length() > 0)
                                SQLBULKLOAD = SQLBULKLOAD + ", " + ErrorfileName;
                            if (maxerrors > 0)
                                SQLBULKLOAD = SQLBULKLOAD + ", MAXERRORS=" + maxerrors;
                            if (batchsize > 0)
                                SQLBULKLOAD = SQLBULKLOAD + ", BATCHSIZE=" + batchsize;
                            if (rowsperbatch > 0)
                                SQLBULKLOAD = SQLBULKLOAD + ", ROWS_PER_BATCH=" + rowsperbatch;
                            // End of Bulk command
                            SQLBULKLOAD = SQLBULKLOAD + ")";

                            try {
                                // Run the SQL
                                db.execStatements(SQLBULKLOAD);

                                // Everything is OK...we can disconnect now
                                db.disconnect();

                                if (isAddFileToResult()) {
                                    // Add filename to output files
                                    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
                                            KettleVFS.getFileObject(realFilename), parentJob.getJobname(),
                                            toString());
                                    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                                }

                                result.setResult(true);
                            } catch (KettleDatabaseException je) {
                                result.setNrErrors(1);
                                log.logError(toString(),
                                        "An error occurred executing this job entry : " + je.getMessage());
                            } catch (IOException e) {
                                log.logError(toString(),
                                        "An error occurred executing this job entry : " + e.getMessage());
                                result.setNrErrors(1);
                            } finally {
                                if (db != null) {
                                    db.disconnect();
                                    db = null;
                                }
                            }
                        } else {
                            // Of course, the table should have been created
                            // already before the bulk load operation
                            db.disconnect();
                            result.setNrErrors(1);
                            if (log.isDetailed())
                                log.logDetailed(toString(), Messages
                                        .getString("JobMssqlBulkLoad.Error.TableNotExists", realTablename));
                        }
                    } catch (KettleDatabaseException dbe) {
                        db.disconnect();
                        result.setNrErrors(1);
                        log.logError(toString(), "An error occurred executing this entry: " + dbe.getMessage());
                    }
                } else {
                    // No database connection is defined
                    result.setNrErrors(1);
                    log.logError(toString(), Messages.getString("JobMssqlBulkLoad.Nodatabase.Label"));
                }
            } else {
                // the file doesn't exist
                result.setNrErrors(1);
                log.logError(toString(),
                        Messages.getString("JobMssqlBulkLoad.Error.FileNotExists", realFilename));
            }
        } catch (Exception e) {
            // An unexpected error occurred
            result.setNrErrors(1);
            log.logError(toString(), Messages.getString("JobMssqlBulkLoad.UnexpectedError.Label"), e);
        } finally {
            try {
                if (fileObject != null)
                    fileObject.close();
            } catch (Exception e) {
            }
        }
    } else {
        // No file was specified
        result.setNrErrors(1);
        log.logError(toString(), Messages.getString("JobMssqlBulkLoad.Nofilename.Label"));
    }
    return result;
}

From source file: org.pentaho.di.trans.steps.xmloutput.XMLOutputMeta.java

public String buildFilename(VariableSpace space, int stepnr, int splitnr, boolean ziparchive) {
    SimpleDateFormat daf = new SimpleDateFormat();
    DecimalFormat df = new DecimalFormat("00000");

    // Replace possible environment variables...
    String retval = space.environmentSubstitute(fileName);
    String realextension = space.environmentSubstitute(extension);

    Date now = new Date();

    if (SpecifyFormat && !Const.isEmpty(date_time_format)) {
        daf.applyPattern(date_time_format);
        String dt = daf.format(now);
        retval += dt;
    } else {
        if (dateInFilename) {
            daf.applyPattern("yyyyMMdd");
            String d = daf.format(now);
            retval += "_" + d;
        }
        if (timeInFilename) {
            daf.applyPattern("HHmmss");
            String t = daf.format(now);
            retval += "_" + t;
        }
    }

    if (stepNrInFilename) {
        retval += "_" + stepnr;
    }
    if (splitEvery > 0) {
        retval += "_" + df.format(splitnr + 1);
    }

    if (zipped) {
        if (ziparchive) {
            retval += ".zip";
        } else {
            if (realextension != null && realextension.length() != 0) {
                retval += "." + realextension;
            }
        }
    } else {
        if (realextension != null && realextension.length() != 0) {
            retval += "." + realextension;
        }
    }
    return retval;
}
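
A reduced sketch of the same pattern-switching used in buildFilename above, without the Kettle-specific types. The class and method names here are made up for illustration; only the applyPattern calls mirror the original logic.

import java.text.SimpleDateFormat;
import java.util.Date;

public class FilenameSuffixDemo {

    // Appends either a caller-supplied date/time pattern or the default
    // yyyyMMdd / HHmmss suffixes, mirroring the buildFilename logic above.
    static String addDateTimeSuffix(String base, String dateTimeFormat,
                                    boolean dateInFilename, boolean timeInFilename) {
        SimpleDateFormat daf = new SimpleDateFormat();
        Date now = new Date();
        if (dateTimeFormat != null && !dateTimeFormat.isEmpty()) {
            daf.applyPattern(dateTimeFormat);
            return base + daf.format(now);
        }
        if (dateInFilename) {
            daf.applyPattern("yyyyMMdd");
            base += "_" + daf.format(now);
        }
        if (timeInFilename) {
            daf.applyPattern("HHmmss");
            base += "_" + daf.format(now);
        }
        return base;
    }

    public static void main(String[] args) {
        System.out.println(addDateTimeSuffix("export", null, true, true));
        System.out.println(addDateTimeSuffix("export", "yyyy-MM-dd'T'HHmm", false, false));
    }
}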

From source file: edu.hawaii.soest.hioos.isus.ISUSSource.java

/**
 * A method that processes the data object passed and flushes the
 * data to the DataTurbine given the sensor properties in the XMLConfiguration
 * passed in.
 *
 * @param xmlConfig - the XMLConfiguration object containing the list of
 *                    sensor properties
 * @param frameMap  - the parsed data as a HierarchicalMap object
 */
public boolean process(XMLConfiguration xmlConfig, HierarchicalMap frameMap) {

    logger.debug("ISUSSource.process() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean success = false;

    try {

        // add channels of data that will be pushed to the server.  
        // Each sample will be sent to the Data Turbine as an rbnb frame.  Information
        // on each channel is found in the XMLConfiguration file (email.account.properties.xml)
        // and the StorXParser object (to get the data string)
        ChannelMap rbnbChannelMap = new ChannelMap(); // used to flush channels
        ChannelMap registerChannelMap = new ChannelMap(); // used to register channels
        int channelIndex = 0;

        String sensorName = null;
        String sensorSerialNumber = null;
        String sensorDescription = null;
        boolean isImmersed = false;
        String[] calibrationURLs = null;
        String calibrationURL = null;
        String type = null;

        List sensorList = xmlConfig.configurationsAt("account.logger.sensor");

        for (Iterator sIterator = sensorList.iterator(); sIterator.hasNext();) {
            //  
            HierarchicalConfiguration sensorConfig = (HierarchicalConfiguration) sIterator.next();
            sensorSerialNumber = sensorConfig.getString("serialNumber");

            // find the correct sensor configuration properties
            if (sensorSerialNumber.equals(frameMap.get("serialNumber"))) {

                sensorName = sensorConfig.getString("name");
                sensorDescription = sensorConfig.getString("description");
                isImmersed = new Boolean(sensorConfig.getString("isImmersed")).booleanValue();
                calibrationURLs = sensorConfig.getStringArray("calibrationURL");
                type = (String) frameMap.get("type");

                // find the correct calibrationURL from the list given the type
                for (String url : calibrationURLs) {

                    if (url.indexOf(type) > 0) {
                        calibrationURL = url;
                        break;

                    } else {
                        logger.debug("There was no match for " + type);
                    }
                }

                // get a Calibration instance to interpret raw sensor values
                Calibration calibration = new Calibration();

                if (calibration.parse(calibrationURL)) {

                    // Build the RBNB channel map 

                    // get the sample date and convert it to seconds since the epoch
                    Date frameDate = (Date) frameMap.get("date");
                    Calendar frameDateTime = Calendar.getInstance();
                    frameDateTime.setTime(frameDate);
                    double sampleTimeAsSecondsSinceEpoch = (double) (frameDateTime.getTimeInMillis() / 1000);
                    // and create a string formatted date for the given time zone
                    DATE_FORMAT.setTimeZone(TZ);
                    String frameDateAsString = DATE_FORMAT.format(frameDate).toString();

                    // get the sample data from the frame map
                    ByteBuffer rawFrame = (ByteBuffer) frameMap.get("rawFrame");
                    ISUSFrame isusFrame = (ISUSFrame) frameMap.get("parsedFrameObject");
                    String serialNumber = isusFrame.getSerialNumber();
                    String sampleDate = isusFrame.getSampleDate();
                    String sampleTime = isusFrame.getSampleTime();
                    SimpleDateFormat dtFormat = new SimpleDateFormat();
                    Date sampleDateTime = isusFrame.getSampleDateTime();
                    dtFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
                    dtFormat.applyPattern("MM/dd/yy");
                    String sampleDateUTC = dtFormat.format(sampleDateTime);
                    dtFormat.applyPattern("HH:mm:ss");
                    String sampleTimeUTC = dtFormat.format(sampleDateTime);
                    dtFormat.setTimeZone(TimeZone.getTimeZone("HST"));
                    dtFormat.applyPattern("MM/dd/yy");
                    String sampleDateHST = dtFormat.format(sampleDateTime);
                    dtFormat.applyPattern("HH:mm:ss");
                    String sampleTimeHST = dtFormat.format(sampleDateTime);
                    dtFormat.applyPattern("dd-MMM-yy HH:mm");
                    String sampleDateTimeHST = dtFormat.format(sampleDateTime);

                    double rawNitrogenConcentration = isusFrame.getNitrogenConcentration();
                    double rawAuxConcentration1 = isusFrame.getAuxConcentration1();
                    double rawAuxConcentration2 = isusFrame.getAuxConcentration2();
                    double rawAuxConcentration3 = isusFrame.getAuxConcentration3();
                    double rawRmsError = isusFrame.getRmsError();
                    double rawInsideTemperature = isusFrame.getInsideTemperature();
                    double rawSpectrometerTemperature = isusFrame.getSpectrometerTemperature();
                    double rawLampTemperature = isusFrame.getLampTemperature();
                    int rawLampTime = isusFrame.getLampTime();
                    double rawHumidity = isusFrame.getHumidity();
                    double rawLampVoltage12 = isusFrame.getLampVoltage12();
                    double rawInternalPowerVoltage5 = isusFrame.getInternalPowerVoltage5();
                    double rawMainPowerVoltage = isusFrame.getMainPowerVoltage();
                    double rawReferenceAverage = isusFrame.getReferenceAverage();
                    double rawReferenceVariance = isusFrame.getReferenceVariance();
                    double rawSeaWaterDarkCounts = isusFrame.getSeaWaterDarkCounts();
                    double rawSpectrometerAverage = isusFrame.getSpectrometerAverage();
                    int checksum = isusFrame.getChecksum();

                    //// apply calibrations to the observed data
                    double nitrogenConcentration = calibration.apply(rawNitrogenConcentration, isImmersed,
                            "NITRATE");
                    double auxConcentration1 = calibration.apply(rawAuxConcentration1, isImmersed, "AUX1");
                    double auxConcentration2 = calibration.apply(rawAuxConcentration2, isImmersed, "AUX2");
                    double auxConcentration3 = calibration.apply(rawAuxConcentration3, isImmersed, "AUX3");
                    double rmsError = calibration.apply(rawRmsError, isImmersed, "RMSe");
                    double insideTemperature = calibration.apply(rawInsideTemperature, isImmersed, "T_INT");
                    double spectrometerTemperature = calibration.apply(rawSpectrometerTemperature, isImmersed,
                            "T_SPEC");
                    double lampTemperature = calibration.apply(rawLampTemperature, isImmersed, "T_LAMP");
                    int lampTime = rawLampTime;
                    double humidity = calibration.apply(rawHumidity, isImmersed, "HUMIDITY");
                    double lampVoltage12 = calibration.apply(rawLampVoltage12, isImmersed, "VOLT_12");
                    double internalPowerVoltage5 = calibration.apply(rawInternalPowerVoltage5, isImmersed,
                            "VOLT_5");
                    double mainPowerVoltage = calibration.apply(rawMainPowerVoltage, isImmersed, "VOLT_MAIN");
                    double referenceAverage = calibration.apply(rawReferenceAverage, isImmersed, "REF_AVG");
                    double referenceVariance = calibration.apply(rawReferenceVariance, isImmersed, "REF_STD");
                    double seaWaterDarkCounts = calibration.apply(rawSeaWaterDarkCounts, isImmersed, "SW_DARK");
                    double spectrometerAverage = calibration.apply(rawSpectrometerAverage, isImmersed,
                            "SPEC_AVG");

                    // iterate through the individual wavelengths
                    List<String> variableNames = calibration.getVariableNames();
                    TreeMap<String, Double> wavelengthsMap = new TreeMap<String, Double>();
                    Collections.sort(variableNames);
                    int rawWavelengthCounts = 0;
                    int count = 1;

                    for (String name : variableNames) {

                        // just handle the wavelength channels
                        if (name.startsWith("UV_")) {
                            rawWavelengthCounts = isusFrame.getChannelWavelengthCounts(count);

                            double value = calibration.apply(rawWavelengthCounts, isImmersed, name);
                            count++;
                            wavelengthsMap.put(name, new Double(value));

                        }

                    }

                    String sampleString = "";
                    sampleString += sampleDate + "\t";
                    sampleString += sampleDateUTC + "\t";
                    sampleString += sampleTime + "\t";
                    sampleString += sampleTimeUTC + "\t";
                    sampleString += sampleDateHST + "\t";
                    sampleString += sampleTimeHST + "\t";
                    sampleString += sampleDateTimeHST + "\t";
                    sampleString += String.format("%-15.11f", nitrogenConcentration) + "\t";
                    //sampleString += String.format("%15.11f", auxConcentration1)     + "\t";
                    //sampleString += String.format("%15.11f", auxConcentration2)     + "\t";
                    //sampleString += String.format("%15.11f", auxConcentration3)     + "\t";
                    sampleString += String.format("%15.11f", rmsError) + "\t";
                    sampleString += String.format("%15.11f", insideTemperature) + "\t";
                    sampleString += String.format("%15.11f", spectrometerTemperature) + "\t";
                    sampleString += String.format("%15.11f", lampTemperature) + "\t";
                    sampleString += String.format("%6d", lampTime) + "\t";
                    sampleString += String.format("%15.11f", humidity) + "\t";
                    sampleString += String.format("%15.11f", lampVoltage12) + "\t";
                    sampleString += String.format("%15.11f", internalPowerVoltage5) + "\t";
                    sampleString += String.format("%15.11f", mainPowerVoltage) + "\t";
                    sampleString += String.format("%15.11f", referenceAverage) + "\t";
                    sampleString += String.format("%15.11f", referenceVariance) + "\t";
                    sampleString += String.format("%15.11f", seaWaterDarkCounts) + "\t";
                    sampleString += String.format("%15.11f", spectrometerAverage) + "\t";

                    Set<String> wavelengths = wavelengthsMap.keySet();
                    Iterator wIterator = wavelengths.iterator();

                    while (wIterator.hasNext()) {
                        String name = (String) wIterator.next();
                        Double wavelengthValue = (Double) wavelengthsMap.get(name);
                        sampleString += String.format("%6d", wavelengthValue.intValue()) + "\t";
                        channelIndex = registerChannelMap.Add(name);
                        registerChannelMap.PutUserInfo(channelIndex, "units=counts");
                        channelIndex = rbnbChannelMap.Add(name);
                        rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                        rbnbChannelMap.PutDataAsFloat64(channelIndex,
                                new double[] { wavelengthValue.doubleValue() });

                    }

                    sampleString += String.format("%03d", checksum);
                    sampleString += "\n";

                    // add the sample timestamp to the rbnb channel map
                    //registerChannelMap.PutTime(sampleTimeAsSecondsSinceEpoch, 0d);
                    rbnbChannelMap.PutTime(sampleTimeAsSecondsSinceEpoch, 0d);

                    // add the BinaryRawSatlanticFrameData channel to the channelMap
                    channelIndex = registerChannelMap.Add("BinaryRawSatlanticFrameData");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("BinaryRawSatlanticFrameData");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsByteArray(channelIndex, rawFrame.array());

                    // add the DecimalASCIISampleData channel to the channelMap
                    channelIndex = registerChannelMap.Add(getRBNBChannelName());
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, sampleString);

                    // add the serialNumber channel to the channelMap
                    channelIndex = registerChannelMap.Add("serialNumber");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("serialNumber");
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, serialNumber);

                    // add the sampleDateUTC channel to the channelMap
                    channelIndex = registerChannelMap.Add("sampleDateUTC");
                    registerChannelMap.PutUserInfo(channelIndex, "units=YYYYDDD");
                    channelIndex = rbnbChannelMap.Add("sampleDateUTC");
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, sampleDate);

                    // add the sampleTimeUTC channel to the channelMap
                    channelIndex = registerChannelMap.Add("sampleTimeUTC");
                    registerChannelMap.PutUserInfo(channelIndex, "units=hh.hhhhhh");
                    channelIndex = rbnbChannelMap.Add("sampleTimeUTC");
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, sampleTimeUTC);

                    // add the nitrogenConcentration channel to the channelMap
                    channelIndex = registerChannelMap.Add("nitrogenConcentration");
                    registerChannelMap.PutUserInfo(channelIndex, "units=uM");
                    channelIndex = rbnbChannelMap.Add("nitrogenConcentration");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { nitrogenConcentration });

                    // add the auxConcentration1 channel to the channelMap
                    channelIndex = registerChannelMap.Add("auxConcentration1");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("auxConcentration1");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration1 });

                    // add the auxConcentration2 channel to the channelMap
                    channelIndex = registerChannelMap.Add("auxConcentration2");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("auxConcentration2");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration2 });

                    // add the auxConcentration3 channel to the channelMap
                    channelIndex = registerChannelMap.Add("auxConcentration3");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("auxConcentration3");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration3 });

                    // add the rmsError channel to the channelMap
                    channelIndex = registerChannelMap.Add("rmsError");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("rmsError");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { rmsError });

                    // add the insideTemperature channel to the channelMap
                    channelIndex = registerChannelMap.Add("insideTemperature");
                    registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
                    channelIndex = rbnbChannelMap.Add("insideTemperature");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { insideTemperature });

                    // add the spectrometerTemperature channel to the channelMap
                    channelIndex = registerChannelMap.Add("spectrometerTemperature");
                    registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
                    channelIndex = rbnbChannelMap.Add("spectrometerTemperature");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { spectrometerTemperature });

                    // add the lampTemperature channel to the channelMap
                    channelIndex = registerChannelMap.Add("lampTemperature");
                    registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
                    channelIndex = rbnbChannelMap.Add("lampTemperature");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { lampTemperature });

                    // add the lampTime channel to the channelMap
                    channelIndex = registerChannelMap.Add("lampTime");
                    registerChannelMap.PutUserInfo(channelIndex, "units=seconds");
                    channelIndex = rbnbChannelMap.Add("lampTime");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { lampTime });

                    // add the humidity channel to the channelMap
                    channelIndex = registerChannelMap.Add("humidity");
                    registerChannelMap.PutUserInfo(channelIndex, "units=%");
                    channelIndex = rbnbChannelMap.Add("humidity");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { humidity });

                    // add the lampVoltage12 channel to the channelMap
                    channelIndex = registerChannelMap.Add("lampVoltage12");
                    registerChannelMap.PutUserInfo(channelIndex, "units=V");
                    channelIndex = rbnbChannelMap.Add("lampVoltage12");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { lampVoltage12 });

                    // add the internalPowerVoltage5 channel to the channelMap
                    channelIndex = registerChannelMap.Add("internalPowerVoltage5");
                    registerChannelMap.PutUserInfo(channelIndex, "units=V");
                    channelIndex = rbnbChannelMap.Add("internalPowerVoltage5");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { internalPowerVoltage5 });

                    // add the mainPowerVoltage channel to the channelMap
                    channelIndex = registerChannelMap.Add("mainPowerVoltage");
                    registerChannelMap.PutUserInfo(channelIndex, "units=V");
                    channelIndex = rbnbChannelMap.Add("mainPowerVoltage");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { mainPowerVoltage });

                    // add the referenceAverage channel to the channelMap
                    channelIndex = registerChannelMap.Add("referenceAverage");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("referenceAverage");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { referenceAverage });

                    // add the referenceVariance channel to the channelMap
                    channelIndex = registerChannelMap.Add("referenceVariance");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("referenceVariance");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { referenceVariance });

                    // add the seaWaterDarkCounts channel to the channelMap
                    channelIndex = registerChannelMap.Add("seaWaterDarkCounts");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("seaWaterDarkCounts");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { seaWaterDarkCounts });

                    // add the spectrometerAverage channel to the channelMap
                    channelIndex = registerChannelMap.Add("spectrometerAverage");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("averageWavelength");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { spectrometerAverage });

                    // Now register the RBNB channels, and flush the rbnbChannelMap to the
                    // DataTurbine
                    getSource().Register(registerChannelMap);
                    getSource().Flush(rbnbChannelMap);
                    logger.info(frameDateAsString + " " + "Sample sent to the DataTurbine: (" + serialNumber
                            + ") " + sampleString);

                    registerChannelMap.Clear();
                    rbnbChannelMap.Clear();

                } else {

                    logger.info("Couldn't apply the calibration coefficients. " + "Skipping this sample.");

                } // end if()

            } // end if()

        } // end for()                                             

        //getSource.Detach();

        success = true;
    } catch (ParseException pe) {
        // Parsing of the calibration file failed. Log the exception and return false.
        success = false;
        logger.debug("There was a problem parsing the calibration file. The " + "error message was: "
                + pe.getMessage());
        return success;

    } catch (SAPIException sapie) {
        // In the event of an RBNB communication exception, log the exception,
        // and allow execute() to return false, which will prompt a retry.
        success = false;
        sapie.printStackTrace();
        return success;

    }

    return success;
}

From source file:com.s3d.webapps.util.time.DateUtils.java

/**
 * <p>Parses a string representing a date by trying a variety of different parsers.</p>
 *
 * <p>The parse will try each parse pattern in turn.
 * A parse is only deemed successful if it parses the whole of the input string.
 * If no parse patterns match, a ParseException is thrown.</p>
 * 
 * @param str  the date to parse, not null
 * @param parsePatterns  the date format patterns to use, see SimpleDateFormat, not null
 * @param lenient Specify whether or not date/time parsing is to be lenient.
 * @return the parsed date
 * @throws IllegalArgumentException if the date string or pattern array is null
 * @throws ParseException if none of the date patterns were suitable
 * @see java.util.Calendar#isLenient()
 */
private static Date parseDateWithLeniency(String str, String[] parsePatterns, boolean lenient)
        throws ParseException {
    if (str == null || parsePatterns == null) {
        throw new IllegalArgumentException("Date and Patterns must not be null");
    }

    SimpleDateFormat parser = new SimpleDateFormat();
    parser.setLenient(lenient);
    ParsePosition pos = new ParsePosition(0);
    for (int i = 0; i < parsePatterns.length; i++) {

        String pattern = parsePatterns[i];

        // LANG-530 - need to make sure 'ZZ' output doesn't get passed to SimpleDateFormat
        if (parsePatterns[i].endsWith("ZZ")) {
            pattern = pattern.substring(0, pattern.length() - 1);
        }

        parser.applyPattern(pattern);
        pos.setIndex(0);

        String str2 = str;
        // LANG-530 - need to make sure 'ZZ' output doesn't hit SimpleDateFormat as it will ParseException
        if (parsePatterns[i].endsWith("ZZ")) {
            int signIdx = indexOfSignChars(str2, 0);
            while (signIdx >= 0) {
                str2 = reformatTimezone(str2, signIdx);
                signIdx = indexOfSignChars(str2, ++signIdx);
            }
        }

        Date date = parser.parse(str2, pos);
        if (date != null && pos.getIndex() == str2.length()) {
            return date;
        }
    }
    throw new ParseException("Unable to parse the date: " + str, -1);
}
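
A minimal standalone sketch of the same multi-pattern technique: one SimpleDateFormat is reused and applyPattern() switches the candidate pattern on each iteration, accepting a parse only when it consumes the whole input string. The class name, input string, and pattern list below are illustrative assumptions, not taken from DateUtils.

import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Date;

public class MultiPatternParseSketch {

    public static void main(String[] args) throws ParseException {
        // Hypothetical input and candidate patterns (assumptions, not from the source above)
        String input = "2015-03-17 08:45:00";
        String[] patterns = { "yyyy/MM/dd", "yyyy-MM-dd HH:mm:ss", "dd MMM yyyy" };

        // Reuse one formatter and swap patterns with applyPattern(),
        // mirroring the loop in parseDateWithLeniency above.
        SimpleDateFormat parser = new SimpleDateFormat();
        parser.setLenient(false);
        ParsePosition pos = new ParsePosition(0);

        for (String pattern : patterns) {
            parser.applyPattern(pattern);
            pos.setIndex(0);
            Date date = parser.parse(input, pos);
            // Accept only a parse that consumed the entire input string.
            if (date != null && pos.getIndex() == input.length()) {
                System.out.println(pattern + " -> " + date);
                return;
            }
        }
        throw new ParseException("Unable to parse the date: " + input, -1);
    }
}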

From source file:org.wso2.carbon.registry.indexing.solr.SolrClient.java

/**
 * Method to convert a date string into the Solr generic date format
 * @param dateStr the date value to convert
 * @param currentFormat the pattern the incoming date string is currently in
 * @return the date string reformatted to the Solr date format, or null if parsing fails
 */
private String toSolrDateFormat(String dateStr, String currentFormat) {
    String solrDateFormatResult = null;
    try {
        SimpleDateFormat sdf = new SimpleDateFormat(currentFormat, Locale.ENGLISH);
        Date date = sdf.parse(dateStr);
        sdf.applyPattern(SolrConstants.SOLR_DATE_FORMAT);
        solrDateFormatResult = sdf.format(date);
    } catch (ParseException e) {
        log.error("Error when passing date to create solr date format." + e);
    }
    return solrDateFormatResult;
}
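
A standalone sketch of the conversion flow above: parse the incoming string with its current pattern, then call applyPattern() on the same formatter and re-format the parsed Date. The input values and the Solr pattern literal are assumptions; the actual value of SolrConstants.SOLR_DATE_FORMAT is not shown here.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

public class SolrDateConversionSketch {

    public static void main(String[] args) throws ParseException {
        // Hypothetical current format and value; an ISO-8601 style target pattern is assumed.
        String dateStr = "17/03/2015 08:45:00";
        String currentFormat = "dd/MM/yyyy HH:mm:ss";
        String assumedSolrFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'";

        SimpleDateFormat sdf = new SimpleDateFormat(currentFormat, Locale.ENGLISH);
        Date date = sdf.parse(dateStr);

        // Switch the same formatter to the target pattern and re-format the parsed Date.
        sdf.applyPattern(assumedSolrFormat);
        sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
        System.out.println(sdf.format(date));
    }
}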

From source file:net.sf.jasperreports.components.headertoolbar.json.HeaderToolbarElementJsonHandler.java

private List<HashMap<String, String>> getDatePatterns(List<String> datePatterns, Locale locale) {
    List<HashMap<String, String>> formatPatterns = new ArrayList<HashMap<String, String>>();

    SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy", locale);
    Date today = new Date();
    HashMap<String, String> keys;

    for (String datePattern : datePatterns) {
        keys = new HashMap<String, String>();
        sdf.applyPattern(datePattern);
        keys.put("key", datePattern);
        keys.put("val", sdf.format(today));
        formatPatterns.add(keys);
    }

    return formatPatterns;
}
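
A standalone sketch of the preview loop above: a single SimpleDateFormat instance is reused and applyPattern() swaps in each candidate pattern before formatting today's date. The pattern list and locale below are illustrative assumptions.

import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;

public class DatePatternPreviewSketch {

    public static void main(String[] args) {
        // Hypothetical pattern list and locale, standing in for the method arguments above
        List<String> datePatterns = Arrays.asList("dd/MM/yyyy", "MMM d, yyyy", "yyyy-MM-dd");
        Locale locale = Locale.ENGLISH;

        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy", locale);
        Date today = new Date();

        // One formatter is reused; applyPattern() changes the pattern for each entry.
        for (String datePattern : datePatterns) {
            sdf.applyPattern(datePattern);
            System.out.println(datePattern + " -> " + sdf.format(today));
        }
    }
}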

From source file:com.groupon.odo.Proxy.java

/**
 * Log the modified request and its response to the request history store.
 *
 * @param httpMethodProxyRequest the proxied request whose URL, query string, and headers are recorded
 * @param httpServletResponse    the response returned to the client
 * @param history                the history record to populate and store
 */
private void logRequestHistory(HttpMethod httpMethodProxyRequest, HttpServletResponse httpServletResponse,
        History history) {
    try {
        if (requestInformation.get().handle && requestInformation.get().client.getIsActive()) {
            logger.info("Storing history");
            String createdDate;
            SimpleDateFormat sdf = new SimpleDateFormat();
            sdf.setTimeZone(new SimpleTimeZone(0, "GMT"));
            sdf.applyPattern("dd MMM yyyy HH:mm:ss");
            createdDate = sdf.format(new Date()) + " GMT";

            history.setCreatedAt(createdDate);
            history.setRequestURL(HttpUtilities.getURL(httpMethodProxyRequest.getURI().toString()));
            history.setRequestParams(httpMethodProxyRequest.getQueryString() == null ? ""
                    : httpMethodProxyRequest.getQueryString());
            history.setRequestHeaders(HttpUtilities.getHeaders(httpMethodProxyRequest));
            history.setResponseHeaders(HttpUtilities.getHeaders(httpServletResponse));
            history.setResponseCode(Integer.toString(httpServletResponse.getStatus()));
            history.setResponseContentType(httpServletResponse.getContentType());
            history.setResponseData(requestInformation.get().outputString);
            HistoryService.getInstance().addHistory(history);
            logger.info("Done storing");
        }
    } catch (URIException e) {
        e.printStackTrace();
    }
}
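
A standalone sketch of just the timestamp portion above: the formatter's time zone is fixed to GMT before applyPattern() sets the display pattern, so the logged creation date is rendered in GMT. The class name is a hypothetical placeholder.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.SimpleTimeZone;

public class GmtTimestampSketch {

    public static void main(String[] args) {
        // Build a GMT timestamp string the same way logRequestHistory does:
        // fix the time zone first, then apply the display pattern.
        SimpleDateFormat sdf = new SimpleDateFormat();
        sdf.setTimeZone(new SimpleTimeZone(0, "GMT"));
        sdf.applyPattern("dd MMM yyyy HH:mm:ss");

        String createdDate = sdf.format(new Date()) + " GMT";
        System.out.println(createdDate);
    }
}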