Example usage for java.util.Date.before(Date)

List of usage examples for java.util.Date.before(Date)

Introduction

On this page you can find example usages of java.util.Date.before(Date), drawn from the open-source projects listed below.

Prototype

public boolean before(Date when) 

Document

Tests if this date is before the specified date.
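
Before the project examples, here is a minimal self-contained sketch of the call itself; the class name and dates are illustrative only. Note that before() is a strict comparison: it returns false when the two instants are equal.

import java.util.Date;

public class DateBeforeSketch {
    public static void main(String[] args) {
        Date earlier = new Date(System.currentTimeMillis() - 60_000L); // one minute ago
        Date now = new Date();

        System.out.println(earlier.before(now)); // true: 'earlier' precedes 'now'
        System.out.println(now.before(earlier)); // false
        System.out.println(now.before(now));     // false: equal instants are not "before"
    }
}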

Usage

From source file:de.micromata.genome.chronos.spi.ram.RamJobStore.java

@Override
public synchronized List<TriggerJobDO> getNextJobs(Scheduler scheduler, boolean foreignJobs) {
    final Map<Long, TriggerJobDO> tjobs;

    tjobs = allJobs.get(scheduler.getId());

    if (tjobs == null || tjobs.isEmpty() == true) {
        return Collections.emptyList();
    }

    final ArrayList<TriggerJobDO> jobsToStart = new ArrayList<TriggerJobDO>();
    Date now = new Date();
    boolean isDebugEnabled = GLog.isDebugEnabled();

    for (final TriggerJobDO job : tjobs.values()) {
        if (job.getState() != State.WAIT) {
            continue;
        }
        final Trigger trigger = job.getTrigger();
        final Date nextFireTime = trigger.getNextFireTime(now);
        if (nextFireTime != null && nextFireTime.before(now)) {
            if (isDebugEnabled == true) {
                GLog.debug(GenomeLogCategory.Scheduler, "Found trigger: " + trigger);
            }
            jobsToStart.add(job);
        }

    }
    return jobsToStart;
}

From source file:de.fhbingen.wbs.wpOverview.tabs.APCalendarPanel.java

/**
 * Convert work packages to tasks.
 * @param userWp
 *            list with work packages.
 * @return IntervalCategoryDataset: tasks of the work packages.
 */
public final IntervalCategoryDataset createDataset(final List<Workpackage> userWp) {

    final TaskSeries s1 = new TaskSeries(LocalizedStrings.getGeneralStrings().overview());
    colorList = new ArrayList<Integer>();
    for (Workpackage actualPackage : userWp) {
        if (actualPackage.getEndDateCalc() != null && actualPackage.getStartDateCalc() != null) {
            if (actualPackage.getlastRelevantIndex() <= showLevels) {

                Date endDateCalc = null;
                Date start = null;

                endDateCalc = actualPackage.getEndDateCalc();
                start = actualPackage.getStartDateCalc();

                String indent = "";

                for (int i = 0; i < actualPackage.getlastRelevantIndex(); i++) {
                    indent += "   ";
                }
                if (!endDateCalc.before(start)) {
                    Task t = new Task(indent + actualPackage.toString(),
                            new SimpleTimePeriod(start, endDateCalc));
                    t.setPercentComplete(0.01 * WpManager.calcPercentComplete(actualPackage.getBac(),
                            actualPackage.getEtc(), actualPackage.getAc()));
                    s1.add(t);
                    colorList.add(actualPackage.getlastRelevantIndex());
                }

            }
        }
    }

    final TaskSeriesCollection collection = new TaskSeriesCollection();
    collection.add(s1);

    return collection;
}

From source file:com.surevine.alfresco.webscript.gsa.getallitems.GetAllItemsCommandImpl.java

/**
 * All the interface methods delegate down to this implementation method to retrieve a "get all items" page.
 * This method performs the logic for a getAllItems call on content items, then delegates to another method
 * to retrieve profile results, before merging the two result sets together.
 * @param since Only retrieve items modified since this date
 * @param maxItems A hint as to the number of items the caller wishes to retrieve.  The system must not return more
 * than this number of items, but may return less.  If it does, this is indicated in the return value
 * @param startAt If multiple items are tied for the "earliest" item within the search and one of them is this item,
 * discard all the items that occur before this item according to some deterministic ordering
 * @return SearchResults objects encapsulating the results of this search
 * @throws GSAProcessingException in certain circumstances.  The code tries to distinguish between errors with parsing
 * a subset of data points and systemic errors.  The heuristic used is that if 100 items fail to parse in a row (this
 * count persists across service calls) AND no items within the current batch have parsed then we call a systemic
 * error and throw an exception.  We have to use behaviour like this because the ICD doesn't allow for a partial
 * success return value
 */
private SearchResults getAllItemsImpl(Date since, NodeRef startAt, int maxItems) {
    //Respect the hard results limit
    if (maxItems > _hardMaxResultsLimit) {
        if (_logger.isInfoEnabled()) {
            _logger.info(
                    "Results size specified was " + maxItems + " but this is greater than the hard limit of "
                            + _hardMaxResultsLimit + " so using the hard limit");
        }
        maxItems = _hardMaxResultsLimit;
    }

    //Generate the lucene query and log
    StringBuilder sb = new StringBuilder(300);

    sb.append("+((PATH:\"/app:company_home/st:sites//*\" AND (" + getMimeTypeQueryPart() + "))) "
            + "+ (@cm\\:modified:[" + DateToLuceneDateString(since) + " TO MAX ] )");
    sb.append("-TYPE:\"fm:topic\" -TYPE:\"cm:folder\" -TYPE:\"cm:thumbnail\"");
    String queryString = sb.toString();

    SearchParameters sp = new SearchParameters();
    sp.addStore(SPACES_STORE);
    sp.setLanguage(SearchService.LANGUAGE_LUCENE);
    sp.setQuery(queryString);
    sp.addSort("@cm:modified", true);

    if (_logger.isDebugEnabled()) {
        _logger.debug("Running search: " + queryString);
    }

    //Actually run the search and get the results as an Iterator of NodeRefs 
    Iterator<NodeRef> results = _searchService.query(sp).getNodeRefs().iterator();
    List<SearchResult> workingResults = new ArrayList<SearchResult>(maxItems);

    //Respecting the specified limits, turn the iterator of NodeRefs into a list of Objects of type SearchResult
    int resultsSoFar = 0;
    int bytesSoFar = 0;
    while (results.hasNext() && bytesSoFar < _softMaxResultsSizeLimitInBytes && resultsSoFar < maxItems) {
        NodeRef nr = results.next();
        try {
            //Alfresco only indexes by date, not time (!), so we're going to get back some documents which are too early and which we need to throw away
            Date lastModifiedDate = _gsaNodePropertyService.getModifiedDate(nr);

            if (lastModifiedDate != null && !lastModifiedDate.before(since)) {
                if (_logger.isDebugEnabled()) {
                    _logger.debug("Adding " + nr + " to results list with date " + lastModifiedDate);
                }

                SearchResult result = createSearchResult(nr);
                bytesSoFar += _gsaNodePropertyService.getContentSize(nr);
                workingResults.add(result);
                resultsSoFar++;
                errorCount = 0; // Reset the error count as by this point we have successfully retrieved a result
            } else {
                if (_logger.isDebugEnabled()) {
                    _logger.debug("Ignoring " + nr + " as it's last modified date of " + lastModifiedDate
                            + " is before the since value");
                }
            }
        } catch (GSAProcessingException e) {
            errorCount++;
            _logger.warn("Error creating a record in the results set for " + queryString, e);
        } catch (InvalidNodeRefException ex) {
            errorCount++;
            _logger.warn("Found an invalid node with reference: " + ex.getNodeRef().toString());
        } catch (NullPointerException npe) {
            errorCount++;
            _logger.warn(
                    "NullPointerException during parsing of result set.  This is usually benign, but please contact support if these occur alongside undesirable behaviour",
                    npe);
        } catch (Exception exx) {
            errorCount++;
            _logger.warn(
                    "An unexpected exeption occurred during parsing of result set.  This may be benign, and indexing has not been stopped, but please report this issue to support so that the cause can be analysed",
                    exx);
        }
        if (errorCount > (_systemicParseExceptionThreshold - 1) && resultsSoFar == 0) {
            throw new GSAProcessingException(
                    "Systemic error detected.  Batch returned " + errorCount + " errors and no successes",
                    29108265);
        }
    }

    if (_logger.isInfoEnabled()) {
        _logger.info(resultsSoFar + " results returned");
    }

    List<SearchResult> profileResults = null;
    if (workingResults.size() > 0) {
        //Get profile results - we know we don't need to take anything with a date later than the latest thing in the content results list
        profileResults = getProfileResults(since,
                workingResults.get(workingResults.size() - 1).getLastModifiedDate());
    } else {
        //Just use profile results with no maximum date (well, use "now") as there are no normal results
        profileResults = getProfileResults(since, new Date());
    }
    if (profileResults.size() > 0) //merge profile and normal results if required
    {
        workingResults.addAll(profileResults);
        Collections.sort(workingResults);
        while (workingResults.size() > maxItems) {
            if (_logger.isDebugEnabled()) {
                _logger.debug(
                        "MergeProfile: Removing " + workingResults.get(workingResults.size() - 1).getTitle()
                                + " from results list - last modified date = "
                                + workingResults.get(workingResults.size() - 1).getLastModifiedDate());
            }
            workingResults.remove(workingResults.size() - 1);
        }
        if (_logger.isDebugEnabled()) {
            _logger.debug("MergeProfile: Search results runs until "
                    + workingResults.get(workingResults.size() - 1).getLastModifiedDate());
        }
    }

    //If startAt is specified, perform the relevant filtering logic - usually, this will leave workingResults unchanged
    if (startAt != null && workingResults.size() > 0) {
        workingResults = resolveDateCollisionsWithStartAt(workingResults, startAt);
    } else {
        if (_logger.isDebugEnabled()) {
            _logger.debug("startAt not specified so returning all results within merged results list");
        }
    }

    //Special case - if resolving date collisions has left us with no results at all, then return the next result in the underlying list, even if it would normally fall outside of the input params.  
    //This keeps a parse going where it might otherwise stall
    if (workingResults.size() == 0 && results.hasNext()) {
        if (_logger.isDebugEnabled()) {
            _logger.debug("Anti-stall - returning next item in results list");
        }
        workingResults = new ArrayList<SearchResult>(1);
        workingResults.add(createSearchResult(results.next()));
    }

    //Wrap the collection of SearchResult objects into a SearchResults object and calculate whether more results are available
    SearchResults finalResults = new SearchResultsImpl();
    finalResults.setResults(workingResults);
    finalResults.moreAvailable(results.hasNext());

    return finalResults;

}

From source file:org.openmrs.module.sdmxhddataexport.web.controller.report.ReportDataElementController.java

@RequestMapping(value = "/module/sdmxhddataexport/resultExecuteReport.form", method = RequestMethod.GET)
public String executeReport(@RequestParam(value = "reportId", required = false) Integer reportId,
        @RequestParam(value = "startDate", required = false) String startDate,
        @RequestParam(value = "endDate", required = false) String endDate, Model model) throws ParseException {

    SDMXHDDataExportService sDMXHDDataExportService = Context.getService(SDMXHDDataExportService.class);
    List<ReportDataElement> list = sDMXHDDataExportService.listReportDataElement(reportId, null, null, 0, 0);
    String dataSetCode = "";
    String reportName = "";
    List<String> periods = new ArrayList<String>();
    Map<String, List<ReportDataElementResult>> periodResults = new HashMap<String, List<ReportDataElementResult>>();

    if (CollectionUtils.isNotEmpty(list)) {
        dataSetCode = list.get(0).getReport().getCode();
        reportName = list.get(0).getReport().getName();
        Date begin = SDMXHDataExportUtils.getFirstDate(sdf.parse(startDate));
        Date end = SDMXHDataExportUtils.getLastDate(begin);
        SimpleDateFormat periodFormatter = new SimpleDateFormat("yyyyMM");
        Date finalDate = SDMXHDataExportUtils.getLastDate(sdf.parse(endDate));
        while (begin.before(finalDate)) {
            periods.add(periodFormatter.format(begin));
            List<ReportDataElementResult> results = new ArrayList<ReportDataElementResult>();
            for (ReportDataElement reportDataElement : list) {
                ReportDataElementResult result = new ReportDataElementResult();
                result.setDataElement(reportDataElement.getDataElement());
                result.setId(reportDataElement.getId());
                result.setReport(reportDataElement.getReport());
                result.setQuery(reportDataElement.getQuery());
                result.setResult(sDMXHDDataExportService.executeQuery(
                        reportDataElement.getDataElement().getSqlQuery(), sdf.format(begin), sdf.format(end)));
                results.add(result);
            }

            periodResults.put(periodFormatter.format(begin), results);
            begin = SDMXHDataExportUtils.nextMonth(begin);
            end = SDMXHDataExportUtils.getLastDate(begin);
        }
        String orgunitCode = Context.getAdministrationService()
                .getGlobalProperty("sdmxhddataexport.organisationUnit");
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
        model.addAttribute("DATASET_CODE", dataSetCode);
        model.addAttribute("reportName", reportName);
        model.addAttribute("periods", periods);
        model.addAttribute("periodResults", periodResults);
        model.addAttribute("orgunit", orgunitCode);
        model.addAttribute("prepared", formatter.format(new Date()));
    }
    return "/module/sdmxhddataexport/report/result";
}

From source file:com.inmobi.databus.readers.DatabusStreamWaitingReader.java

/**
 * This method is used to check whether the given minute directory has been
 * completely read. It takes the current time stamp and the minute on which
 * the reader is currently working, and retrieves the partition checkpoint
 * for that minute if one exists. It compares the current time stamp with
 * the checkpointed time stamp. If the current time stamp is before the
 * checkpointed time stamp, then that minute directory for the current hour is
 * completely read. If both time stamps are the same, it checks the line number;
 * a line number of -1 means all the files in that minute directory have
 * already been read.
 */
public boolean isRead(Date currentTimeStamp, int minute) {
    Date checkpointedTimestamp = pChkpoints.get(Integer.valueOf(minute)).timeStamp;
    if (checkpointedTimestamp == null) {
        return false;
    }
    if (currentTimeStamp.before(checkpointedTimestamp)) {
        return true;
    } else if (currentTimeStamp.equals(checkpointedTimestamp)) {
        return pChkpoints.get(Integer.valueOf(minute)).readFully;
    }
    return false;
}

From source file:com.eucalyptus.imaging.backend.ImagingTaskStateManager.java

private void processCancellingTasks(final List<ImagingTask> tasks) {
    for (final ImagingTask task : tasks) {
        try {
            if (!cancellingTimer.containsKey(task.getDisplayName())) {
                cancellingTimer.put(task.getDisplayName(), Dates.minutesFromNow(CANCELLING_WAIT_MIN));
            }
            final Date cancellingExpired = cancellingTimer.get(task.getDisplayName());
            if (cancellingExpired.before(new Date())) {
                try {
                    task.cleanUp();
                } catch (final Exception ex) {
                    LOG.warn("Failed to cleanup resources for " + task.getDisplayName());
                }
                ImagingTasks.transitState(task, ImportTaskState.CANCELLING, ImportTaskState.CANCELLED, null);
            }
        } catch (final Exception ex) {
            LOG.error("Could not process cancelling task " + task.getDisplayName());
        }
    }
}

From source file:com.vmware.identity.openidconnect.server.TokenRequestProcessor.java

private ErrorObject validateRefreshToken(ReadOnlyJWTClaimsSet claimsSet, SolutionUser solutionUser) {
    String error = null;

    String tokenClass = null;
    String scope = null;
    String actAs = null;
    String clientId = null;
    String tenant = null;
    try {
        tokenClass = claimsSet.getStringClaim("token_class");
        scope = claimsSet.getStringClaim("scope");
        actAs = claimsSet.getStringClaim("act_as");
        clientId = claimsSet.getStringClaim("client_id");
        tenant = claimsSet.getStringClaim("tenant");
    } catch (java.text.ParseException e) {
        error = "failed to parse claims out of jwt";
    }

    if (error == null && !("refresh_token").equals(tokenClass)) {
        error = "jwt is missing a token_class=refresh_token claim";
    }

    if (error == null && StringUtils.isEmpty(claimsSet.getSubject())) {
        error = "jwt is missing sub (subject) claim";
    }

    Date expirationTime = claimsSet.getExpirationTime();
    if (error == null && expirationTime == null) {
        error = "jwt is missing exp (expiration) claim";
    }

    Date now = new Date();
    if (error == null && expirationTime.before(now)) {
        error = "jwt has expired";
    }

    if (error == null && StringUtils.isEmpty(scope)) {
        error = "jwt is missing scope claim";
    }

    if (error == null && !this.tenantInfo.getName().equals(tenant)) {
        error = "refresh_token was not issued to this tenant";
    }

    String expectedClientId = (this.tokenRequest.getClientID() == null) ? null
            : this.tokenRequest.getClientID().getValue();
    if (error == null && !Objects.equals(clientId, expectedClientId)) {
        error = "refresh_token was not issued to this client";
    }

    String expectedActAs = (solutionUser == null) ? null : solutionUser.getSubject().getValue();
    if (error == null && !Objects.equals(actAs, expectedActAs)) {
        error = "refresh_token was not issued to this solution user";
    }

    return (error == null) ? null : OAuth2Error.INVALID_GRANT.setDescription(error);
}

From source file:com.jpeterson.littles3.bo.S3Authenticator.java

/**
 * Authenticate the request using the prescribed Amazon S3 authentication
 * mechanisms.
 * 
 * @param req
 *            The original HTTP request.
 * @param s3Request
 *            The S3 specific information for authenticating the request.
 * @return The authenticated <code>CanonicalUser</code> making the request.
 * @throws RequestTimeTooSkewedException
 *             Thrown if the request timestamp is outside of the allotted
 *             timeframe.
 */
public CanonicalUser authenticate(HttpServletRequest req, S3ObjectRequest s3Request)
        throws AuthenticatorException {
    // check to see if anonymous request
    String authorization = req.getHeader(HEADER_AUTHORIZATION);

    if (authorization == null) {
        return new CanonicalUser(CanonicalUser.ID_ANONYMOUS);
    }

    // attempting to be authenticated request

    if (false) {
        // check timestamp of request
        Date timestamp = s3Request.getTimestamp();
        if (timestamp == null) {
            throw new RequestTimeTooSkewedException("No timestamp provided");
        }

        GregorianCalendar calendar = new GregorianCalendar();
        Date now = calendar.getTime();
        calendar.add(Calendar.MINUTE, 15);
        Date maximumDate = calendar.getTime();
        calendar.add(Calendar.MINUTE, -30);
        Date minimumDate = calendar.getTime();

        if (timestamp.before(minimumDate)) {
            throw new RequestTimeTooSkewedException(
                    "Timestamp [" + timestamp + "] too old. System time: " + now);
        }

        if (timestamp.after(maximumDate)) {
            throw new RequestTimeTooSkewedException(
                    "Timestamp [" + timestamp + "] too new. System time: " + now);
        }
    }

    // authenticate request
    String[] fields = authorization.split(" ");

    if (fields.length != 2) {
        throw new InvalidSecurityException("Unsupported authorization format");
    }

    if (!fields[0].equals(AUTHORIZATION_TYPE)) {
        throw new InvalidSecurityException("Unsupported authorization type: " + fields[0]);
    }

    String[] keys = fields[1].split(":");

    if (keys.length != 2) {
        throw new InvalidSecurityException("Invalid AWSAccesskeyId:Signature");
    }

    String accessKeyId = keys[0];
    String signature = keys[1];
    String secretAccessKey = userDirectory.getAwsSecretAccessKey(accessKeyId);
    String calculatedSignature;

    try {
        SecretKey key = new SecretKeySpec(secretAccessKey.getBytes(), "HmacSHA1");
        Mac m = Mac.getInstance("HmacSHA1");
        m.init(key);
        m.update(s3Request.getStringToSign().getBytes());
        byte[] mac = m.doFinal();
        calculatedSignature = new String(Base64.encodeBase64(mac));
    } catch (NoSuchAlgorithmException e) {
        throw new InvalidSecurityException(e);
    } catch (InvalidKeyException e) {
        throw new InvalidSecurityException(e);
    }

    System.out.println("-----------------");
    System.out.println("signature: " + signature);
    System.out.println("calculatedSignature: " + calculatedSignature);
    System.out.println("-----------------");

    if (calculatedSignature.equals(signature)) {
        // authenticated!
        return userDirectory.getCanonicalUser(secretAccessKey);
    } else {
        throw new SignatureDoesNotMatchException("Provided signature doesn't match calculated value");
    }
}

From source file:jp.co.ctc_g.jfw.core.util.Dates.java

/**
 * Tests whether the second date range (fromDate2 to toDate2) lies entirely
 * within the first range (fromDate1 to toDate1), comparing at day precision.
 * @param fromDate1 start date of the first range
 * @param toDate1 end date of the first range
 * @param fromDate2 start date of the second range
 * @param toDate2 end date of the second range
 * @return true if the first range includes the second range, false otherwise
 */
public static boolean isDateInclude(Date fromDate1, Date toDate1, Date fromDate2, Date toDate2) {

    fromDate1 = truncate(fromDate1, Dates.DAY);
    toDate1 = truncate(toDate1, Dates.DAY);
    fromDate2 = truncate(fromDate2, Dates.DAY);
    toDate2 = truncate(toDate2, Dates.DAY);
    if ((fromDate1.before(fromDate2) || fromDate1.equals(fromDate2)) && // range 1 starts on or before the start of range 2
            (toDate1.after(fromDate2) || toDate1.equals(fromDate2)) // and ends on or after it
            && (fromDate1.before(toDate2) || fromDate1.equals(toDate2)) && // range 1 starts on or before the end of range 2
            (toDate1.after(toDate2) || toDate1.equals(toDate2))) // and ends on or after it
        return true;
    return false;
}

From source file:ob.PO.TaskPO.java

@Override
public void run() {
    setTaskLog("");
    if (thistype == 0) { // trigger type 0: wait until a scheduled date/time
        Date tasktime;
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss"); // thisstr1 (date) and thisstr2 (time) are concatenated before parsing
        try {
            tasktime = sdf.parse(thisstr1 + thisstr2);
            //if(cl.)
        } catch (ParseException ex) {
            setTaskLog("");
            return;
        }
        Date nowtime = new Date();
        if (nowtime.after(tasktime)) {
            setTaskLog("??");
            return;
        }
        while (nowtime.before(tasktime)) {
            long deltaminute = (tasktime.getTime() - nowtime.getTime()) / (1000 * 60); // minutes remaining until the scheduled time
            long day = deltaminute / (24 * 60);
            long hour = (deltaminute / 60) % 24;
            long minute = deltaminute % 60;
            setTaskLog("\n" + day + "" + hour + "" + minute
                    + "");
            try {
                Thread.sleep(5000);
            } catch (InterruptedException ex) {
                setTaskLog("");
                return;
            }
            nowtime = new Date();
        }
        if (nowtime.getTime() - tasktime.getTime() > 60 * 1000) { // woke up more than a minute past the scheduled time
            setTaskLog("?");
            return;
        }
        setTaskLog("");
    } else if (thistype == 1) { // trigger type 1: wait for a new e-mail to arrive via POP3
        int lastmessage_num = -1;
        POP3SClient pop3 = new POP3SClient(true);
        POP3MessageInfo[] messages;
        pop3.setDefaultTimeout(600000);
        while (true) {
            try {
                pop3.connect("pop." + thisstr1.split("@")[1]);//pop. + domain
                if (!pop3.login(thisstr1, AESUtil.Decryptor(thisstr2))) {
                    pop3.disconnect();
                    setTaskLog("?.");
                    return;
                }
                messages = pop3.listMessages();
                if (lastmessage_num < 0) { // first poll: remember the current message count
                    lastmessage_num = messages.length;
                    setTaskLog("?" + lastmessage_num);
                } else if (lastmessage_num < messages.length) { // a new message has arrived
                    break;
                }
                setTaskLog("?" + messages.length);
                Thread.sleep(10000);
            } catch (Exception ex) {
                setTaskLog("");
                return;
            }
        }
        setTaskLog("");
    } else if (thistype == 2) { // trigger type 2: wait for a Sina Weibo status containing thistext
        setTaskLog("??");
        while (true) {
            try {
                String Access_token;
                if ((Access_token = Sina.getToken(thisstr1, AESUtil.Decryptor(thisstr2))
                        .getAccessToken()) == null) {
                    setTaskLog("???");
                    return;
                }
                String lasttimeline = GetUserTimeline.getTimeline(Access_token);
                if (lasttimeline == null) {
                } else {
                    if (lasttimeline.contains(thistext)) { // keyword found in the latest timeline entry
                        break;
                    }
                }
                setTaskLog("??");
                Thread.sleep(10000);
            } catch (Exception ex) {
                ex.printStackTrace();
                setTaskLog("??");
                return;
            }
        }
        setTaskLog("???");
    }
    if (thattype == 1) { // action type 1: send an e-mail notification
        setTaskLog("??");
        SimpleEmail email = new SimpleEmail();
        Properties props = new Properties();
        String user, pass;
        try {
            props.load(
                    Thread.currentThread().getContextClassLoader().getResourceAsStream("adminmail.properties"));
            if ((user = (String) props.get("user")) == null || (pass = (String) props.get("pass")) == null) {
                setTaskLog("?");
                return;
            }
            email.setHostName("smtp." + user.split("@")[1]); // SMTP host: smtp. + domain
            email.setAuthentication(user, pass); // SMTP authentication credentials
            email.setSSLOnConnect(true);
            email.addTo(thatusername, "JAVA IFTTT RECEIVER"); // recipient
            email.setFrom(user, "JAVA IFTTT SENDER"); // sender
            email.setSubject("JAVA IFTTT SEND EMAIL"); // subject
            email.setCharset("UTF-8"); // character set
            email.setMsg(thattext); // message body
            email.send(); // send the mail
        } catch (Exception ex) {
            setTaskLog("??");
            return;
        }
        setTaskLog("????");
    } else if (thattype == 0) { // action type 0: post a Sina Weibo status
        setTaskLog("???");
        try {
            String Access_token;
            if ((Access_token = Sina.getToken(thatusername, AESUtil.Decryptor(thatpassword))
                    .getAccessToken()) == null) {
                setTaskLog("???");
                return;
            }
            UpdateStatus.Update(Access_token, thattext);
        } catch (Exception ex) {
            ex.printStackTrace();
            setTaskLog("???");
            return;
        }
        setTaskLog("?????");
    }
}