Example usage for java.lang IllegalArgumentException toString

List of usage examples for java.lang IllegalArgumentException toString

Introduction

On this page you can find example usage for java.lang IllegalArgumentException toString.

Prototype

public String toString() 

Source Link

Document

Returns a short description of this throwable.

Usage

From source file:org.ecocean.media.MediaAsset.java

/**
 * Decodes a Base64-encoded string and stores the resulting bytes as this asset's
 * backing file, then hands the file to {@code copyIn(File)}.
 *
 * Target file is this asset's local path if one exists, otherwise a fresh temp file.
 *
 * @param b64 Base64 payload; must not be null
 * @throws IOException if {@code b64} is null, cannot be parsed as Base64, or the
 *         decoded bytes could not be written to disk
 */
public void copyInBase64(String b64) throws IOException {
    if (b64 == null)
        throw new IOException("copyInBase64() null string");
    byte[] imgBytes;
    try {
        imgBytes = DatatypeConverter.parseBase64Binary(b64);
    } catch (IllegalArgumentException ex) {
        // preserve the cause so callers can see the original parse failure
        throw new IOException("copyInBase64() could not parse: " + ex.toString(), ex);
    }
    File file = (this.localPath() != null) ? this.localPath().toFile()
            : File.createTempFile("b64-" + Util.generateUUID(), ".tmp");
    // try-with-resources guarantees the stream is closed even if write() throws
    try (FileOutputStream stream = new FileOutputStream(file)) {
        stream.write(imgBytes);
    }
    if (file.exists()) {
        this.copyIn(file);
    } else {
        throw new IOException("copyInBase64() could not write " + file);
    }
}

From source file:mx.edu.ittepic.AEEcommerce.ejbs.OperationCommerce.java

/**
 * Loads every {@code Users} entity via the {@code Users.findAll} named query and
 * returns a JSON-serialized {@code Message}: code 200 with the user list on
 * success, or an error code (422/509/400) with a localized message and the
 * exception's short description on failure.
 *
 * @return JSON encoding of the result {@code Message}
 */
public String getUsers() {

    List<Users> users;
    Message msg = new Message();
    GsonBuilder builder = new GsonBuilder();
    Gson gson = builder.create();
    try {
        Query q = entity.createNamedQuery("Users.findAll");

        users = q.getResultList();
        msg.setCode(200);
        msg.setMsg(gson.toJson(users));
        msg.setDetail("OK");
    } catch (IllegalArgumentException e) {
        msg.setCode(422);
        msg.setMsg("Error de entidad, el usuario no es una entidad.");
        msg.setDetail(e.toString());
    } catch (IllegalStateException e) {
        msg.setCode(422);
        msg.setMsg("Error de entidad, el usuario no es una entidad o ha sido removido.");
        msg.setDetail(e.toString());
    } catch (QueryTimeoutException | TransactionRequiredException e) {
        // identical handling for both timeout-ish failures: report 509
        msg.setCode(509);
        msg.setMsg("La operacin tardo demasiado, por favor vuelve a intentarlo.");
        msg.setDetail(e.toString());
    } catch (PessimisticLockException e) {
        msg.setCode(400);
        msg.setMsg("Error, operacin bloqueada (Pesimistic), no se realizo la transaccin.");
        msg.setDetail(e.toString());
    } catch (LockTimeoutException e) {
        msg.setCode(400);
        msg.setMsg("Error, operacin bloqueada (Lock), no se realizo la transaccin.");
        msg.setDetail(e.toString());
    } catch (PersistenceException e) {
        // catch-all for the remaining JPA failures; must stay last (supertype)
        msg.setCode(400);
        msg.setMsg("Error, operacin bloqueada (Persistence), no se realizo la transaccin.");
        msg.setDetail(e.toString());
    }
    return gson.toJson(msg);
}

From source file:net.longfalcon.newsj.Releases.java

/**
 * Runs the full release-building pipeline over newly collected binaries:
 *
 *   Stage 0/1 - apply each active release regex to new binaries, extracting
 *               reqid/parts/name capture groups and marking matches TITLEMATCHED.
 *   Stage 2   - promote title-matched sets that have the right number of
 *               (sufficiently complete) files to READYTORELEASE, optionally
 *               resolving names via a reqid lookup service.
 *   Stage 3   - create Release records for ready sets (dedup by name/date),
 *               tag their binaries RELEASED, find .nfo files and write NZBs.
 *
 * Afterwards it triggers nfo/imdb/music/games/password/TV post-processing and
 * performs retention housekeeping (tidying ungroupable binaries, deleting old
 * parts/binaries/releases).
 *
 * Transactions are opened/committed manually in three chunks (flagged below as
 * a hack because declarative tx management was not working at the time).
 */
public void processReleases() {
    String startDateString = DateUtil.displayDateFormatter.print(System.currentTimeMillis());
    _log.info(String.format("Starting release update process (%s)", startDateString));

    // get site config TODO: use config service
    Site site = siteDAO.getDefaultSite();

    int retcount = 0;

    Directory nzbBaseDir = fileSystemService.getDirectory("/nzbs");

    checkRegexesUptoDate(site.getLatestRegexUrl(), site.getLatestRegexRevision());

    // Stage 0

    // this is a hack - tx is not working ATM
    TransactionStatus transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));

    //
    // Get all regexes for all groups which are to be applied to new binaries
    // in order of how they should be applied
    //
    List<ReleaseRegex> releaseRegexList = releaseRegexDAO.getRegexes(true, "-1", false);
    for (ReleaseRegex releaseRegex : releaseRegexList) {

        String releaseRegexGroupName = releaseRegex.getGroupName();
        _log.info(String.format("Applying regex %d for group %s", releaseRegex.getId(),
                ValidatorUtil.isNull(releaseRegexGroupName) ? "all" : releaseRegexGroupName));

        // compile the regex early, to test them
        String regex = releaseRegex.getRegex();
        Pattern pattern = Pattern.compile(fixRegex(regex), Pattern.CASE_INSENSITIVE); // remove '/' and '/i'

        HashSet<Long> groupMatch = new LinkedHashSet<>();

        //
        // Groups ending in * need to be like matched when getting out binaries for groups and children
        //
        Matcher matcher = _wildcardPattern.matcher(releaseRegexGroupName);
        if (matcher.matches()) {
            // strip the trailing wildcard and prefix-match group names
            releaseRegexGroupName = releaseRegexGroupName.substring(0, releaseRegexGroupName.length() - 1);
            List<Group> groups = groupDAO.findGroupsByName(releaseRegexGroupName);
            for (Group group : groups) {
                groupMatch.add(group.getId());
            }
        } else if (!ValidatorUtil.isNull(releaseRegexGroupName)) {
            Group group = groupDAO.getGroupByName(releaseRegexGroupName);
            if (group != null) {
                groupMatch.add(group.getId());
            }
        }

        List<Binary> binaries = new ArrayList<>();
        if (groupMatch.size() > 0) {
            // Get out all binaries of STAGE0 for current group
            binaries = binaryDAO.findByGroupIdsAndProcStat(groupMatch, Defaults.PROCSTAT_NEW);
        }

        Map<String, String> arrNoPartBinaries = new LinkedHashMap<>();
        DateTime fiveHoursAgo = DateTime.now().minusHours(5);

        // this for loop should probably be a single transaction
        for (Binary binary : binaries) {
            String testMessage = "Test run - Binary Name " + binary.getName();

            Matcher groupRegexMatcher = pattern.matcher(binary.getName());
            if (groupRegexMatcher.find()) {
                // Named capture groups are optional per-regex; a missing group name
                // throws IllegalArgumentException, which we treat as "not captured".
                String reqIdGroup = null;
                try {
                    reqIdGroup = groupRegexMatcher.group("reqid");
                } catch (IllegalArgumentException e) {
                    _log.debug(e.toString());
                }
                String partsGroup = null;
                try {
                    partsGroup = groupRegexMatcher.group("parts");
                } catch (IllegalArgumentException e) {
                    _log.debug(e.toString());
                }
                String nameGroup = null;
                try {
                    nameGroup = groupRegexMatcher.group("name");
                } catch (Exception e) {
                    // NOTE(review): broader than the IllegalArgumentException catches
                    // above - also swallows unrelated runtime errors; confirm intended.
                    _log.debug(e.toString());
                }
                _log.debug(testMessage + " matches with: \n reqId = " + reqIdGroup + " parts = " + partsGroup
                        + " and name = " + nameGroup);

                // a numeric reqid can stand in for a missing name
                if ((ValidatorUtil.isNotNull(reqIdGroup) && ValidatorUtil.isNumeric(reqIdGroup))
                        && ValidatorUtil.isNull(nameGroup)) {
                    nameGroup = reqIdGroup;
                }

                if (ValidatorUtil.isNull(nameGroup)) {
                    _log.warn(String.format(
                            "regex applied which didnt return right number of capture groups - %s", regex));
                    _log.warn(String.format("regex matched: reqId = %s parts = %s and name = %s", reqIdGroup,
                            partsGroup, nameGroup));
                    continue;
                }

                // If theres no number of files data in the subject, put it into a release if it was posted to usenet longer than five hours ago.
                if ((ValidatorUtil.isNull(partsGroup) && fiveHoursAgo.isAfter(binary.getDate().getTime()))) {
                    //
                    // Take a copy of the name of this no-part release found. This can be used
                    // next time round the loop to find parts of this set, but which have not yet reached 3 hours.
                    //
                    arrNoPartBinaries.put(nameGroup, "1");
                    partsGroup = "01/01";
                }

                if (ValidatorUtil.isNotNull(nameGroup) && ValidatorUtil.isNotNull(partsGroup)) {

                    if (partsGroup.indexOf('/') == -1) {
                        partsGroup = partsGroup.replaceFirst("(-)|(~)|(\\sof\\s)", "/"); // replace weird parts delimiters
                    }

                    Integer regexCategoryId = releaseRegex.getCategoryId();
                    Integer reqId = null;
                    if (ValidatorUtil.isNotNull(reqIdGroup) && ValidatorUtil.isNumeric(reqIdGroup)) {
                        reqId = Integer.parseInt(reqIdGroup);
                    }

                    //check if post is repost
                    Pattern repostPattern = Pattern.compile("(repost\\d?|re\\-?up)", Pattern.CASE_INSENSITIVE);
                    Matcher binaryNameRepostMatcher = repostPattern.matcher(binary.getName());

                    // append the repost marker to the name unless it already carries one
                    if (binaryNameRepostMatcher.find()
                            && !nameGroup.toLowerCase().matches("^[\\s\\S]+(repost\\d?|re\\-?up)")) {
                        nameGroup = nameGroup + (" " + binaryNameRepostMatcher.group(1));
                    }

                    // partsGroup is "NN/MM" by now; parse current part and total parts
                    String partsStrings[] = partsGroup.split("/");
                    int relpart = Integer.parseInt(partsStrings[0]);
                    int relTotalPart = Integer.parseInt(partsStrings[1]);

                    binary.setRelName(nameGroup.replace("_", " "));
                    binary.setRelPart(relpart);
                    binary.setRelTotalPart(relTotalPart);
                    binary.setProcStat(Defaults.PROCSTAT_TITLEMATCHED);
                    binary.setCategoryId(regexCategoryId);
                    binary.setRegexId(releaseRegex.getId());
                    binary.setReqId(reqId);
                    binaryDAO.updateBinary(binary);

                }
            }
        }

    }

    transactionManager.commit(transaction);

    // this is a hack - tx is not working ATM
    transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));
    //
    // Move all binaries from releases which have the correct number of files on to the next stage.
    //
    _log.info("Stage 2");
    List<MatchedReleaseQuery> matchedReleaseQueries = binaryDAO
            .findBinariesByProcStatAndTotalParts(Defaults.PROCSTAT_TITLEMATCHED);
    matchedReleaseQueries = combineMatchedQueries(matchedReleaseQueries);

    int siteMinFilestoFormRelease = site.getMinFilesToFormRelease();

    for (MatchedReleaseQuery matchedReleaseQuery : matchedReleaseQueries) {
        retcount++;

        //
        // Less than the site permitted number of files in a release. Dont discard it, as it may
        // be part of a set being uploaded.
        //
        int minFiles = siteMinFilestoFormRelease;
        String releaseName = matchedReleaseQuery.getReleaseName();
        long matchedReleaseQueryGroup = matchedReleaseQuery.getGroup();
        Long matchedReleaseQueryNumberOfBinaries = matchedReleaseQuery.getNumberOfBinaries();
        int matchecReleaseTotalParts = matchedReleaseQuery.getReleaseTotalParts();
        String fromName = matchedReleaseQuery.getFromName();
        Integer reqId = matchedReleaseQuery.getReqId();

        // per-group minimum overrides the site-wide minimum when set
        Group group = groupDAO.findGroupByGroupId(matchedReleaseQueryGroup);
        if (group != null && group.getMinFilesToFormRelease() != null) {
            minFiles = group.getMinFilesToFormRelease();
        }

        if (matchedReleaseQueryNumberOfBinaries < minFiles) {

            _log.warn(String.format("Number of files in release %s less than site/group setting (%s/%s)",
                    releaseName, matchedReleaseQueryNumberOfBinaries, minFiles));

            binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                    matchedReleaseQueryGroup, fromName);
        } else if (matchedReleaseQueryNumberOfBinaries >= matchecReleaseTotalParts) {
            // Check that the binary is complete
            List<Binary> releaseBinaryList = binaryDAO.findBinariesByReleaseNameProcStatGroupIdFromName(
                    releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);

            boolean incomplete = false;
            for (Binary binary : releaseBinaryList) {
                long partsCount = partDAO.countPartsByBinaryId(binary.getId());
                if (partsCount < binary.getTotalParts()) {
                    float percentComplete = ((float) partsCount / (float) binary.getTotalParts()) * 100;
                    _log.warn(String.format("binary %s from %s has missing parts = %s/%s (%s%% complete)",
                            binary.getId(), releaseName, partsCount, binary.getTotalParts(), percentComplete));

                    // Allow to binary to release if posted to usenet longer than four hours ago and we still don't have all the parts
                    DateTime fourHoursAgo = DateTime.now().minusHours(4);
                    if (fourHoursAgo.isAfter(new DateTime(binary.getDate()))) {
                        _log.info("allowing incomplete binary " + binary.getId());
                    } else {
                        incomplete = true;
                    }
                }
            }

            if (incomplete) {
                _log.warn(String.format("Incorrect number of parts %s-%s-%s", releaseName,
                        matchedReleaseQueryNumberOfBinaries, matchecReleaseTotalParts));
                binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                        matchedReleaseQueryGroup, fromName);
            }

            //
            // Right number of files, but see if the binary is a allfilled/reqid post, in which case it needs its name looked up
            // TODO: Does this even work anymore?
            else if (ValidatorUtil.isNotNull(site.getReqIdUrl()) && ValidatorUtil.isNotNull(reqId)) {

                //
                // Try and get the name using the group
                //
                _log.info("Looking up " + reqId + " in " + group.getName() + "...");
                String newTitle = getReleaseNameForReqId(site.getReqIdUrl(), group, reqId, true);

                //
                // if the feed/group wasnt supported by the scraper, then just use the release name as the title.
                //
                if (ValidatorUtil.isNull(newTitle) || newTitle.equals("no feed")) {
                    newTitle = releaseName;
                    _log.warn("Group not supported");
                }

                //
                // Valid release with right number of files and title now, so move it on
                //
                if (ValidatorUtil.isNotNull(newTitle)) {
                    binaryDAO.updateBinaryNameAndStatus(newTitle, Defaults.PROCSTAT_READYTORELEASE, releaseName,
                            Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                } else {
                    //
                    // Item not found, if the binary was added to the index yages ago, then give up.
                    //
                    Timestamp timestamp = binaryDAO.findMaxDateAddedBinaryByReleaseNameProcStatGroupIdFromName(
                            releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                    DateTime maxAddedDate = new DateTime(timestamp);
                    DateTime twoDaysAgo = DateTime.now().minusDays(2);

                    if (maxAddedDate.isBefore(twoDaysAgo)) {
                        binaryDAO.updateBinaryNameAndStatus(releaseName,
                                Defaults.PROCSTAT_NOREQIDNAMELOOKUPFOUND, releaseName,
                                Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                        _log.warn("Not found in 48 hours");
                    }
                }
            } else {
                binaryDAO.updateBinaryNameAndStatus(releaseName, Defaults.PROCSTAT_READYTORELEASE, releaseName,
                        Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);

            }
        } else {
            //
            // Theres less than the expected number of files, so update the attempts and move on.
            //

            _log.info(String.format("Incorrect number of files for %s (%d/%d)", releaseName,
                    matchedReleaseQueryNumberOfBinaries, matchecReleaseTotalParts));
            binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                    matchedReleaseQueryGroup, fromName);
        }

        if (retcount % 10 == 0) {
            _log.info(String.format("-processed %d binaries stage two", retcount));
        }

    }
    transactionManager.commit(transaction);

    retcount = 0;
    int nfoCount = 0;

    // this is a hack - tx is not working ATM
    transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));
    //
    // Get out all distinct relname, group from binaries of STAGE2
    //
    _log.info("Stage 3");
    List<MatchedReleaseQuery> readyReleaseQueries = binaryDAO
            .findBinariesByProcStatAndTotalParts(Defaults.PROCSTAT_READYTORELEASE);
    readyReleaseQueries = combineMatchedQueries(readyReleaseQueries);
    for (MatchedReleaseQuery readyReleaseQuery : readyReleaseQueries) {
        retcount++;

        String releaseName = readyReleaseQuery.getReleaseName();
        int numParts = readyReleaseQuery.getReleaseTotalParts();
        long binaryCount = readyReleaseQuery.getNumberOfBinaries();
        long groupId = readyReleaseQuery.getGroup();
        //
        // Get the last post date and the poster name from the binary
        //
        String fromName = readyReleaseQuery.getFromName();
        Timestamp timestamp = binaryDAO.findMaxDateAddedBinaryByReleaseNameProcStatGroupIdFromName(releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);
        DateTime addedDate = new DateTime(timestamp);

        //
        // Get all releases with the same name with a usenet posted date in a +1-1 date range.
        //
        Date oneDayBefore = addedDate.minusDays(1).toDate();
        Date oneDayAfter = addedDate.plusDays(1).toDate();
        List<Release> relDupes = releaseDAO.findReleasesByNameAndDateRange(releaseName, oneDayBefore,
                oneDayAfter);

        // duplicate release name within the window: mark binaries DUPLICATE and skip
        if (!relDupes.isEmpty()) {
            binaryDAO.updateBinaryNameAndStatus(releaseName, Defaults.PROCSTAT_DUPLICATE, releaseName,
                    Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);
            continue;
        }

        //
        // Get total size of this release
        // Done in a big OR statement, not an IN as the mysql binaryID index on parts table
        // was not being used.
        //

        // SM: TODO this should be revisited, using hb mappings

        long totalSize = 0;
        int regexAppliedCategoryId = 0;
        long regexIdUsed = 0;
        int reqIdUsed = 0;
        int relTotalParts = 0;
        float relCompletion;
        List<Binary> binariesForSize = binaryDAO.findBinariesByReleaseNameProcStatGroupIdFromName(releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);

        // accumulate size/part counts; first non-null category/regex/reqid wins
        long relParts = 0;
        for (Binary binary : binariesForSize) {
            if (ValidatorUtil.isNotNull(binary.getCategoryId()) && regexAppliedCategoryId == 0) {
                regexAppliedCategoryId = binary.getCategoryId();
            }

            if (ValidatorUtil.isNotNull(binary.getRegexId()) && regexIdUsed == 0) {
                regexIdUsed = binary.getRegexId();
            }

            if (ValidatorUtil.isNotNull(binary.getReqId()) && reqIdUsed == 0) {
                reqIdUsed = binary.getReqId();
            }

            relTotalParts += binary.getTotalParts();
            relParts += partDAO.countPartsByBinaryId(binary.getId());
            totalSize += partDAO.sumPartsSizeByBinaryId(binary.getId());
        }
        relCompletion = ((float) relParts / (float) relTotalParts) * 100f;

        //
        // Insert the release
        //

        String releaseGuid = UUID.randomUUID().toString();
        int categoryId;
        Category category = null;
        Long regexId;
        Integer reqId;
        if (regexAppliedCategoryId == 0) {
            // no regex-assigned category; derive one from the group and name
            categoryId = categoryService.determineCategory(groupId, releaseName);
        } else {
            categoryId = regexAppliedCategoryId;
        }
        if (categoryId > 0) {
            category = categoryService.getCategory(categoryId);
        }

        // 0 is the "unset" sentinel for both regexId and reqId; map it to null
        if (regexIdUsed == 0) {
            regexId = null;
        } else {
            regexId = regexIdUsed;
        }

        if (reqIdUsed == 0) {
            reqId = null;
        } else {
            reqId = reqIdUsed;
        }

        // Strip all characters other than alphanumerics, '-', '_', space and '.'
        String cleanReleaseName = releaseName.replaceAll("[^A-Za-z0-9-_\\ \\.]+", "");
        Release release = new Release();
        release.setName(cleanReleaseName);
        release.setSearchName(cleanReleaseName);
        release.setTotalpart(numParts);
        release.setGroupId(groupId);
        release.setAddDate(new Date());
        release.setGuid(releaseGuid);
        release.setCategory(category);
        release.setRegexId(regexId);
        release.setRageId((long) -1);
        release.setPostDate(addedDate.toDate());
        release.setFromName(fromName);
        release.setSize(totalSize);
        release.setReqId(reqId);
        release.setPasswordStatus(site.getCheckPasswordedRar() == 1 ? -1 : 0); // magic constants
        release.setCompletion(relCompletion);
        releaseDAO.updateRelease(release);
        long releaseId = release.getId();
        _log.info("Added release " + cleanReleaseName);

        //
        // Tag every binary for this release with its parent release id
        // remove the release name from the binary as its no longer required
        //
        binaryDAO.updateBinaryNameStatusReleaseID("", Defaults.PROCSTAT_RELEASED, releaseId, releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);

        //
        // Find an .nfo in the release
        //
        ReleaseNfo releaseNfo = nfo.determineReleaseNfo(release);
        if (releaseNfo != null) {
            nfo.addReleaseNfo(releaseNfo);
            nfoCount++;
        }

        //
        // Write the nzb to disk
        //
        nzb.writeNZBforReleaseId(release, nzbBaseDir, true);

        if (retcount % 5 == 0) {
            _log.info("-processed " + retcount + " releases stage three");
        }

    }

    _log.info("Found " + nfoCount + " nfos in " + retcount + " releases");

    //
    // Process nfo files
    //
    if (site.getLookupNfo() != 1) {
        _log.info("Site config (site.lookupnfo) prevented retrieving nfos");
    } else {
        nfo.processNfoFiles(site.getLookupImdb(), site.getLookupTvRage());
    }

    //
    // Lookup imdb if enabled
    //
    if (site.getLookupImdb() == 1) {
        movieService.processMovieReleases();
    }

    //
    // Lookup music if enabled
    //
    if (site.getLookupMusic() == 1) {
        musicService.processMusicReleases();
    }

    //
    // Lookup games if enabled
    //
    if (site.getLookupGames() == 1) {
        gameService.processConsoleReleases();
    }

    //
    // Check for passworded releases
    //
    if (site.getCheckPasswordedRar() != 1) {
        _log.info("Site config (site.checkpasswordedrar) prevented checking releases are passworded");
    } else {
        processPasswordedReleases(true);
    }

    //
    // Process all TV related releases which will assign their series/episode/rage data
    //
    tvRageService.processTvReleases(site.getLookupTvRage() == 1);

    //
    // Get the current datetime again, as using now() in the housekeeping queries prevents the index being used.
    //
    DateTime now = new DateTime();

    //
    // Tidy away any binaries which have been attempted to be grouped into
    // a release more than x times (SM: or is it days?)
    //
    int attemtpGroupBinDays = site.getAttemtpGroupBinDays();
    _log.info(String.format("Tidying away binaries which cant be grouped after %s days", attemtpGroupBinDays));

    DateTime maxGroupBinDays = now.minusDays(attemtpGroupBinDays);
    binaryDAO.updateProcStatByProcStatAndDate(Defaults.PROCSTAT_WRONGPARTS, Defaults.PROCSTAT_NEW,
            maxGroupBinDays.toDate());

    //
    // Delete any parts and binaries which are older than the site's retention days
    //
    int maxRetentionDays = site.getRawRetentionDays();
    DateTime maxRetentionDate = now.minusDays(maxRetentionDays);
    _log.info(String.format("Deleting parts which are older than %d days", maxRetentionDays));
    partDAO.deletePartByDate(maxRetentionDate.toDate());

    _log.info(String.format("Deleting binaries which are older than %d days", maxRetentionDays));
    binaryDAO.deleteBinaryByDate(maxRetentionDate.toDate());

    //
    // Delete any releases which are older than site's release retention days
    //
    int releaseretentiondays = site.getReleaseRetentionDays();
    if (releaseretentiondays != 0) {
        _log.info("Determining any releases past retention to be deleted.");

        DateTime maxReleaseRetentionDate = DateTime.now().minusDays(releaseretentiondays);
        List<Release> releasesToDelete = releaseDAO.findReleasesBeforeDate(maxReleaseRetentionDate.toDate());
        for (Iterator<Release> iterator = releasesToDelete.iterator(); iterator.hasNext();) {
            Release release = iterator.next();
            releaseDAO.deleteRelease(release);
        }
    }
    transaction.flush(); // may be unneeded
    transactionManager.commit(transaction);

    _log.info(String.format("Processed %d releases", retcount));
    if (!transaction.isCompleted()) {
        throw new IllegalStateException("Transaction is not completed or rolled back.");
    }
    //return retcount;
}

From source file:com.ifeng.util.download.DownloadThread.java

/**
 * Send the request to the server, handling any I/O exceptions.
 * /*w  w  w  . ja  va  2s  .  c om*/
 * @param state
 *            state
 * @param client
 *            client
 * @param request
 *            request
 * @return sendRequest
 * @throws StopRequest
 *             StopRequest
 */
private HttpResponse sendRequest(State state, HttpClient client, HttpGet request) throws StopRequest {
    try {
        return client.execute(request);
    } catch (IllegalArgumentException ex) {
        throw new StopRequest(Downloads.Impl.STATUS_HTTP_DATA_ERROR,
                "while trying to execute request: " + ex.toString(), ex);
    } catch (IOException ex) {
        logNetworkState();
        throw new StopRequest(getFinalStatusForHttpError(state),
                "while trying to execute request: " + ex.toString(), ex);
    }
}

From source file:at.alladin.rmbt.controlServer.QualityOfServiceResultResource.java

/**
 * Accepts a QoS test result submission (JSON body), verifies the HMAC-signed
 * test token, persists each submitted QoS result and then re-evaluates all
 * stored results for the test against their objectives, updating the
 * success/failure counters.
 *
 * @param entity the request body: a JSON object expected to contain
 *               "test_token" and optionally "qos_result", "client_language",
 *               "client_name" and "client_version"
 * @return a JSON answer object as a string; its "error" entry lists any
 *         errors collected while processing the request
 */
@Post("json")
public String request(final String entity) {
    final String secret = getContext().getParameters().getFirstValue("RMBT_SECRETKEY");

    addAllowOrigin();

    JSONObject request = null;

    final ErrorList errorList = new ErrorList();
    final JSONObject answer = new JSONObject();

    System.out.println(MessageFormat.format(labels.getString("NEW_QOS_RESULT"), getIP()));

    if (entity != null && !entity.isEmpty())
        // try parse the string to a JSON object
        try {
            request = new JSONObject(entity);

            final String lang = request.optString("client_language");

            // Load Language Files for Client
            final List<String> langs = Arrays
                    .asList(settings.getString("RMBT_SUPPORTED_LANGUAGES").split(",\\s*"));

            if (langs.contains(lang)) {
                errorList.setLanguage(lang);
                labels = ResourceManager.getSysMsgBundle(new Locale(lang));
            }

            if (conn != null) {
                ResultOptions resultOptions = new ResultOptions(new Locale(lang));

                // everything below is committed as a single transaction
                conn.setAutoCommit(false);

                final Test test = new Test(conn);

                if (request.optString("test_token").length() > 0) {

                    // token layout: <test-uuid>_<data>_<hmac>
                    final String[] token = request.getString("test_token").split("_");

                    try {
                        // Check if UUID
                        final UUID testUuid = UUID.fromString(token[0]);

                        final String data = token[0] + "_" + token[1];

                        final String hmac = Helperfunctions.calculateHMAC(secret, data);
                        if (hmac.length() == 0)
                            errorList.addError("ERROR_TEST_TOKEN");

                        if (token[2].length() > 0 && hmac.equals(token[2])) {

                            final List<String> clientNames = Arrays
                                    .asList(settings.getString("RMBT_CLIENT_NAME").split(",\\s*"));
                            final List<String> clientVersions = Arrays
                                    .asList(settings.getString("RMBT_VERSION_NUMBER").split(",\\s*"));

                            if (test.getTestByUuid(testUuid) > 0)
                                if (clientNames.contains(request.optString("client_name"))
                                        && clientVersions.contains(request.optString("client_version"))) {
                                    //save qos test results:
                                    JSONArray qosResult = request.optJSONArray("qos_result");
                                    if (qosResult != null) {
                                        QoSTestResultDao resultDao = new QoSTestResultDao(conn);

                                        // these keys are metadata, not part of the stored result payload
                                        Set<String> excludeTestTypeKeys = new TreeSet<>();
                                        excludeTestTypeKeys.add("test_type");
                                        excludeTestTypeKeys.add("qos_test_uid");

                                        for (int i = 0; i < qosResult.length(); i++) {
                                            JSONObject testObject = qosResult.optJSONObject(i);
                                            JSONObject resultJson = new JSONObject(testObject,
                                                    JSONObject.getNames(testObject));
                                            for (String excludeKey : excludeTestTypeKeys) {
                                                resultJson.remove(excludeKey);
                                            }
                                            QoSTestResult testResult = new QoSTestResult();
                                            testResult.setResults(resultJson.toString());
                                            testResult.setTestType(testObject.getString("test_type"));
                                            testResult.setTestUid(test.getUid());
                                            long qosTestId = testObject.optLong("qos_test_uid", Long.MIN_VALUE);
                                            testResult.setQoSTestObjectiveId(qosTestId);
                                            resultDao.save(testResult);
                                        }
                                    }

                                    QoSTestResultDao resultDao = new QoSTestResultDao(conn);
                                    PreparedStatement updateCounterPs = resultDao
                                            .getUpdateCounterPreparedStatement();
                                    List<QoSTestResult> testResultList = resultDao.getByTestUid(test.getUid());
                                    //map that contains all test types and their result descriptions determined by the test result <-> test objectives comparison
                                    Map<TestType, TreeSet<ResultDesc>> resultKeys = new HashMap<>();

                                    //test description set:
                                    Set<String> testDescSet = new TreeSet<>();
                                    //test summary set:
                                    Set<String> testSummarySet = new TreeSet<>();

                                    //iterate through all result entries
                                    for (QoSTestResult testResult : testResultList) {

                                        //reset test counters
                                        testResult.setFailureCounter(0);
                                        testResult.setSuccessCounter(0);

                                        //get the correct class of the result;
                                        TestType testType = TestType
                                                .valueOf(testResult.getTestType().toUpperCase());
                                        Class<? extends AbstractResult<?>> clazz = testType.getClazz();
                                        //parse hstore data
                                        final JSONObject resultJson = new JSONObject(testResult.getResults());
                                        AbstractResult<?> result = QoSUtil.HSTORE_PARSER.fromJSON(resultJson,
                                                clazz);

                                        if (result != null) {
                                            // BUGFIX: attach the raw JSON only when parsing succeeded.
                                            // The old code called result.setResultJson(...) BEFORE the
                                            // null check, so a null parse result crashed with an NPE
                                            // and the guard below was unreachable.
                                            result.setResultJson(resultJson);
                                            //add each test description key to the testDescSet (to fetch it later from the db)
                                            if (testResult.getTestDescription() != null) {
                                                testDescSet.add(testResult.getTestDescription());
                                            }
                                            if (testResult.getTestSummary() != null) {
                                                testSummarySet.add(testResult.getTestSummary());
                                            }
                                            testResult.setResult(result);

                                        }
                                        //compare test results with expected results
                                        // NOTE(review): 'result' may be null here if parsing failed —
                                        // compareTestResults is assumed to tolerate that; confirm.
                                        QoSUtil.compareTestResults(testResult, result, resultKeys, testType,
                                                resultOptions);

                                        //update all test results after the success and failure counters have been set
                                        resultDao.updateCounter(testResult, updateCounterPs);
                                    }
                                } else
                                    errorList.addError("ERROR_CLIENT_VERSION");
                        } else
                            errorList.addError("ERROR_TEST_TOKEN_MALFORMED");
                    } catch (final IllegalArgumentException e) {
                        // thrown by UUID.fromString for a malformed uuid part
                        e.printStackTrace();
                        errorList.addError("ERROR_TEST_TOKEN_MALFORMED");
                    } catch (HstoreParseException e) {
                        e.printStackTrace();
                        errorList.addError("ERROR_DB_CONNECTION");
                    } catch (IllegalAccessException e) {
                        e.printStackTrace();
                        errorList.addError("ERROR_TEST_TOKEN_MALFORMED");
                    }

                } else
                    errorList.addError("ERROR_TEST_TOKEN_MISSING");

                conn.commit();
            } else
                errorList.addError("ERROR_DB_CONNECTION");

        } catch (final JSONException e) {
            errorList.addError("ERROR_REQUEST_JSON");
            e.printStackTrace();
        } catch (final SQLException e) {
            e.printStackTrace();
        }
    else
        errorList.addErrorString("Expected request is missing.");

    try {
        answer.putOpt("error", errorList.getList());
    } catch (final JSONException e) {
        System.out.println("Error saving ErrorList: " + e.toString());
    }

    return answer.toString();
}

From source file:net.kidlogger.kidlogger.KLService.java

/**
 * Tears down every listener/receiver/observer this service may have
 * registered. Receivers are unregistered one at a time via
 * {@link #tryUnregister} so that a single "receiver not registered"
 * IllegalArgumentException no longer aborts the remaining cleanup — the
 * previous single try/catch skipped (and leaked) every receiver listed
 * after the first one that threw.
 */
@Override
public void onDestroy() {
    super.onDestroy();
    try {
        if (uploadOn && mAlarmManager != null) {
            mAlarmManager.cancel(mPI);
        }
        if (taskOn) {
            handleTask.removeCallbacks(taskScan);
        }
        if (clipOn) {
            handleClipb.removeCallbacks(clipboardScan);
        }
        if (delayNewCallEvent != null) {
            delayNewCallEvent.cancel();
        }
        if (gpsOn) {
            locMngr.removeUpdates(locListener);
        }
    } catch (IllegalArgumentException e) {
        app.logError(CN + "onDestroy", e.toString());
    }

    if (wifiOn && wifiReceiver != null) {
        tryUnregister(wifiReceiver);
    }
    if (smsOn && smsObserver != null && smsObserver.inSms != null) {
        tryUnregister(smsObserver.inSms);
        smsObserver.unregisterObserver();
    }
    if (callOn && callsReceiver != null) {
        tryUnregister(callsReceiver);
    }
    if (idleOn && idleReceiver != null) {
        tryUnregister(idleReceiver);
    }
    if (urlOn && urlObserver != null) {
        urlObserver.unregisterObserver();
    }
    if (usbOn && usbReceiver != null) {
        tryUnregister(usbReceiver);
    }
    if (powerOn && powerReceiver != null) {
        tryUnregister(powerReceiver);
    }
    if (mediaOn && mediaReceiver != null) {
        tryUnregister(mediaReceiver);
    }
    if (gsmOn && telManager != null && gsmObserver != null) {
        telManager.listen(gsmObserver, PhoneStateListener.LISTEN_NONE);
    }
    if (airOn && airReceiver != null) {
        tryUnregister(airReceiver);
    }
    if (photoOn && photoObserver != null) {
        photoObserver.unregisterObserver();
    }
    if (mConReceiver != null) {
        tryUnregister(mConReceiver);
    }

    // Persist upload progress so a restarted service can resume where it left off.
    saveToPref("fileName", file);
    saveToPref("uploadedSize", uploadedSize);

    // Log stop service
    logServiceState(false);
    serviceStarted = false;
    app.mService = null;
}

/**
 * Unregisters a broadcast receiver, logging (instead of propagating) the
 * IllegalArgumentException Android throws when the receiver was never
 * registered, so the caller's remaining teardown still runs.
 */
private void tryUnregister(android.content.BroadcastReceiver receiver) {
    try {
        unregisterReceiver(receiver);
    } catch (IllegalArgumentException e) {
        app.logError(CN + "onDestroy", e.toString());
    }
}

From source file:org.metis.sql.SqlStmnt.java

/**
 * Called by the Controller bean (RDB or PDB) to execute this SQL statement
 * with the given params.
 *
 * @param params one key:value bind-parameter map per statement execution;
 *               may be null or empty for static (non-prepared) statements
 * @return the {@code SqlResult} of the execution, or {@code null} if the
 *         given params do not satisfy this statement's requirements or an
 *         exception occurred
 */
public SqlResult execute(List<Map<String, String>> params) {

    // normalize so the size() checks below work uniformly
    if (params == null) {
        params = new ArrayList<Map<String, String>>();
    }

    LOG.debug("execute: executing this statement: " + getOriginal());
    LOG.debug("execute: ... with this number of param maps  = " + params.size());

    // first, do some light validation work
    if (params.size() == 0 && (isPrepared() || isCallable())) {
        // if it is callable and it requires an IN param
        if (isCallable() && getInTokens().size() > 0) {
            LOG.error("execute: ERROR, IN params were not provided "
                    + "for this callable statement that requires IN params: " + getPrepared());
            return null;
        }
        // all prepared statements that are not callable require an
        // input param
        else {
            LOG.error("execute: ERROR, params were not provided " + "for this prepared statement: "
                    + getPrepared());
            return null;

        }
    } else if (params.size() > 0 && !isPrepared()) {
        LOG.error("execute: ERROR, params were provided "
                + "for this static or non-prepared statement that does not " + "require params: "
                + getOriginal());
        return null;
    }

    // make sure given params match this statement's declared key set
    if (params.size() > 0) {
        for (Map<String, String> pMap : params) {
            if (!isMatch(pMap.keySet())) {
                LOG.error("execute: ERROR, given key:value set does not match "
                        + "this statement's key:value set\n" + getKeyTokens().toString() + "  vs.  "
                        + params.toString());
                return null;
            }
        }
    }

    // if trace is on, dump params if any
    if (params.size() > 0 && LOG.isTraceEnabled()) {
        for (Map<String, String> pMap : params) {
            LOG.trace("execute: valid param set = " + pMap.toString());
        }
    }

    // A list that essentially represents the result set returned by the
    // DB for queries.
    List<Map<String, Object>> listOfMaps = new ArrayList<Map<String, Object>>();

    // dequeue a sqlResult object from the SqlResult cache
    // (re-enqueued below on failure so the pooled object is not lost)
    SqlResult sqlResult = SqlResult.dequeue();

    try {
        // if this statement is call'able, then execute its stored procedure
        // object. Note that we don't support batching calls to stored
        // procedures and functions. Maybe that can be a future
        // enhancement...
        if (isCallable()) {
            LOG.debug("execute: invoking this stored procedure or function: " + getStoredProcName());
            Map<String, Object> kvMap = new HashMap<String, Object>();
            // first prepare the IN params (if any)
            if (params.size() > 0) {
                for (KeyValueObject kvObj : getPreparedObjects(params.get(0))) {
                    kvMap.put(kvObj.getKey(), kvObj.getObj());
                }
            }
            // now execute the function or stored proc
            // Note from Spring docs: The execute() method returns a
            // map with an entry for each declared output parameter,
            // using the parameter name as the key.
            kvMap = getStoredProcedure().execute(kvMap);
            // now that the execute has completed, fetch the OUT params
            // from the kvMap. i suppose it is possible for a stored proc
            // not to have any OUT params.

            // need to transfer each key:value that is associated with
            // the OUT param as a map to listOfMaps. However, those
            // keys that pertain to cursors or sets, point
            // to a List of Maps!!
            for (SqlToken sqlToken : getSortedKeyTokens()) {
                // skip IN only params; we're only looking for OUT params
                if (sqlToken.isIn()) {
                    continue;
                }
                Object outObj = kvMap.remove(sqlToken.getKey());
                if (outObj == null) {
                    LOG.error("execute: object was not returned for this " + "out param: " + sqlToken.getKey());
                    continue;
                }
                if (sqlToken.isCursor() || sqlToken.isRset()) {
                    if (outObj instanceof List) {
                        // unchecked: Spring presumably returns each cursor/result
                        // set as a List of row maps — confirm with driver docs
                        List<Map<String, Object>> mList = (List<Map<String, Object>>) outObj;
                        for (Map<String, Object> map : mList) {
                            listOfMaps.add(map);
                        }
                    } else {
                        LOG.error("execute: this OUT result set param did not return a type of List: "
                                + sqlToken.getKey());
                        LOG.error("execute: got this type/class instead: " + outObj.getClass().getName());
                    }
                } else {
                    // scalar OUT param: wrap it in a single-entry map
                    Map<String, Object> map = new HashMap<String, Object>();
                    map.put(sqlToken.getKey(), outObj);
                    listOfMaps.add(map);
                }
            }
            /*
             * Any undeclared results returned are added to the output map
             * with generated names like "#result-set-1" "#result-set-2"
             * etc. You can change this by setting 'skipUndeclaredResults'
             * to true, and then these undeclared resultsets will be
             * skipped. TODO: look into the update count
             */
            if (!kvMap.isEmpty()) {
                LOG.debug("execute: looking for result sets");
                for (Object kvObj : kvMap.values()) {
                    if (kvObj instanceof List) {
                        for (Map<String, Object> map : (List<Map<String, Object>>) kvObj) {
                            listOfMaps.add(map);
                        }
                    } else {
                        LOG.debug(
                                "execute: unknown object returned from execute: " + kvObj.getClass().getName());
                        LOG.debug("execute: unknown object's toString value: " + kvObj.toString());
                    }
                }
            }
            sqlResult.setResultSet(listOfMaps);
            return sqlResult;

        } // if (isCallable()...

        // key:value type objects used for binding the input params to
        // prepared statements
        List<KeyValueObject> kvObjs = null;
        Object bindObjs[] = null;

        // is this a query; i.e., select statement?
        if (getSqlStmntType() == SqlStmntType.SELECT) {
            if (isPrepared()) {
                LOG.debug("execute: executing this prepared SELECT statement: " + getPrepared());
                // only the FIRST param map is used for a SELECT; batching
                // applies to updates only
                kvObjs = getPreparedObjects(params.get(0));
                bindObjs = new Object[kvObjs.size()];
                for (int i = 0; i < bindObjs.length; i++) {
                    bindObjs[i] = kvObjs.get(i).getObj();
                }
                listOfMaps = getJdbcTemplate().query(getPrepared(), bindObjs, this);
            } else {
                LOG.trace("execute: executing this SELECT statement: " + getOriginal());
                listOfMaps = getJdbcTemplate().query(getOriginal(), this);
            }
            if (listOfMaps != null && listOfMaps.size() > 0) {
                LOG.trace("execute: dumping first map - " + listOfMaps.get(0).toString());
            }
            sqlResult.setResultSet(listOfMaps);
            return sqlResult;
        }

        // ok, this statement is neither call'able nor a query so it
        // must be either an update of some kind; i.e., insert, update or
        // delete

        // note that keyHolders are only used for INSERT statements!

        if (!isPrepared()) {
            // NOTE(review): bindObjs is still null here; PreparedStmntCreator
            // presumably tolerates a null bind array for non-prepared
            // statements — confirm.
            PreparedStmntCreator creatorSetter = new PreparedStmntCreator(this, bindObjs);
            // i guess it is possible to have a non prepared update of some
            // sort
            if (getSqlStmntType() == SqlStmntType.INSERT) {
                GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
                sqlResult.setNumRows(getJdbcTemplate().update(creatorSetter, keyHolder));
                sqlResult.setKeyHolder(keyHolder);
            } else {
                sqlResult.setNumRows(getJdbcTemplate().update(getOriginal(), creatorSetter));
            }
        }

        // we have a prepared update; is the client requesting a batch
        // update?
        else if (params.size() > 1) {
            LOG.debug("execute: invoking batch update for this statement: " + getPrepared());
            // create the list of objects for the batch update
            List<Object[]> batchArgs = new ArrayList<Object[]>();
            for (Map<String, String> map : params) {
                // prepare the bind objects for the prepared
                // statement
                kvObjs = getPreparedObjects(map);
                bindObjs = new Object[kvObjs.size()];
                for (int i = 0; i < bindObjs.length; i++) {
                    bindObjs[i] = kvObjs.get(i).getObj();
                }
                batchArgs.add(bindObjs);
            }
            sqlResult.setBatchNumRows(getJdbcTemplate().batchUpdate(getPrepared(), batchArgs));
            // note that a key holder is not possible with a batch
            // update
        }

        // we have a prepared update, but it is not a batch update
        else if (params.size() == 1) {

            LOG.debug("execute: invoking prepared update for this statement: " + getPrepared());
            kvObjs = getPreparedObjects(params.get(0));
            bindObjs = new Object[kvObjs.size()];
            for (int i = 0; i < bindObjs.length; i++) {
                bindObjs[i] = kvObjs.get(i).getObj();
            }
            // note that PreparedStmntCreator is both a creator and setter
            PreparedStmntCreator creatorSetter = new PreparedStmntCreator(this, bindObjs);

            if (getSqlStmntType() == SqlStmntType.INSERT) {
                LOG.trace("execute: executing prepared INSERT statement");
                GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
                int numRows = getJdbcTemplate().update(creatorSetter, keyHolder);
                sqlResult.setNumRows(numRows);
                sqlResult.setKeyHolder(keyHolder);
            } else {
                LOG.trace("execute: executing UPDATE statement");
                int numRows = getJdbcTemplate().update(getPrepared(), creatorSetter);
                sqlResult.setNumRows(numRows);
            }
        }

    } catch (IllegalArgumentException exc) {
        LOG.error("execute: ERROR, caught this " + "IllegalArgumentException while executing sql: "
                + exc.toString());
        LOG.error("execute: exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error("execute: Caused by " + exc.getCause().toString());
            LOG.error("execute: causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        // return the pooled result object before reporting failure
        if (sqlResult != null) {
            SqlResult.enqueue(sqlResult);
        }
        sqlResult = null;
    } catch (DataAccessException exc) {
        LOG.error("execute:ERROR, caught this " + "DataAccessException while executing sql: " + exc.toString());
        LOG.error("execute: exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        LOG.error("execute: Most Specific Cause = " + exc.getMostSpecificCause().toString());
        LOG.error("execute: MSC exception stack trace follows:");
        dumpStackTrace(exc.getMostSpecificCause().getStackTrace());
        // return the pooled result object before reporting failure
        if (sqlResult != null) {
            SqlResult.enqueue(sqlResult);
        }
        sqlResult = null;
    }
    return sqlResult;
}

From source file:edu.umass.cs.gnsserver.httpserver.GNSHttpServer.java

/**
 * Converts an HTTP query into a GNS command packet and executes it, either
 * locally (creates, deletes, selects, admin) or by forwarding it to a
 * remote server through the GNS client.
 *
 * @param host         the requesting host (not referenced by this method)
 * @param commandName  the command's name; case-insensitive for HTTP
 *                     (e.g. "dump" as well as "Dump")
 * @param queryString  the raw URI query string carrying the command's
 *                     arguments (does not include the command name)
 * @param secureServer true if the request arrived over HTTPS; mutual-auth
 *                     commands are rejected on a non-secure server
 * @return the command's response, or an error response if the command is
 *         unknown, not permitted, or failed
 * @throws InternalRequestException if local command execution fails internally
 */
private CommandResponse processQuery(String host, String commandName, String queryString, boolean secureServer)
        throws InternalRequestException {

    // Convert the URI into a JSONObject, stuffing in some extra relevant fields like
    // the signature, and the message signed.
    try {
        // Note that the commandName is not part of the queryString string here so
        // it doesn't end up in the jsonCommand. Also see below where we put the
        // command integer into the jsonCommand.
        JSONObject jsonCommand = Util.parseURIQueryStringIntoJSONObject(queryString);
        // If the signature exists it is Base64 encoded so decode it now.
        if (jsonCommand.has(GNSProtocol.SIGNATURE.toString())) {
            jsonCommand.put(GNSProtocol.SIGNATURE.toString(),
                    new String(Base64.decode(jsonCommand.getString(GNSProtocol.SIGNATURE.toString())),
                            GNSProtocol.CHARSET.toString()));
        }
        // getCommandForHttp allows for "dump" as well as "Dump"
        CommandType commandType = CommandType.getCommandForHttp(commandName);
        if (commandType == null) {
            return new CommandResponse(ResponseCode.OPERATION_NOT_SUPPORTED,
                    GNSProtocol.BAD_RESPONSE.toString() + " " + GNSProtocol.OPERATION_NOT_SUPPORTED.toString()
                            + " Sorry, don't understand " + commandName + QUERYPREFIX + queryString);
        }

        //Only allow mutual auth commands if we're on a secure (HTTPS) server
        if (commandType.isMutualAuth() && !secureServer) {
            return new CommandResponse(ResponseCode.OPERATION_NOT_SUPPORTED,
                    GNSProtocol.BAD_RESPONSE.toString() + " " + GNSProtocol.OPERATION_NOT_SUPPORTED.toString()
                            + " Not authorized to execute " + commandName + QUERYPREFIX + queryString);
        }

        // The client currently just uses the command name (which is not part of the
        // query string above) so we need to stuff
        // in the Command integer for the signature check and execution.
        jsonCommand.put(GNSProtocol.COMMAND_INT.toString(), commandType.getInt());
        // Optionally does some sanity checking on the message if that was enabled at the client.
        // This makes necessary changes to the jsonCommand so don't remove this call
        // unless you know what you're doing and also change the code in the HTTP client.
        sanityCheckMessage(jsonCommand);
        // Some commands are handled locally (creates, deletes, selects, admin)
        // and the rest by invoking the GNS client and sending them out.
        // Client will be null if GNSC.DISABLE_MULTI_SERVER_HTTP (see above)
        // is true (or there was a problem).
        if (client == null || commandType.isLocallyHandled()) {
            // EXECUTE IT LOCALLY
            AbstractCommand command;
            try {
                command = commandModule.lookupCommand(commandType);
                // Do some work to get the signature and message into the command for
                // signature checking that happens later on.
                // This only happens for local execution because remote handling (in the
                // other side of the if) already does this.
                processSignature(jsonCommand);
                if (command != null) {
                    return CommandHandler.executeCommand(command,
                            new CommandPacket((long) (Math.random() * Long.MAX_VALUE), jsonCommand, false),
                            requestHandler);
                }
                LOGGER.log(Level.FINE, "lookupCommand returned null for {0}", commandName);
            } catch (IllegalArgumentException e) {
                // lookupCommand rejects unknown/unsupported command types
                LOGGER.log(Level.FINE, "lookupCommand failed for {0}", commandName);
            }
            return new CommandResponse(ResponseCode.OPERATION_NOT_SUPPORTED,
                    GNSProtocol.BAD_RESPONSE.toString() + " " + GNSProtocol.OPERATION_NOT_SUPPORTED.toString()
                            + " Sorry, don't understand " + commandName + QUERYPREFIX + queryString);
        } else {
            // Send the command remotely using a client
            try {
                LOGGER.log(Level.FINE, "Sending command out to a remote server: {0}", jsonCommand);
                CommandPacket commandResponsePacket = getResponseUsingGNSClient(client, jsonCommand);
                return new CommandResponse(ResponseCode.NO_ERROR,
                        // Single field reads return just the value for backward compatibility.
                        // There is similar code to this in other places.
                        specialCaseSingleFieldRead(commandResponsePacket.getResultString(), commandType,
                                jsonCommand));
            } catch (IOException | ClientException e) {
                return new CommandResponse(ResponseCode.UNSPECIFIED_ERROR, GNSProtocol.BAD_RESPONSE.toString()
                        + " " + GNSProtocol.UNSPECIFIED_ERROR.toString() + " " + e.toString());
            }
        }
    } catch (JSONException | UnsupportedEncodingException e) {
        return new CommandResponse(ResponseCode.UNSPECIFIED_ERROR, GNSProtocol.BAD_RESPONSE.toString() + " "
                + GNSProtocol.UNSPECIFIED_ERROR.toString() + " " + e.toString());
    }
}

From source file:de.arcus.playmusiclib.PlayMusicManager.java

/**
 * Copies the music file to a new path and adds the mp3 meta data.
 *
 * @param musicTrack Track information
 * @param src The source mp3 file
 * @param dest The destination path
 * @return true if the operation was successful, false otherwise
 */
private boolean trackWriteID3(MusicTrack musicTrack, String src, String dest) {
    try {
        // Opens the mp3
        Mp3File mp3File = new Mp3File(src);

        // Removes all existing tags
        mp3File.removeId3v1Tag();
        mp3File.removeId3v2Tag();
        mp3File.removeCustomTag();

        // We want to add a fallback ID3v1 tag
        if (mID3EnableFallback) {
            // Create a new tag with ID3v1
            ID3v1Tag tagID3v1 = new ID3v1Tag();

            // Set all tag values
            tagID3v1.setTrack(musicTrack.getTitle());
            tagID3v1.setArtist(musicTrack.getArtist());
            tagID3v1.setAlbum(musicTrack.getAlbum());
            tagID3v1.setYear(musicTrack.getYear());

            // Search the genre (ID3v1 only supports the predefined genre list)
            for (int n = 0; n < ID3v1Genres.GENRES.length; n++) {
                // Genre found
                if (ID3v1Genres.GENRES[n].equals(musicTrack.getGenre())) {
                    tagID3v1.setGenre(n);
                    break;
                }
            }

            mp3File.setId3v1Tag(tagID3v1);
        }

        // Creates the ID3v2 tag in the requested version
        final ID3v2 tagID3v2;
        switch (mID3v2Version) {
        case ID3v22:
            tagID3v2 = new ID3v22Tag();
            break;
        case ID3v23:
            tagID3v2 = new ID3v23Tag();
            break;
        case ID3v24:
            tagID3v2 = new ID3v24Tag();
            break;
        default:
            tagID3v2 = null;
            break;
        }

        // BUGFIX: the old code claimed the tag "can't be null" but the
        // default branch left it null and the code below then crashed with
        // a NullPointerException. Fail explicitly instead.
        if (tagID3v2 == null) {
            Logger.getInstance().logError("TrackWriteId3", "Unsupported ID3v2 version: " + mID3v2Version);
            return false;
        }

        // Set all tag values
        tagID3v2.setTitle(musicTrack.getTitle());
        tagID3v2.setArtist(musicTrack.getArtist());
        tagID3v2.setAlbum(musicTrack.getAlbum());
        tagID3v2.setAlbumArtist(musicTrack.getAlbumArtist());
        tagID3v2.setTrack("" + musicTrack.getTrackNumber());
        tagID3v2.setPartOfSet("" + musicTrack.getDiscNumber());
        tagID3v2.setYear(musicTrack.getYear());

        if (!TextUtils.isEmpty(musicTrack.getGenre())) {
            try {
                // Maybe the genre is not supported
                tagID3v2.setGenreDescription(musicTrack.getGenre());
            } catch (IllegalArgumentException e) {
                Logger.getInstance().logWarning("TrackWriteID3", e.getMessage());
            }
        }

        // Add the artwork to the meta data
        if (mID3EnableArtwork) {
            // Load the artwork
            Bitmap bitmap = ArtworkLoader.loadArtwork(musicTrack, mID3ArtworkMaximumSize);

            if (bitmap != null) {
                // JPEG is default
                String mimeType = "image/jpeg";

                // Load the bitmap into a byte array
                ByteArrayOutputStream artworkDataStream = new ByteArrayOutputStream();
                bitmap.compress(Bitmap.CompressFormat.JPEG, 90, artworkDataStream);

                // Adds the artwork to the meta data
                tagID3v2.setAlbumImage(artworkDataStream.toByteArray(), mimeType);
            }
        }

        mp3File.setId3v2Tag(tagID3v2);

        // Save the file
        mp3File.save(dest);

        // Done
        return true;
    } catch (Exception e) {
        Logger.getInstance().logError("TrackWriteId3", e.toString());
    }

    // Failed
    return false;
}

From source file:contestWebsite.PublicResults.java

@Override
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    // Render the public results page. Depending on the "type" request parameter this
    // shows category winners, qualifying students for a school, sweepstakes winners,
    // or score visualizations — or the availability page when no type is given.
    VelocityEngine ve = new VelocityEngine();
    ve.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, "html/pages, html/snippets, html/templates");
    ve.init();
    VelocityContext context = new VelocityContext();
    // init(...) populates shared context entries (including "loggedIn") and returns
    // the contest-info datastore Entity paired with the caller's session cookie.
    Pair<Entity, UserCookie> infoAndCookie = init(context, req);

    UserCookie userCookie = infoAndCookie.y;
    boolean loggedIn = (boolean) context.get("loggedIn");

    Map<String, Integer> awardCriteria = Retrieve.awardCriteria(infoAndCookie.x);

    // Anonymous visitors asking for a refresh are bounced to the landing page.
    if (!loggedIn && "1".equals(req.getParameter("refresh"))) {
        resp.sendRedirect("/?refresh=1");
        // FIX: stop processing after the redirect. Without this return the method
        // kept building the context and rendered a template into a response that
        // sendRedirect() had already committed.
        return;
    }

    Entity contestInfo = infoAndCookie.x;

    // Per-test grading progress, serialized JSON ("{}" when absent).
    context.put("testsGradedNums",
            contestInfo.hasProperty("testsGradedNums") && contestInfo.getProperty("testsGradedNums") != null
                    ? ((Text) contestInfo.getProperty("testsGradedNums")).getValue()
                    : "{}");
    if (contestInfo.hasProperty("testsGraded") && contestInfo.getProperty("testsGraded") != null) {
        context.put("testsGraded", contestInfo.getProperty("testsGraded"));
    }

    // Results are visible once grading is complete, or always for logged-in admins.
    Object complete = contestInfo.getProperty("complete");
    if ((complete != null && (Boolean) complete) || (loggedIn && userCookie.isAdmin())) {
        context.put("complete", true);

        String type = req.getParameter("type");
        context.put("type", type);

        if (type != null) {
            // Compound types look like "category_<testName>" or "qualifying_<school>".
            String[] types = type.split("_");

            String levelString = req.getParameter("level");
            Level level;
            try {
                level = Level.fromString(levelString);
            } catch (IllegalArgumentException e) {
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid level: " + levelString);
                return;
            }

            context.put("level", level.toString());
            context.put("tests", Test.getTests(level));
            context.put("Test", Test.class);

            if (type.startsWith("category_")) {
                // Winners of a single test category at this level.
                context.put("test", Test.fromString(types[1]));
                context.put("trophy", awardCriteria.get("category_" + level + "_trophy"));
                context.put("medal", awardCriteria.get("category_" + level + "_medal"));
                context.put("winners", Retrieve.categoryWinners(types[1], level));
            } else if (type.startsWith("qualifying_")) {
                // Qualifying students for one school (types[1] is the school name).
                context.put("School", School.class);
                Pair<School, List<Student>> schoolAndStudents = Retrieve.schoolStudents(types[1], level);
                context.put("school", schoolAndStudents.x);
                context.put("students", schoolAndStudents.y);
            } else if (type.startsWith("categorySweep")) {
                context.put("trophy", awardCriteria.get("categorySweep_" + level));
                context.put("winners", Retrieve.categorySweepstakesWinners(level));
            } else if (type.equals("sweep")) {
                context.put("trophy", awardCriteria.get("sweepstakes_" + level));
                context.put("winners", Retrieve.sweepstakesWinners(level));
            } else if (type.equals("visualizations")) {
                Map<Test, Statistics> statistics;
                try {
                    statistics = Retrieve.visualizations(level);
                } catch (JSONException e) {
                    resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.toString());
                    e.printStackTrace();
                    return;
                }

                context.put("statistics", statistics);
            } else {
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid type: " + type);
                return;
            }
        } else {
            // No type requested: show the "available result pages" view.
            context.put("type", "avail");
        }
    } else {
        context.put("complete", false);
    }

    // School names for every level, used by the navigation drop-downs.
    Map<Level, List<String>> schools = new HashMap<>();
    for (Level level : Level.values()) {
        schools.put(level, Retrieve.schoolNames(level));
    }
    context.put("schools", schools);

    context.put("qualifyingCriteria", Retrieve.qualifyingCriteria(infoAndCookie.x));
    context.put("hideFullNames", contestInfo.getProperty("hideFullNames"));
    context.put("date", contestInfo.getProperty("updated"));
    context.put("subjects", Subject.values());
    context.put("Level", Level.class);
    context.put("levels", Level.values());
    context.put("esc", new EscapeTool());

    close(context, ve.getTemplate("publicResults.html"), resp);
}