Example usage for java.util ArrayList clear

List of usage examples for java.util ArrayList clear

Introduction

On this page you can find example usage for java.util.ArrayList.clear().

Prototype

public void clear() 

Document

Removes all of the elements from this list.
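
A minimal, self-contained sketch of the call (the list name and its contents below are hypothetical): clear() empties the list in place, so size() returns 0 afterwards, while the same ArrayList instance and its backing capacity remain available for reuse.

import java.util.ArrayList;

public class ClearExample {
    public static void main(String[] args) {
        ArrayList<String> names = new ArrayList<String>();
        names.add("alpha");
        names.add("beta");

        names.clear(); // removes every element; the same instance can be refilled

        System.out.println(names.size());    // 0
        System.out.println(names.isEmpty()); // true
    }
}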

Usage

From source file:org.apache.hadoop.mapred.NetCDFInputFormatPrunerByFileIndexMultiFileTwoDimensions.java

@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
    FileStatus[] files = listStatus(job);

    LOG.info("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] hive query is: "
            + job.get(HIVE_QUERY, "Kossher"));
    System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] hive query is: "
            + job.get(HIVE_QUERY, "Kossher"));

    /* Analyzing Query here */
    String hiveQuery = job.get(HIVE_QUERY, "Kossher");
    QueryType queryType = QueryType.NOLIMIT; // default mode
    /*
    if(hiveQuery.contains("where") || hiveQuery.contains("WHERE")) {
    if (hiveQuery.contains("time") || hiveQuery.contains("TIME")) {
        queryType = QueryType.TIME;
    } else if (hiveQuery.contains("lat") || hiveQuery.contains("LAT")) {
        queryType = QueryType.LAT;
    } else if (hiveQuery.contains("lon") || hiveQuery.contains("LON")) {
        queryType = QueryType.LON;
    }
    }
    */

    float latTopLimit = -1;
    float latBottomLimit = -1;
    float lonTopLimit = -1;
    float lonBottomLimit = -1;

    String[] querySplitted = hiveQuery.split(" ");
    for (int i = 0; i < querySplitted.length; i++) {
        if (querySplitted[i].equals("lat") || querySplitted[i].equals("LAT")) {
            if (querySplitted[i + 1].equals(">")) {
                latBottomLimit = Float.valueOf(querySplitted[i + 2]);
            } else if (querySplitted[i + 1].equals("<")) {
                latTopLimit = Float.valueOf(querySplitted[i + 2]);
            }
        }
        if (querySplitted[i].equals("lon") || querySplitted[i].equals("LON")) {
            if (querySplitted[i + 1].equals(">")) {
                lonBottomLimit = Float.valueOf(querySplitted[i + 2]);
            } else if (querySplitted[i + 1].equals("<")) {
                lonTopLimit = Float.valueOf(querySplitted[i + 2]);
            }
        }
    }
    System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] " + "latTopLimit=" + latTopLimit
            + ",latBottomLimit=" + latBottomLimit + ",lonTopLimit=" + lonTopLimit + ",lonBottomLimit="
            + lonBottomLimit);

    System.out.println("[SAMANPruner] beginning of getSplits");

    job.setLong(NUM_INPUT_FILES, files.length);
    long totalSize = 0; // compute total size
    for (FileStatus file : files) { // check we have valid files
        if (file.isDir()) {
            throw new IOException("Not a file: " + file.getPath());
        }
        totalSize += file.getLen();
    }

    // generate splits
    ArrayList<NetCDFFileSplit> splits = new ArrayList<NetCDFFileSplit>(numSplits);
    ArrayList<NetCDFFileSplit> finalSplits = new ArrayList<NetCDFFileSplit>();
    NetworkTopology clusterMap = new NetworkTopology();
    for (FileStatus file : files) {
        Path path = file.getPath();
        int fileIndex = 0;
        int dimIndex = 0;
        String[] parts = path.getName().split("-");
        dimIndex = Integer.valueOf(parts[1]);

        //LOG.info("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] File name is : " + path.getName());
        System.out.println(
                "[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] File name is : " + path.getName());
        FileSystem fs = path.getFileSystem(job);
        long length = file.getLen();
        BlockLocation[] blkLocations = fs.getFileBlockLocations(file, 0, length);
        if ((length != 0) && isSplitable(fs, path)) {
            long blockSize = file.getBlockSize();
            netInfo = getNetCDFInfo(path, fs, job);

            // First decide which files should be considered as the base to be read
            int latTopTemp = -1;
            if (latTopLimit == -1) {
                latTopTemp = result.latLength;
            } else {
                latTopTemp = Math.min(result.latLength, (int) latTopLimit);
            }

            int latBottomTemp = -1;
            if (latBottomLimit == -1) {
                latBottomTemp = 0;
            } else {
                latBottomTemp = Math.max(0, (int) latBottomLimit);
            }

            int lonTopTemp = -1;
            if (lonTopLimit == -1) {
                lonTopTemp = result.lonLength;
            } else {
                lonTopTemp = Math.min(result.lonLength, (int) lonTopLimit);
            }

            int lonBottomTemp = -1;
            if (lonBottomLimit == -1) {
                lonBottomTemp = 0;
            } else {
                lonBottomTemp = Math.max(0, (int) lonBottomLimit);
            }

            if ((latTopTemp - latBottomTemp) * 4 * result.lonLength
                    * result.timeLength < (lonTopTemp - lonBottomTemp) * 4 * result.latLength
                            * result.timeLength) {
                chooseLat = true;
            } else {
                chooseLat = false;
            }

            System.out.println("[SAMAN][NetCDFInputFormat][getSplits] chooseLat = " + chooseLat);

            if (chooseLat) {
                if (!path.getName().contains("lat"))
                    continue;
            } else {
                if (!path.getName().contains("lon"))
                    continue;
            }

            long recStart = netInfo.recStart;
            long[] chunkStarts = netInfo.chunkStarts;
            long smallSize = netInfo.smallRecSize;
            long recSize = netInfo.recSize;
            long splitSize = 0;
            int chunkIndex = 0;
            long bytesRemaining = chunkStarts[chunkStarts.length - 1] + recSize - recStart - 2 * smallSize;
            long thisStart = recStart; // file position
            long thisChunk = 0;
            long blockNo = 1;

            while (bytesRemaining > 0) {
                while (chunkIndex < chunkStarts.length && chunkStarts[chunkIndex] < blockNo * blockSize) {
                    chunkIndex++;
                }
                long tempStart = thisStart;
                long endChunk;
                if (chunkIndex >= chunkStarts.length) {
                    splitSize = chunkStarts[chunkStarts.length - 1] + recSize - thisStart - smallSize;

                } else {
                    splitSize = chunkStarts[chunkIndex] - thisStart - smallSize;
                    thisStart = chunkStarts[chunkIndex];
                }
                endChunk = chunkIndex;
                blockNo++;
                //LOG.info( "[SAMAN] NetCDFInputFormatPruner.getSplits => splitSize="+splitSize+", thisStart="+thisStart+
                //        ", endChunk="+endChunk+", blockNo="+blockNo);
                System.out.println("[SAMAN] NetCDFInputFormatPruner.getSplits => splitSize=" + splitSize
                        + ", thisStart=" + thisStart + ", endChunk=" + endChunk + ", blockNo=" + blockNo);
                String[] splitHosts = getSplitHosts(blkLocations, tempStart, splitSize, clusterMap);
                NetCDFFileSplit split = new NetCDFFileSplit(path, tempStart, splitSize, splitHosts);

                if (chooseLat) {
                    if (latTopTemp < thisChunk) {
                        bytesRemaining -= splitSize;
                        thisChunk = endChunk;
                        continue;
                    }
                    if (latBottomTemp > endChunk) {
                        bytesRemaining -= splitSize;
                        thisChunk = endChunk;
                        continue;
                    }

                    blockToNodes.put(split, splitHosts);

                    // Put the nodes with the specified split into the node to block set
                    for (int i = 0; i < splitHosts.length; i++) {
                        Set<NetCDFFileSplit> splitList = nodeToBlocks.get(splitHosts[i]);
                        if (splitList == null) {
                            splitList = new LinkedHashSet<NetCDFFileSplit>();
                            nodeToBlocks.put(splitHosts[i], splitList);
                        }
                        splitList.add(split);
                    }

                    // For the test, we would assign everything statically.
                    if (latBottomLimit > thisChunk) {
                        System.out
                                .println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] startChunk = "
                                        + latBottomLimit);
                        split.getFileSplit().startChunk.add((long) latBottomLimit);
                    } else {
                        split.getFileSplit().startChunk.add(thisChunk);
                    }
                    if (latTopLimit < endChunk) {
                        System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] endChunk = "
                                + latTopLimit);
                        split.getFileSplit().endChunk.add((long) latTopLimit);
                    } else {
                        split.getFileSplit().endChunk.add(endChunk);
                    }

                    split.getFileSplit().secondDimStartChunk.add((long) lonBottomTemp);
                    split.getFileSplit().secondDimEndChunk.add((long) lonTopTemp);
                }
                if (!chooseLat) {
                    if (lonTopTemp < thisChunk) {
                        bytesRemaining -= splitSize;
                        thisChunk = endChunk;
                        continue;
                    }
                    if (lonBottomTemp > endChunk) {
                        bytesRemaining -= splitSize;
                        thisChunk = endChunk;
                        continue;
                    }

                    blockToNodes.put(split, splitHosts);

                    // Put the nodes with the specified split into the node to block set
                    for (int i = 0; i < splitHosts.length; i++) {
                        Set<NetCDFFileSplit> splitList = nodeToBlocks.get(splitHosts[i]);
                        if (splitList == null) {
                            splitList = new LinkedHashSet<NetCDFFileSplit>();
                            nodeToBlocks.put(splitHosts[i], splitList);
                        }
                        splitList.add(split);
                    }

                    if (lonBottomLimit > thisChunk) {
                        System.out
                                .println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] startChunk = "
                                        + lonBottomLimit);
                        split.getFileSplit().startChunk.add((long) lonBottomLimit);
                    } else {
                        split.getFileSplit().startChunk.add(thisChunk);
                    }
                    if (lonTopLimit < endChunk) {
                        System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndex][getSplits] endChunk = "
                                + lonTopLimit);
                        split.getFileSplit().endChunk.add((long) lonTopLimit);
                    } else {
                        split.getFileSplit().endChunk.add(endChunk);
                    }

                    split.getFileSplit().secondDimStartChunk.add((long) latBottomTemp);
                    split.getFileSplit().secondDimEndChunk.add((long) latTopTemp);
                }

                splits.add(split);

                bytesRemaining -= splitSize;
                thisChunk = endChunk;
                //LOG.info( "[SAMAN] NetCDFInputFormatPruner.getSplits => bytesRemaining="+bytesRemaining+", thisChunk="+thisChunk );
                //System.out.println( "[SAMAN] NetCDFInputFormatPruner.getSplits => bytesRemaining="+bytesRemaining+", thisChunk="+thisChunk );
            }

        } else if (length != 0) {
            String[] splitHosts = getSplitHosts(blkLocations, 0, length, clusterMap);
            //splits.add(new FileSplit(path, 0, length, splitHosts));
        } else {
            //Create empty hosts array for zero length files
            //splits.add(new FileSplit(path, 0, length, new String[0]));
        }
    }

    // Now it's time to merge non-complete splits.
    // Check if each split has enough space to include another split too

    Set<String> completedNodes = new HashSet<String>();
    ArrayList<NetCDFFileSplit> validBlocks = new ArrayList<NetCDFFileSplit>();
    long curSplitSize = 0;
    Multiset<String> splitsPerNode = HashMultiset.create();

    for (Iterator<Map.Entry<String, Set<NetCDFFileSplit>>> iter = nodeToBlocks.entrySet().iterator(); iter
            .hasNext();) {
        Map.Entry<String, Set<NetCDFFileSplit>> one = iter.next();
        String node = one.getKey();

        System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] node is = " + node);

        // Skip the node if it has previously been marked as completed.
        if (completedNodes.contains(node)) {
            continue;
        }

        Set<NetCDFFileSplit> blocksInCurrentNode = one.getValue();

        // for each block, copy it into validBlocks. Delete it from
        // blockToNodes so that the same block does not appear in
        // two different splits.
        Iterator<NetCDFFileSplit> oneBlockIter = blocksInCurrentNode.iterator();
        while (oneBlockIter.hasNext()) {
            NetCDFFileSplit oneblock = oneBlockIter.next();

            System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] " + "split is: "
                    + oneblock.getFileSplit().getPath());

            // Remove all blocks which may already have been assigned to other
            // splits.
            if (!blockToNodes.containsKey(oneblock)) {
                oneBlockIter.remove();
                continue;
            }

            validBlocks.add(oneblock);
            if (chooseLat) {
                curSplitSize += (oneblock.getFileSplit().endChunk.get(0)
                        - oneblock.getFileSplit().startChunk.get(0)) * 4 * netInfo.lonLength
                        * netInfo.timeLength;
            } else {
                curSplitSize += (oneblock.getFileSplit().endChunk.get(0)
                        - oneblock.getFileSplit().startChunk.get(0)) * 4 * netInfo.latLength
                        * netInfo.timeLength;
            }
            blockToNodes.remove(oneblock);
            System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] curSplitSize = "
                    + curSplitSize);

            //curSplitSize += singleSplitSize;

            System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] "
                    + "Added to valid blocks!");

            // if the accumulated split size exceeds the maximum, then
            // create this split.
            if (blockSize != 0 && curSplitSize >= blockSize) {
                // create an input split and add it to the splits array
                addCreatedSplit(finalSplits, Collections.singleton(node), validBlocks);
                //totalLength -= curSplitSize;

                System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] "
                        + "addCreatedSplit called!");

                curSplitSize = 0;
                splitsPerNode.add(node);

                // Remove entries from blocksInNode so that we don't walk these
                // again.
                //blocksInCurrentNode.removeAll(validBlocks);
                validBlocks.clear();

                // Done creating a single split for this node. Move on to the next
                // node so that splits are distributed across nodes.
                //break;
            }

        }
        if (!validBlocks.isEmpty()) {
            System.out.println(
                    "[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] validBlocks not empty!");
            addCreatedSplit(finalSplits, Collections.singleton(node), validBlocks);
            curSplitSize = 0;
            splitsPerNode.add(node);
            blocksInCurrentNode.removeAll(validBlocks);
            validBlocks.clear();
        }
    }

    Set<NetCDFFileSplit> singleSplitsSet = blockToNodes.keySet();
    Iterator<NetCDFFileSplit> itrSingle = singleSplitsSet.iterator();
    while (itrSingle.hasNext()) {
        NetCDFFileSplit temp = itrSingle.next();
        addCreatedSingleSplit(finalSplits, temp.getLocations(), temp);
    }

    Iterator<NetCDFFileSplit> itr = finalSplits.iterator();
    while (itr.hasNext()) {

        NetCDFFileSplit temp = itr.next();

        String[] locations = temp.getFileSplit().getLocations();
        String locationsString = "";
        for (int i = 0; i < locations.length; i++)
            locationsString += locations[i];

        String pathsString = "";
        List<Path> paths = temp.getFileSplit().getPaths();
        for (Path path : paths)
            pathsString += path.getName() + ",";

        String startsString = "";
        List<Long> starts = temp.getFileSplit().startChunk;
        for (Long start : starts)
            startsString += (start + ",");

        String endsString = "";
        List<Long> ends = temp.getFileSplit().endChunk;
        for (Long end : ends)
            endsString += (end + ",");

        System.out.println("[SAMAN][NetCDFInputFormatPrunerByFileIndexMultiFile][getSplits] " + "locations="
                + locationsString + "," + "paths=" + pathsString + "," + "starts=" + startsString + ","
                + "ends=" + endsString + ",");
    }

    return finalSplits.toArray(new NetCDFFileSplit[finalSplits.size()]);

}
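
In the example above, clear() is what makes validBlocks reusable: the accumulated blocks are handed to addCreatedSplit, and then validBlocks.clear() resets the same list for the next node or the next merged split. Below is a minimal sketch of that accumulate, flush, clear pattern under assumed names (BatchAccumulator, THRESHOLD and the integer payload are hypothetical). Note the copy before clearing, since clear() would otherwise empty the list the emitted batch still refers to.

import java.util.ArrayList;
import java.util.List;

public class BatchAccumulator {
    private static final int THRESHOLD = 3; // hypothetical batch size

    public static void main(String[] args) {
        List<List<Integer>> batches = new ArrayList<List<Integer>>();
        ArrayList<Integer> current = new ArrayList<Integer>();

        for (int i = 0; i < 10; i++) {
            current.add(i);
            if (current.size() >= THRESHOLD) {
                batches.add(new ArrayList<Integer>(current)); // copy before reuse
                current.clear(); // reuse the same list for the next batch
            }
        }
        if (!current.isEmpty()) {
            batches.add(new ArrayList<Integer>(current));
            current.clear();
        }
        System.out.println(batches); // [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
    }
}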

From source file:com.krawler.crm.hrmsintegration.bizservice.GoalManagementServiceImpl.java

public JSONObject completedGoalReport(String userid, String companyid, DateFormat dateFormat, int relatedTo,
        int leadPerm, int accountPerm, int oppPerm, Long fromDate, Long toDate, String searchStr,
        String startStr, String limitStr, boolean exportall, Locale locale, String timeZoneDiff)
        throws ServiceException {
    JSONObject jobj = new JSONObject();
    KwlReturnObject kmsg = null;
    JSONArray jarr = new JSONArray();
    int count = 0;
    int start = 0;
    int limit = 25;
    int offSet = TimeZone.getTimeZone("GMT" + timeZoneDiff).getOffset(System.currentTimeMillis());
    try {
        HashMap<String, Object> requestParams = new HashMap<String, Object>();

        ArrayList<String> filter_names = new ArrayList<String>();
        ArrayList<Object> filter_params = new ArrayList<Object>();

        filter_names.add("c.userID.userID");
        filter_names.add("c.userID.company.companyID");
        filter_names.add("c.deleted");
        filter_params.add(userid);
        filter_params.add(companyid);
        filter_params.add(false);
        if (relatedTo > 0) {
            filter_names.add("c.relatedto");
            filter_params.add(relatedTo);
        } else if (relatedTo == 0) { // Kuldeep Singh: when the user selects the 'All' goals view, fetch goal data according to each module's permissions
            StringBuffer relatedlist = new StringBuffer();
            if ((leadPerm & 1) == 1) {
                relatedlist.append("" + 1 + ",");
                relatedlist.append("" + 2 + ",");
            }
            if ((accountPerm & 1) == 1) {
                relatedlist.append("" + 3 + ",");
                relatedlist.append("" + 4 + ",");
            }
            if ((oppPerm & 1) == 1) {
                relatedlist.append("" + 5 + ",");
                relatedlist.append("" + 6 + ",");
            }
            String rltedlist = "0";
            if (relatedlist.length() > 0) {
                rltedlist = relatedlist.substring(0, (relatedlist.length() - 1));
            }
            filter_names.add("INc.relatedto");
            filter_params.add(rltedlist);
        }
        if (toDate != null && fromDate != null) {
            Date fmDate = new Date(fromDate);
            Date tDate = new Date(toDate);
            filter_names.add(">=c.startdate");
            filter_names.add("<=c.startdate");
            filter_params.add(dateOnlyFormat.parse(dateOnlyFormat.format(fmDate)).getTime());
            filter_params.add(dateOnlyFormat.parse(dateOnlyFormat.format(tDate)).getTime());
        }

        filter_names.add("<=c.enddate");
        Date now = new Date();
        now = dateOnlyFormat.parse(dateOnlyFormat.format(now));
        filter_params.add(now.getTime());

        requestParams.clear();
        if (startStr != null) {
            start = Integer.parseInt(startStr);
            limit = Integer.parseInt(limitStr);
        }
        if (!StringUtil.isNullOrEmpty(searchStr)) {
            requestParams.put("ss", searchStr);
        }
        if (!exportall) {
            requestParams.put("start", start);
            requestParams.put("limit", limit);
        }
        kmsg = hrmsIntDAOObj.getFinalGoals(requestParams, filter_names, filter_params);
        count = kmsg.getRecordTotalCount();
        if (count > 0) {
            User empuser = profileHandlerDAOObj.getUserObject(userid);
            List<Finalgoalmanagement> finalGoalmanagementList = kmsg.getEntityList();
            for (Finalgoalmanagement fgmt : finalGoalmanagementList) {
                JSONObject tmpObj = new JSONObject();
                double dl = 0;
                String relatedName = "";
                double percentageTarget = 0;
                filter_names.clear();
                filter_params.clear();
                requestParams.clear();
                requestParams.put("distinctFlag", true);

                dl = getAchievedTarget(fgmt, companyid, empuser);
                percentageTarget = getPercentageTarget(fgmt, dl);
                int reltTo = fgmt.getRelatedto() == null ? 0 : fgmt.getRelatedto();
                relatedName = getGoalName(reltTo);
                String gid = fgmt.getId();
                int pastGoals = getPastGoal(fgmt, percentageTarget);
                DecimalFormat decimalFormat = new DecimalFormat("#0");
                String percentAchvd = decimalFormat.format(percentageTarget) + " %";

                tmpObj.put("gid", gid);
                tmpObj.put("gname", fgmt.getGoalname());
                tmpObj.put("empname", empuser.getFirstName() + " " + empuser.getLastName());
                tmpObj.put("empid", empuser.getUserID());
                tmpObj.put("gdescription", fgmt.getGoaldesc());
                tmpObj.put("targeted", fgmt.getTargeted());
                tmpObj.put("relatedto", reltTo);
                tmpObj.put("relatedName", relatedName);
                tmpObj.put("achieved", dl != 0.0 ? dl : "0");
                tmpObj.put("percentageachieved", percentageTarget != 0.0 ? percentAchvd : "0");
                tmpObj.put("pastgoals", pastGoals);
                tmpObj.put("gstartdate", dateFormat.format(fgmt.getStartdate() + offSet));
                tmpObj.put("genddate", dateFormat.format(fgmt.getEnddate() + offSet));
                tmpObj.put("gassignedby", fgmt.getManager().getFirstName() + " "
                        + (fgmt.getManager().getLastName() == null ? "" : fgmt.getManager().getLastName()));
                jarr.put(tmpObj);
            }
        }
        jobj.put("coldata", jarr);
        jobj.put("data", jarr);
        jobj.put("totalCount", count);

        JSONObject jobjTemp = new JSONObject();
        JSONArray jarrColumns = new JSONArray();
        JSONArray jarrRecords = new JSONArray();
        JSONObject jMeta = new JSONObject();

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goalsettings.header.empname", null, locale));//"Employee Name");
        jobjTemp.put("tip", messageSource.getMessage("crm.goalsettings.header.empname", null, locale));//"Employee Name");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("dataIndex", "empname");
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goals.header.goaltype", null, locale));//"Goal Type");
        jobjTemp.put("tip", messageSource.getMessage("crm.goals.header.goaltype", null, locale));//"Goal Type");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("dataIndex", "relatedName");
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goals.header.target", null, locale));//"Target");
        jobjTemp.put("tip", messageSource.getMessage("crm.goals.header.target", null, locale));//"Target");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("align", "right");
        jobjTemp.put("dataIndex", "targeted");
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goals.header.achieved", null, locale));//"Achieved");
        jobjTemp.put("tip", messageSource.getMessage("crm.goals.header.achieved", null, locale));//"Achieved");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("align", "right");
        jobjTemp.put("dataIndex", "achieved");
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goals.header.percentageachieved", null, locale));//"Percentage Achieved");
        jobjTemp.put("tip", messageSource.getMessage("crm.goals.header.percentageachieved", null, locale));//"Percentage Achieved");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("align", "right");
        jobjTemp.put("dataIndex", "percentageachieved");
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goals.header.fromdate", null, locale));//"From Date");
        jobjTemp.put("tip", messageSource.getMessage("crm.goals.header.fromdate", null, locale));//"From Date");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("title", "gstartdate");
        jobjTemp.put("dataIndex", "gstartdate");
        jobjTemp.put("align", "center");
        jobjTemp.put("renderer", crmManagerCommon.dateRendererReport());
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goals.header.todate", null, locale));//"To Date");
        jobjTemp.put("tip", messageSource.getMessage("crm.goals.header.todate", null, locale));//"To Date");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("title", "gstartdate");
        jobjTemp.put("dataIndex", "genddate");
        jobjTemp.put("align", "center");
        jobjTemp.put("renderer", crmManagerCommon.dateRendererReport());
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("header", messageSource.getMessage("crm.goals.header.assignedby", null, locale));//"Assigned By");
        jobjTemp.put("tip", messageSource.getMessage("crm.goals.header.assignedby", null, locale));//"Assigned By");
        jobjTemp.put("pdfwidth", 60);
        jobjTemp.put("dataIndex", "gassignedby");
        jarrColumns.put(jobjTemp);

        jobjTemp = new JSONObject();
        jobjTemp.put("name", "empname");
        jarrRecords.put(jobjTemp);
        jobjTemp = new JSONObject();
        jobjTemp.put("name", "relatedName");
        jarrRecords.put(jobjTemp);
        jobjTemp = new JSONObject();
        jobjTemp.put("name", "targeted");
        jarrRecords.put(jobjTemp);
        jobjTemp = new JSONObject();
        jobjTemp.put("name", "achieved");
        jarrRecords.put(jobjTemp);
        jobjTemp = new JSONObject();
        jobjTemp.put("name", "percentageachieved");
        jarrRecords.put(jobjTemp);
        jobjTemp = new JSONObject();
        jobjTemp.put("name", "gassignedby");
        jarrRecords.put(jobjTemp);
        jobjTemp = new JSONObject();
        jobjTemp.put("name", "gstartdate");
        jobjTemp.put("type", "date");
        jarrRecords.put(jobjTemp);
        jobjTemp = new JSONObject();
        jobjTemp.put("name", "genddate");
        jobjTemp.put("type", "date");
        jarrRecords.put(jobjTemp);
        jobj.put("columns", jarrColumns);

        jMeta.put("totalProperty", "totalCount");
        jMeta.put("root", "coldata");
        jMeta.put("fields", jarrRecords);
        jMeta.put("id", "asd");
        jobj.put("metaData", jMeta);

    } catch (JSONException e) {
        logger.warn("JSONException exception in completedGoalReport()", e);
        throw ServiceException.FAILURE("GoalManagementServiceImpl.assignedgoalsdelete", e);
    } catch (ServiceException e) {
        logger.warn("ServiceException exception in completedGoalReport()", e);
        throw ServiceException.FAILURE("GoalManagementServiceImpl.assignedgoalsdelete", e);
    } catch (Exception e) {
        logger.warn("General exception in completedGoalReport()", e);
        throw ServiceException.FAILURE("GoalManagementServiceImpl.assignedgoalsdelete", e);
    }
    return jobj;
}
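
In completedGoalReport, clear() is called on filter_names, filter_params and requestParams inside the result loop so the same collections are repopulated for each goal instead of being reallocated. A small sketch of that reuse pattern (the filter key and the userIds array below are hypothetical placeholders):

import java.util.ArrayList;
import java.util.List;

public class FilterReuse {
    public static void main(String[] args) {
        List<String> filterNames = new ArrayList<String>();
        List<Object> filterParams = new ArrayList<Object>();

        String[] userIds = { "u1", "u2", "u3" };
        for (String userId : userIds) {
            filterNames.clear();   // drop the previous iteration's criteria
            filterParams.clear();

            filterNames.add("c.userID.userID");
            filterParams.add(userId);

            System.out.println("query with " + filterNames + " = " + filterParams);
        }
    }
}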

From source file:com.att.nsa.cambria.service.impl.EventsServiceImpl.java

/**
 * @param ctx
 * @param inputStream
 * @param topic
 * @param partitionKey
 * @param requestTime
 * @param chunked
 * @param mediaType
 * @throws ConfigDbException
 * @throws AccessDeniedException
 * @throws TopicExistsException
 * @throws IOException
 * @throws CambriaApiException
 */
private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
        final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
        throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
        CambriaApiException {

    final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();

    // setup the event set
    final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);

    // start processing, building a batch to push to the backend
    final long startMs = System.currentTimeMillis();
    long count = 0;
    long maxEventBatch = 1024 * 16;
    String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
    if (null != evenlen)
        maxEventBatch = Long.parseLong(evenlen);
    //final long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
    final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
    final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();

    Publisher.message m = null;
    int messageSequence = 1;
    Long batchId = 1L;
    final boolean transactionEnabled = true;
    int publishBatchCount = 0;
    SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");

    //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
    try {
        // for each message...
        batchId = DMaaPContext.getBatchID();

        String responseTransactionId = null;

        while ((m = events.next()) != null) {

            //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));

            addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
                    transactionEnabled);
            messageSequence++;

            // add the message to the batch
            batch.add(m);

            responseTransactionId = m.getLogDetails().getTransactionId();

            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", m.getMessage());
            jsonObject.put("transactionId", responseTransactionId);
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
                    jsonObject.toString());
            kms.add(data);

            // check if the batch is full
            final int sizeNow = batch.size();
            if (sizeNow >= maxEventBatch) {
                String startTime = sdf.format(new Date());
                LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                        + batchId + "]");
                try {
                    ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                    //transactionLogs(batch);
                    for (message msg : batch) {
                        LogDetails logDetails = msg.getLogDetails();
                        LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                    }
                } catch (Exception excp) {

                    int status = HttpStatus.SC_NOT_FOUND;
                    String errorMsg = null;
                    if (excp instanceof CambriaApiException) {
                        status = ((CambriaApiException) excp).getStatus();
                        JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                        JSONObject errObject = new JSONObject(jsonTokener);
                        errorMsg = (String) errObject.get("message");
                    }
                    ErrorResponse errRes = new ErrorResponse(status,
                            DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                            "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                    + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                            null, Utils.getFormattedDate(new Date()), topic,
                            Utils.getUserApiKey(ctx.getRequest()), ctx.getRequest().getRemoteHost(), null,
                            null);
                    LOG.info(errRes.toString());
                    throw new CambriaApiException(errRes);
                }
                kms.clear();
                batch.clear();
                metricsSet.publishTick(sizeNow);
                publishBatchCount = sizeNow;
                count += sizeNow;
                //batchId++;
                String endTime = sdf.format(new Date());
                LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                        + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                        + ",Batch End Time=" + endTime + "]");
                batchId = DMaaPContext.getBatchID();
            }
        }

        // send the pending batch
        final int sizeNow = batch.size();
        if (sizeNow > 0) {
            String startTime = sdf.format(new Date());
            LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                    + batchId + "]");
            try {
                ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                //transactionLogs(batch);
                for (message msg : batch) {
                    LogDetails logDetails = msg.getLogDetails();
                    LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                }
            } catch (Exception excp) {
                int status = HttpStatus.SC_NOT_FOUND;
                String errorMsg = null;
                if (excp instanceof CambriaApiException) {
                    status = ((CambriaApiException) excp).getStatus();
                    JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                    JSONObject errObject = new JSONObject(jsonTokener);
                    errorMsg = (String) errObject.get("message");
                }

                ErrorResponse errRes = new ErrorResponse(status,
                        DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                        "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                        null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                        ctx.getRequest().getRemoteHost(), null, null);
                LOG.info(errRes.toString());
                throw new CambriaApiException(errRes);
            }
            kms.clear();
            metricsSet.publishTick(sizeNow);
            count += sizeNow;
            //batchId++;
            String endTime = sdf.format(new Date());
            publishBatchCount = sizeNow;
            LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                    + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                    + ",Batch End Time=" + endTime + "]");
        }

        final long endMs = System.currentTimeMillis();
        final long totalMs = endMs - startMs;

        LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);

        if (null != responseTransactionId) {
            ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
        }

        // build a response
        final JSONObject response = new JSONObject();
        response.put("count", count);
        response.put("serverTimeMs", totalMs);
        DMaaPResponseBuilder.respondOk(ctx, response);

    } catch (Exception excp) {
        int status = HttpStatus.SC_NOT_FOUND;
        String errorMsg = null;
        if (excp instanceof CambriaApiException) {
            status = ((CambriaApiException) excp).getStatus();
            JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
            JSONObject errObject = new JSONObject(jsonTokener);
            errorMsg = (String) errObject.get("message");
        }

        ErrorResponse errRes = new ErrorResponse(status,
                DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                        + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                ctx.getRequest().getRemoteHost(), null, null);
        LOG.info(errRes.toString());
        throw new CambriaApiException(errRes);
    }
}
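
In pushEventsWithTransaction, kms and batch are final locals, so after each sendBatchMessage call they are reset with kms.clear() and batch.clear() rather than being reassigned. A tiny sketch of that constraint (the list name and contents are hypothetical): clear() mutates the existing list and is therefore legal on a final reference, while reassignment is not.

import java.util.ArrayList;

public class ClearFinalList {
    public static void main(String[] args) {
        final ArrayList<String> kms = new ArrayList<String>();
        kms.add("keyed-message-1");
        kms.add("keyed-message-2");

        // kms = new ArrayList<String>(); // would not compile: kms is final
        kms.clear(); // allowed: mutates the existing list in place

        System.out.println(kms.size()); // 0
    }
}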

From source file:com.musicplayer.AudioDecoderThread.java

/**
 * After decoding AAC, play it using AudioTrack.
 */

public void processTrack(Uri syncContentUri, final Genre classLabel, Context context,
        ProcessTrackRunnable lock) {

    // INITIALISE EXTRACTOR AND DECODER
    Log.v("", "Break Point 1");

    MediaExtractor extractor = new MediaExtractor();
    int sampleRate = 0;
    Uri contentUri = null;
    synchronized (lock) {
        contentUri = syncContentUri;
    }
    try {
        extractor.setDataSource(context, contentUri, null);
    } catch (IOException e) {
        e.printStackTrace();
    }
    int channel = 0;

    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("audio/")) {
            extractor.selectTrack(i);
            Log.d("", "format : " + format);
            //            ByteBuffer csd = format.getByteBuffer("csd-0");
            //            if(csd == null){
            //            Log.v("", "csd is null");
            //            } else{
            //               Log.v("", "csd is not null");
            //            }
            //            for (int k = 0; k < csd.capacity(); ++k) {
            //               Log.v("", "inside for loop 1");
            //               Log.e("TAG", "csd : " + csd.array()[k]);
            //            }
            sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            channel = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            break;
        }
    }
    //      MediaFormat format = makeAACCodecSpecificData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, mSampleRate, channel);
    //      if (format == null)
    //         return;
    int countt = 0;
    boolean found = false;
    MediaFormat format = null;
    String mime = null;

    while (countt < extractor.getTrackCount() && !found) {
        format = extractor.getTrackFormat(countt);
        mime = format.getString(MediaFormat.KEY_MIME);
        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        if (mime.startsWith("audio/")) {
            found = true;
        }
        countt++;
    }
    //format = mExtractor.getTrackFormat(count);
    //MediaCodecInfo codec = selectCodec(mime);
    //String name = codec.getName();
    if (mime == null || !mime.startsWith("audio/")) {
        Log.e("DecodeActivity", "Can't find audio track info!");
        return;
    }

    MediaCodec decoder;
    try {
        decoder = MediaCodec.createDecoderByType(mime);
    } catch (IOException e) {
        e.printStackTrace();
        return;
    }

    //mDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
    decoder.configure(format, null, null, 0);

    decoder.start();

    Log.v("", "Break Point 2");

    // Get decoded bytes

    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();

    BufferInfo info = new BufferInfo();

    //      int buffsize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    //        // create an audiotrack object
    //      AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
    //                AudioFormat.CHANNEL_OUT_STEREO,
    //                AudioFormat.ENCODING_PCM_16BIT,
    //                buffsize,
    //                AudioTrack.MODE_STREAM);
    //      audioTrack.play();

    extractor.seekTo(WINDOW_START, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

    long start = SystemClock.elapsedRealtimeNanos();

    Log.v("", "Break Point 3");

    // MUSICAL SURFACE FEATURES

    double[] flux = new double[NUM_CHUNKS];
    double[] zeroCrossings = new double[NUM_CHUNKS];
    double[] centroid = new double[NUM_CHUNKS];
    int[] rolloff = new int[NUM_CHUNKS];
    double[] rolloffFreq = new double[NUM_CHUNKS];
    double lowEnergy = 0.0;

    // Means across all chunks
    double fluxMean = 0.0;
    double zeroCrossingsMean = 0;
    double centroidMean = 0.0;
    double rolloffMean = 0;

    // Standard deviations across all chunks
    double fluxStdDeviation = 0.0;
    double zeroCrossingsStdDeviation = 0;
    double centroidStdDeviation = 0.0;
    double rolloffStdDeviation = 0;

    // Initialise some variables to use while iterating
    double[] fftSums = new double[NUM_CHUNKS];
    int iter = 0;
    int count = 0;
    FastFourierTransformer transformer = new FastFourierTransformer(DftNormalization.STANDARD);
    double po2 = 0.0;
    Complex[] input = null;
    Complex[] output = null;
    Complex[] previousOutput = null;
    Complex[] temp = null;
    double frequency = 0.0;
    double centroidNum = 0.0;
    double centroidDen = 0.0;
    double fftValue = 0.0;
    double fftPrevious = 0.0;
    double fluxSquared = 0.0;
    int r = 0;
    boolean foundRolloff = false;
    double sum = 0;
    ArrayList<Double> data = new ArrayList<Double>();
    ArrayList<Double> currentChunk = new ArrayList<Double>();
    int gap = 0;
    int tempCount = 0;
    byte[] chunk = null;
    ArrayList<Double> outputExample = new ArrayList<Double>();
    double normConst = 0.0;

    // Iterate through the chunks
    Log.v("", "count: " + String.valueOf(count));
    while (!eosReceived && count < NUM_CHUNKS) {
        Log.v("", "Break Point " + String.valueOf(count + 4));
        Log.v("", "Inside While Loop Break Point 1");
        if (count == 0) {
            //   Log.v("", "Timestamp of chunk 0: " + String.valueOf(extractor.getSampleTime()));
        }

        int inIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            ByteBuffer buffer = inputBuffers[inIndex];
            int sampleSize = extractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                // We shouldn't stop the playback at this point, just pass the EOS
                // flag to mDecoder, we will get it again from the
                // dequeueOutputBuffer
                //Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);

            } else {
                decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                extractor.advance();
            }

            int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
            Log.v("", "Inside While Loop Break Point 2");
            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;

            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                MediaFormat mediaFormat = decoder.getOutputFormat();
                Log.d("DecodeActivity", "New format " + mediaFormat);
                //   audioTrack.setPlaybackRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE));

                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;

            default:

                Log.v("", "Inside While Loop Break Point 3");
                ByteBuffer outBuffer = outputBuffers[outIndex];
                //Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outBuffer);

                chunk = new byte[info.size];
                if (chunk.length == 0) {
                    continue;
                }
                outBuffer.get(chunk); // Read the buffer all at once
                outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN

                gap = chunk.length / DOWN_FACTOR;
                currentChunk.clear();
                Log.v("", "Inside While Loop Break Point 4a");
                // ZERO CROSSINGS

                int increment = 1;
                if (chunk.length > 1000) {
                    increment = (int) ((double) chunk.length / ((double) 1000));
                }

                // Downsampling
                for (int i = 0; i < chunk.length; i = i + increment) {
                    data.add((double) chunk[i]);
                    currentChunk.add((double) chunk[i]);
                    tempCount++;

                    if (currentChunk.size() > 1) {
                        iter += FastMath.abs(sign(currentChunk.get(currentChunk.size() - 1))
                                - sign(currentChunk.get(currentChunk.size() - 2)));

                    }
                }
                increment = 0;

                tempCount = 0;
                zeroCrossings[count] = 0.5 * iter;

                po2 = FastMath.ceil(FastMath.log(currentChunk.size()) / FastMath.log(2));
                input = new Complex[(int) (FastMath.pow(2.0, po2))];

                Log.v("", "chunk length: " + chunk.length);
                Log.v("", "input length: " + input.length);
                for (int i = 0; i < input.length; i++) {
                    if (i < currentChunk.size()) {
                        input[i] = new Complex((double) currentChunk.get(i));
                    } else {
                        input[i] = new Complex(0.0);
                    }
                }

                // FFT
                output = transformer.transform(input, TransformType.FORWARD);

                outputExample.add(centroidDen);

                // CENTROID AND FLUX      

                for (int i = 0; i < output.length; i++) {

                    if (count > 0) {
                        fftPrevious = fftValue;
                    }
                    fftValue = FastMath.hypot(output[i].getReal(), output[i].getImaginary());
                    fluxSquared += (fftValue - fftPrevious) * (fftValue - fftPrevious);

                    centroidNum += i * fftValue;
                    centroidDen += fftValue;

                }

                //               for(int i = 0; i < output.length; i++){
                //                  
                //                  normConst += FastMath.hypot(output[i].getReal(), output[i].getImaginary()) *
                //                        FastMath.hypot(output[i].getReal(), output[i].getImaginary());
                //                  
                //                  
                //               }

                //               fluxSquared = fluxSquared / normConst;
                flux[count] = FastMath.sqrt(fluxSquared) / 1000.0;

                // ROLLOFF

                while (!foundRolloff && r < output.length - 1) {
                    r++;
                    sum += FastMath.hypot(output[r].getReal(), output[r].getImaginary());
                    foundRolloff = checkRolloff(ROLLOFF_PROPORTIONAL_ERROR, sum, centroidDen);
                }

                fftSums[count] = centroidDen;
                if (centroidDen != 0.0) {
                    centroid[count] = centroidNum / centroidDen;
                } else {
                    centroid[count] = 0.0;
                }
                rolloff[count] = r;

                iter = 0;
                fluxSquared = 0.0;
                centroidNum = 0.0;
                centroidDen = 0.0;
                r = 0;
                sum = 0.0;
                foundRolloff = false;
                count++;
                //audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
                decoder.releaseOutputBuffer(outIndex, false);

                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }

            if (count > 0) {
                previousOutput = output;
                output = null;
            }
        }
        if (count == NUM_CHUNKS) {
            //   Log.v("", "Timestamp of last chunk: " + String.valueOf(extractor.getSampleTime()));
            decoder.stop();
            decoder.release();
            extractor.release();
        }

    } // while loop

    currentChunk.clear();
    currentChunk = null;

    //      for(int i = 0; i < centroid.length; i++){
    //      Log.v("", "centroid: " + String.valueOf(centroid[i]));
    //      }
    double energySum = 0.0;
    double energyAverage = 0.0;
    int lowEnergyCount = 0;

    for (int i = 0; i < NUM_CHUNKS; i++) {
        energySum += fftSums[i];
    }

    energyAverage = energySum / NUM_CHUNKS;
    for (int i = 0; i < NUM_CHUNKS; i++) {
        if (fftSums[i] < energyAverage) {
            lowEnergyCount++;
        }
    }

    lowEnergy = 100.0 * (((double) lowEnergyCount) / ((double) NUM_CHUNKS));

    // Work out the means and standard deviations

    for (int i = 0; i < NUM_CHUNKS; i++) {

        fluxMean += flux[i];
        zeroCrossingsMean += zeroCrossings[i];
        centroidMean += centroid[i];
        rolloffMean += rolloff[i];

    }

    fluxMean = fluxMean / flux.length;
    zeroCrossingsMean = zeroCrossingsMean / zeroCrossings.length;
    centroidMean = centroidMean / centroid.length;
    rolloffMean = rolloffMean / rolloff.length;

    for (int i = 0; i < NUM_CHUNKS; i++) {

        fluxStdDeviation += (flux[i] - fluxMean) * (flux[i] - fluxMean);
        zeroCrossingsStdDeviation += (zeroCrossings[i] - zeroCrossingsMean)
                * (zeroCrossings[i] - zeroCrossingsMean);
        centroidStdDeviation += (centroid[i] - centroidMean) * (centroid[i] - centroidMean);
        rolloffStdDeviation += (rolloff[i] - rolloffMean) * (rolloff[i] - rolloffMean);

    }

    fluxStdDeviation = Math.sqrt(fluxStdDeviation / flux.length);
    zeroCrossingsStdDeviation = Math.sqrt(zeroCrossingsStdDeviation / zeroCrossings.length);
    centroidStdDeviation = Math.sqrt(centroidStdDeviation / centroid.length);
    rolloffStdDeviation = Math.sqrt(rolloffStdDeviation / rolloff.length);

    Log.v("", "fluxMean: " + String.valueOf(fluxMean));
    Log.v("", "zeroCrossingsMean: " + String.valueOf(zeroCrossingsMean));
    Log.v("", "centroidMean: " + String.valueOf(centroidMean));
    Log.v("", "rolloffMean: " + String.valueOf(rolloffMean));

    Log.v("", "fluxStdDeviation: " + String.valueOf(fluxStdDeviation));
    Log.v("", "zeroCrossingsStdDeviation: " + String.valueOf(zeroCrossingsStdDeviation));
    Log.v("", "centroidStdDeviation: " + String.valueOf(centroidStdDeviation));
    Log.v("", "rolloffStdDeviation: " + String.valueOf(rolloffStdDeviation));

    Log.v("", "lowEnergy: " + String.valueOf(lowEnergy));

    Log.v("", "data size: " + String.valueOf(data.size()));

    // BEAT ANALYSIS

    Transform t = new Transform(new FastWaveletTransform(new Daubechies4()));

    double[] dataArray = new double[data.size()];
    for (int i = 0; i < data.size(); i++) {
        dataArray[i] = data.get(i);
    }
    data.clear();
    data = null;

    double powerOf2 = FastMath.ceil(FastMath.log(chunk.length) / FastMath.log(2));
    double[] dataArrayPo2 = Arrays.copyOf(dataArray, (int) (FastMath.pow(2.0, powerOf2)));
    dataArray = null;

    double[] dataCurrentInputArray = null;
    double[] dataCurrentOutputArray = null;
    double[] dataCumulativeArray = new double[dataArrayPo2.length];
    for (int i = 0; i < dataCumulativeArray.length; i++) {
        dataCumulativeArray[i] = 0.0;
    }
    double temp1 = 0.0;
    double temp2 = 0.0;
    ArrayList<Double> tempList = new ArrayList<Double>();
    int k = 16; // Downsampling factor
    int tempCount1 = 0;
    double mean = 0.0;
    for (int level = 0; level < (int) FastMath.log(2.0, dataArrayPo2.length); level++) {

        dataCurrentInputArray = t.forward(dataArrayPo2, level);
        dataCurrentOutputArray = dataCurrentInputArray;
        dataCurrentOutputArray[0] = 0.0;
        for (int i = 1; i < dataCurrentOutputArray.length; i++) {
            temp1 = FastMath.abs(dataCurrentInputArray[i]); // Full-wave rectification
            dataCurrentOutputArray[i] = (1.0 - ALPHA) * temp1 - ALPHA * dataCurrentOutputArray[i - 1]; // Low-pass filtering
        }
        tempCount1 = 0;
        mean = 0.0;
        while (k * tempCount1 < dataCurrentOutputArray.length) {
            tempList.add(dataCurrentOutputArray[k * tempCount1]); // Downsampling by k
            mean += dataCurrentOutputArray[k * tempCount1];
            tempCount1++;
        }
        mean = mean / dataCurrentOutputArray.length;

        tempCount1 = 0;
        while (k * tempCount1 < dataCurrentOutputArray.length) {
            dataCumulativeArray[k * tempCount1] += tempList.get(tempCount1) - mean; // Mean removal
            tempCount1++;
        }

    }
    int N = dataCumulativeArray.length;
    ArrayList<Double> dataList = new ArrayList<Double>();
    double dataElement = 0.0;

    for (int i = 0; i < N; i++) {
        if (dataCumulativeArray[i] != 0.0) {
            dataElement = autocorrelate(i, N, dataCumulativeArray);
            dataList.add(dataElement);
            Log.v("", "dataList: " + String.valueOf(dataElement));
        }
    }

    PeakDetector peakDetector = new PeakDetector(dataList);
    int[] peakIndices = peakDetector.process(5, 2);
    HashSet<Integer> hs = new HashSet<Integer>();
    for (int i = 0; i < peakIndices.length; i++) {
        hs.add(peakIndices[i]);
    }
    ArrayList<Integer> indicesList = new ArrayList<Integer>();
    ArrayList<Double> valuesList = new ArrayList<Double>();

    indicesList.addAll(hs);
    Double tempDoub = 0.0;

    HashMap<Double, Integer> hm = new HashMap<Double, Integer>();
    for (int i = 0; i < indicesList.size(); i++) {
        tempDoub = dataList.get(indicesList.get(i));
        hm.put(tempDoub, indicesList.get(i));
    }

    indicesList.clear();
    valuesList.clear();

    Entry<Double, Integer> tempEntry = null;
    Iterator<Entry<Double, Integer>> it = hm.entrySet().iterator();
    while (it.hasNext()) {
        tempEntry = (Entry<Double, Integer>) it.next();
        if (tempEntry.getValue() < 75) {
            it.remove();
        } else {
            //indicesList.add(tempEntry.getValue());
            valuesList.add(tempEntry.getKey());
        }
    }

    Collections.sort(valuesList);
    for (int i = 0; i < valuesList.size(); i++) {
        indicesList.add(hm.get(valuesList.get(i)));
    }

    double valuesSum = 0.0;
    double histogramSum = 0.0;

    double beatStrength = 0.0;
    double P1 = 0.0;
    double P2 = 0.0;
    double A1 = 0.0;
    double A2 = 0.0;
    double RA = 0.0;

    for (int i = 0; i < dataList.size(); i++) {
        histogramSum += dataList.get(i);
    }

    for (int i = 0; i < valuesList.size(); i++) {
        valuesSum += valuesList.get(i);
    }

    //      if(histogramSum != 0.0 && valuesList.size() != 0){
    //         SUM = (1000.0 * valuesSum) / (histogramSum * valuesList.size());
    //      }
    if (valuesList.size() != 0) {
        beatStrength = valuesSum / valuesList.size();
    }

    if (indicesList.size() > 0) {

        // Set P1 as the largest peak
        P1 = (double) indicesList.get(indicesList.size() - 1);

    }

    if (indicesList.size() > 1) {
        int beatCount = indicesList.size() - 2;
        boolean beatFound = false;

        // Start with P2 as the second largest peak
        P2 = (double) indicesList.get(indicesList.size() - 2);
        double diff = 0;

        // Iterate backwards through the peaks, largest to smallest
        while (!beatFound && beatCount > -1) {
            diff = ((double) indicesList.get(beatCount)) - P1;

            if (FastMath.abs(diff) / P1 > 0.3) {
                // Set P2 as the period of the first peak that is reasonably different from P1
                P2 = (double) indicesList.get(beatCount);
                beatFound = true;
            }
            beatCount--;
        }
    }

    if (indicesList.size() > 0) {

        A1 = FastMath.abs(dataList.get((int) P1)) / histogramSum;
        if (P2 != 0.0) {
            A2 = FastMath.abs(dataList.get((int) P2)) / histogramSum;
        }

        if (A1 != 0.0) {
            RA = A2 / A1;

        }
    }

    for (int i = 0; i < valuesList.size(); i++) {
        Log.v("", String.valueOf(i) + ") valuesList: " + String.valueOf(valuesList.get(i)));
    }
    Log.v("", "P1: " + String.valueOf(P1));
    Log.v("", "P2: " + String.valueOf(P2));
    Log.v("", "A1: " + String.valueOf(A1));
    Log.v("", "A2: " + String.valueOf(A2));
    Log.v("", "RA: " + String.valueOf(RA));
    Log.v("", "SUM: " + String.valueOf(histogramSum));
    Log.v("", "Number of Peaks: " + String.valueOf(valuesList.size()));
    double[] result = { fluxMean, zeroCrossingsMean, centroidMean, rolloffMean, fluxStdDeviation,
            zeroCrossingsStdDeviation, centroidStdDeviation, rolloffStdDeviation, lowEnergy, P1, P2, A1, A2, RA,
            histogramSum, valuesList.size() };
    final DenseInstance denseInstance = new DenseInstance(result);
    if (P1 + P2 + A1 + A2 + RA != 0.0) {
        Handler handler = new Handler(Looper.getMainLooper());
        handler.post(new ReturnResultsRunnable(lock, mAudioCallback, denseInstance, classLabel));

    } else {
        Log.v("", "Track could not be classified!");
    }

    //      for(int i = 0; i < dataList.size(); i++){
    //         Log.v("", String.valueOf(i) + ") autocorrelation: " + String.valueOf(dataList.get(i)));
    //         histogramSum += dataList.get(i);
    //      }
    //      Log.v("", "indicesList size: " + String.valueOf(indicesList.size()));
    //      for(int i = 0; i < valuesList.size(); i++){
    //         Log.v("", "indicesList: " + String.valueOf(indicesList.get(i)) + ", value: " + String.valueOf(valuesList.get(i)));
    //         valuesSum += valuesList.get(i);
    //      }
    //Classifier c = new KNearestNeighbors(5);

    //      double A0 = valuesList.get(valuesList.size() - 1) / valuesSum;
    //      double A1 = valuesList.get(valuesList.size() - 2) / valuesSum;
    //      double RA = A1 / A0;
    //      double P0 = 1 / ((double) indicesList.get(indicesList.size() - 1));
    //      double P1 = 1 / ((double) indicesList.get(indicesList.size() - 2));
    //      
    //      Log.v("", "A0: " + String.valueOf(A0));
    //      Log.v("", "A1: " + String.valueOf(A1));
    //      Log.v("", "RA: " + String.valueOf(RA));
    //      Log.v("", "P0: " + String.valueOf(P0));
    //      Log.v("", "P1: " + String.valueOf(P1));
    //      Log.v("", "SUM: " + String.valueOf(histogramSum));

    long durationNs = SystemClock.elapsedRealtimeNanos() - start; // elapsed time in nanoseconds
    double durationSecs = ((double) durationNs) / 1000000000.0;
    Log.v("", "count = " + String.valueOf(count) + ", Sample rate: " + String.valueOf(sampleRate)
            + ", Duration: " + String.valueOf(durationSecs));

    //      audioTrack.stop();
    //      audioTrack.release();
    //      audioTrack = null;
}
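The method above copies its ArrayList<Double> of samples into a primitive double[] and then calls clear() and drops the reference before the wavelet transform, so the boxed values can be garbage collected. A minimal, self-contained sketch of that copy-then-release pattern (the class name and sample values below are illustrative, not taken from the original code):

import java.util.ArrayList;

// Sketch: copy boxed samples into a primitive array, then clear() and drop
// the list so its backing storage can be reclaimed before heavy processing.
public class SampleBufferExample {
    public static void main(String[] args) {
        ArrayList<Double> data = new ArrayList<Double>();
        for (int i = 0; i < 1024; i++) {
            data.add(Math.sin(i / 16.0));
        }

        double[] dataArray = new double[data.size()];
        for (int i = 0; i < dataArray.length; i++) {
            dataArray[i] = data.get(i);
        }

        data.clear(); // removes all elements; size() becomes 0
        data = null;  // allow the list object itself to be collected

        System.out.println("Copied " + dataArray.length + " samples");
    }
}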

From source file:mom.trd.opentheso.bdd.helper.ConceptHelper.java

/**
 * Recursive function to find the complete path of a concept, starting from
 * the concept itself and walking up to the top term. Several top terms may
 * be encountered on the way up, in which case a complete path is built each
 * time.
 *
 * @param ds
 * @param idConcept
 * @param idThesaurus
 * @param firstPath
 * @param path
 * @param tabId
 * @return ArrayList containing all the BT paths of an id_terme, for example
 * (327,368,100,#,2251,5555,54544,8789,#): here two paths are available; the
 * path matching the current microthesaurus must be found so it can be
 * displayed first
 */
public ArrayList<ArrayList<String>> getInvertPathOfConcept(HikariDataSource ds, String idConcept,
        String idThesaurus, ArrayList<String> firstPath, ArrayList<String> path,
        ArrayList<ArrayList<String>> tabId) {

    RelationsHelper relationsHelper = new RelationsHelper();

    ArrayList<String> resultat = relationsHelper.getListIdBT(ds, idConcept, idThesaurus);
    if (resultat.size() > 1) {
        for (String path1 : path) {
            firstPath.add(path1);
        }
    }
    if (resultat.isEmpty()) {
        path.add(getGroupIdOfConcept(ds, idConcept, idThesaurus));
        ArrayList<String> pathTemp = new ArrayList<>();
        for (String path2 : firstPath) {
            pathTemp.add(path2);
        }
        for (String path1 : path) {
            if (pathTemp.indexOf(path1) == -1) {
                pathTemp.add(path1);
            }
        }
        tabId.add(pathTemp);
        path.clear();
    }

    for (String resultat1 : resultat) {
        path.add(resultat1);
        getInvertPathOfConcept(ds, resultat1, idThesaurus, firstPath, path, tabId);
    }

    return tabId;
}
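The recursion accumulates ancestor ids in path, copies each finished branch into tabId, and then calls path.clear() so the same working list can be reused for the next branch. A stripped-down, self-contained sketch of that accumulate-copy-clear pattern on an in-memory parent map (the class, map, and ids below are illustrative and omit the firstPath bookkeeping of the original):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch: walk up a parent map, recording one complete path per root reached,
// reusing a single working list via clear().
public class PathCollector {
    static Map<String, List<String>> parents = new HashMap<>();

    public static ArrayList<ArrayList<String>> collectPaths(String id, ArrayList<String> path,
            ArrayList<ArrayList<String>> tabId) {
        List<String> bts = parents.getOrDefault(id, new ArrayList<>());
        if (bts.isEmpty()) {
            tabId.add(new ArrayList<>(path)); // copy the finished branch
            path.clear();                     // reuse the working list for the next branch
        }
        for (String bt : bts) {
            path.add(bt);
            collectPaths(bt, path, tabId);
        }
        return tabId;
    }

    public static void main(String[] args) {
        parents.put("327", List.of("368", "2251"));
        parents.put("368", List.of("100"));
        parents.put("2251", List.of("5555"));
        System.out.println(collectPaths("327", new ArrayList<>(), new ArrayList<>()));
    }
}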

From source file:org.alfresco.repo.domain.node.AbstractNodeDAOImpl.java

@Override
public int touchNodes(Long txnId, List<Long> nodeIds) {
    // limit in clause to 1000 node ids
    int batchSize = 1000;

    int touched = 0;
    ArrayList<Long> batch = new ArrayList<Long>(batchSize);
    for (Long nodeId : nodeIds) {
        invalidateNodeCaches(nodeId);
        batch.add(nodeId);
        if (batch.size() % batchSize == 0) {
            touched += updateNodes(txnId, batch);
            batch.clear();
        }
    }
    if (batch.size() > 0) {
        touched += updateNodes(txnId, batch);
    }
    return touched;
}
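touchNodes flushes the accumulated ids every 1000 entries and then empties the same list with clear() rather than allocating a new one, with a final flush for the remainder. A generic, self-contained sketch of that flush-and-clear batching pattern (the flush method below just prints the batch; it stands in for updateNodes):

import java.util.ArrayList;
import java.util.List;

// Sketch: process items in fixed-size batches, reusing one buffer via clear().
public class BatchProcessor {
    static final int BATCH_SIZE = 1000;

    static int processAll(List<Long> ids) {
        int processed = 0;
        ArrayList<Long> batch = new ArrayList<Long>(BATCH_SIZE);
        for (Long id : ids) {
            batch.add(id);
            if (batch.size() == BATCH_SIZE) {
                processed += flush(batch);
                batch.clear(); // empty the buffer but keep its capacity
            }
        }
        if (!batch.isEmpty()) {
            processed += flush(batch); // remainder
        }
        return processed;
    }

    static int flush(List<Long> batch) {
        System.out.println("Flushing " + batch.size() + " ids");
        return batch.size();
    }

    public static void main(String[] args) {
        List<Long> ids = new ArrayList<>();
        for (long i = 0; i < 2500; i++) ids.add(i);
        System.out.println("Processed " + processAll(ids));
    }
}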

From source file:com.ibm.bi.dml.lops.compile.Dag.java

/**
 * Method to generate Hadoop jobs. Exec nodes can contain a mixture of node
 * types requiring different MR jobs. This method breaks the job into
 * sub-types and then invokes the appropriate method to generate
 * instructions.
 * 
 * @param execNodes
 * @param inst
 * @param writeinst
 * @param deleteinst
 * @param jobNodes
 * @throws LopsException
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */

public void generateMRJobs(ArrayList<N> execNodes, ArrayList<Instruction> inst,
        ArrayList<Instruction> writeinst, ArrayList<Instruction> deleteinst, ArrayList<ArrayList<N>> jobNodes)
        throws LopsException, DMLUnsupportedOperationException, DMLRuntimeException

{

    /*// copy unassigned lops in execnodes to gmrnodes
    for (int i = 0; i < execNodes.size(); i++) {
       N node = execNodes.elementAt(i);
       if (jobType(node, jobNodes) == -1) {
    jobNodes.get(JobType.GMR.getId()).add(node);
    addChildren(node, jobNodes.get(JobType.GMR.getId()),
          execNodes);
       }
    }*/

    printJobNodes(jobNodes);

    ArrayList<Instruction> rmvarinst = new ArrayList<Instruction>();
    for (JobType jt : JobType.values()) {

        // do nothing, if jt = INVALID or ANY
        if (jt == JobType.INVALID || jt == JobType.ANY)
            continue;

        int index = jt.getId(); // job id is used as an index into jobNodes
        ArrayList<N> currNodes = jobNodes.get(index);

        // generate MR job
        if (currNodes != null && !currNodes.isEmpty()) {

            if (LOG.isTraceEnabled())
                LOG.trace("Generating " + jt.getName() + " job");

            if (jt.allowsRecordReaderInstructions()
                    && hasANode(jobNodes.get(index), ExecLocation.RecordReader)) {
                // split the nodes by recordReader lops
                ArrayList<ArrayList<N>> rrlist = splitGMRNodesByRecordReader(jobNodes.get(index));
                for (int i = 0; i < rrlist.size(); i++) {
                    generateMapReduceInstructions(rrlist.get(i), inst, writeinst, deleteinst, rmvarinst, jt);
                }
            } else if (jt.allowsSingleShuffleInstruction()) {
                // These jobs allow a single shuffle instruction. 
                // We should split the nodes so that a separate job is produced for each shuffle instruction.
                Lop.Type splittingLopType = jt.getShuffleLopType();

                ArrayList<N> nodesForASingleJob = new ArrayList<N>();
                for (int i = 0; i < jobNodes.get(index).size(); i++) {
                    if (jobNodes.get(index).get(i).getType() == splittingLopType) {
                        nodesForASingleJob.clear();

                        // Add the lop that defines the split 
                        nodesForASingleJob.add(jobNodes.get(index).get(i));

                        /*
                         * Add the splitting lop's children. This call is redundant when jt=SORT
                         * because a sort job ALWAYS has a SINGLE lop in the entire job
                         * i.e., there are no children to add when jt=SORT. 
                         */
                        addChildren(jobNodes.get(index).get(i), nodesForASingleJob, jobNodes.get(index));

                        if (jt.isCompatibleWithParentNodes()) {
                            /*
                             * If the splitting lop is compatible with parent nodes 
                             * then they must be added to the job. For example, MMRJ lop 
                             * may have a Data(Write) lop as its parent, which can be 
                             * executed along with MMRJ.
                             */
                            addParents(jobNodes.get(index).get(i), nodesForASingleJob, jobNodes.get(index));
                        }

                        generateMapReduceInstructions(nodesForASingleJob, inst, writeinst, deleteinst,
                                rmvarinst, jt);
                    }
                }
            } else {
                // the default case
                generateMapReduceInstructions(jobNodes.get(index), inst, writeinst, deleteinst, rmvarinst, jt);
            }
        }
    }
    inst.addAll(rmvarinst);

}
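In the single-shuffle branch above, one working list (nodesForASingleJob) is reused for every splitting lop: it is cleared, refilled with that lop and its related nodes, and handed to the instruction generator. A stripped-down sketch of that clear-and-refill reuse, assuming plain strings in place of lops and a trivial splitting test:

import java.util.ArrayList;
import java.util.List;

// Sketch: build one "job" per splitting element, reusing a single working list.
public class JobSplitter {
    public static void main(String[] args) {
        List<String> nodes = List.of("load", "SHUFFLE-A", "map", "SHUFFLE-B", "write");
        ArrayList<String> nodesForASingleJob = new ArrayList<String>();

        for (String node : nodes) {
            if (node.startsWith("SHUFFLE")) {
                nodesForASingleJob.clear();   // start a fresh job in the same working list
                nodesForASingleJob.add(node); // the element that defines the split
                System.out.println("Generating job for " + nodesForASingleJob);
            }
        }
    }
}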

From source file:edu.umass.cs.reconfiguration.SQLReconfiguratorDB.java

private Set<String> putReconfigurationRecordDB(Map<String, ReconfigurationRecord<NodeIDType>> toCommit) {
    String updateCmd = "update " + getRCRecordTable() + " set " + Columns.RC_GROUP_NAME.toString() + "=?, "
            + Columns.STRINGIFIED_RECORD.toString() + "=? where " + Columns.SERVICE_NAME.toString() + "=?";
    String cmd = updateCmd;

    PreparedStatement pstmt = null;
    Connection conn = null;
    Set<String> committed = new HashSet<String>();
    String[] keys = toCommit.keySet().toArray(new String[0]);
    try {
        ArrayList<String> batch = new ArrayList<String>();
        for (int i = 0; i < keys.length; i++) {
            String name = keys[i];
            if (conn == null) {
                conn = this.getDefaultConn();
                conn.setAutoCommit(false);
                pstmt = conn.prepareStatement(updateCmd);
            }
            // removal
            if (toCommit.get(name) == null) {
                this.deleteReconfigurationRecordDB(name);
                log.log(Level.INFO, "{0} deleted RC record {1}", new Object[] { this, name });
                committed.add(name);
                continue;
            }
            // else update/insert
            String rcGroupName = toCommit.get(name).getRCGroupName();
            if (rcGroupName == null)
                rcGroupName = this.getRCGroupName(name);
            pstmt.setString(1, rcGroupName);
            if (RC_RECORD_CLOB_OPTION)
                pstmt.setClob(2, new StringReader((toCommit.get(name)).toString()));
            else
                pstmt.setString(2, (toCommit.get(name)).toString());
            pstmt.setString(3, name);
            pstmt.addBatch();
            batch.add(name);

            int[] executed = new int[batch.size()];
            if ((i + 1) % MAX_DB_BATCH_SIZE == 0 || (i + 1) == toCommit.size()) {
                executed = pstmt.executeBatch();
                assert (executed.length == batch.size());
                conn.commit();
                pstmt.clearBatch();
                for (int j = 0; j < executed.length; j++) {
                    if (executed[j] > 0) {
                        log.log(Level.FINE, "{0} updated RC DB record to {1}",
                                new Object[] { this, toCommit.get(batch.get(j)).getSummary() });
                        committed.add(batch.get(j));
                    } else
                        log.log(Level.FINE,
                                "{0} unable to update RC record {1} (executed={2}), will try insert",
                                new Object[] { this, batch.get(j), executed[j] });
                }
                batch.clear();
            }
        }
    } catch (SQLException sqle) {
        log.severe("SQLException while inserting RC record using " + cmd);
        sqle.printStackTrace();
    } finally {
        cleanup(pstmt);
        cleanup(conn);
    }

    log.log(Level.FINE, "{0} batch-committed {1}({2}) out of {3}({4})",
            new Object[] { this, committed.size(), committed, toCommit.size(), toCommit.keySet() });
    committed.addAll(this.putReconfigurationRecordIndividually(this.diff(toCommit, committed)));
    log.log(Level.FINE, "{0} committed {1}({2}) out of {3}({4})",
            new Object[] { this, committed.size(), committed, toCommit.size(), toCommit.keySet() });
    return committed;
}
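The loop above keeps a parallel ArrayList of the names it has added to the JDBC batch so it can report per-row results after executeBatch(), then empties that list with clear() before starting the next batch. A condensed sketch of the same pattern; the JDBC URL, table, and column names are placeholders, so this assumes some database (for instance an in-memory H2 instance with a matching table) is reachable at that URL:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.List;

// Sketch: JDBC batching with a parallel list of keys, cleared after each flush.
public class BatchUpdateExample {
    static final int MAX_DB_BATCH_SIZE = 100;

    public static void main(String[] args) throws Exception {
        // Placeholder URL: assumes an existing table records(name VARCHAR PRIMARY KEY, payload VARCHAR).
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:test")) {
            conn.setAutoCommit(false);
            PreparedStatement pstmt = conn.prepareStatement("update records set payload=? where name=?");

            List<String> names = List.of("a", "b", "c");
            ArrayList<String> batch = new ArrayList<String>();
            for (int i = 0; i < names.size(); i++) {
                pstmt.setString(1, "payload-" + i);
                pstmt.setString(2, names.get(i));
                pstmt.addBatch();
                batch.add(names.get(i)); // remember which row each batch entry belongs to

                if ((i + 1) % MAX_DB_BATCH_SIZE == 0 || (i + 1) == names.size()) {
                    int[] executed = pstmt.executeBatch();
                    conn.commit();
                    for (int j = 0; j < executed.length; j++) {
                        System.out.println(batch.get(j) + " -> " + executed[j] + " row(s)");
                    }
                    pstmt.clearBatch();
                    batch.clear(); // reset the name list for the next batch
                }
            }
        }
    }
}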

From source file:com.hygenics.parser.GetImages.java

private void addFromFile() {
    File f = new File(fpath);
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    ArrayList<String> imageData = new ArrayList<String>();
    int size = (int) Math.ceil(commitsize / numqueries);

    if (f.exists()) {
        // get the image data
        File[] list = f.listFiles();
        int curr = 0;
        if (list != null) {
            for (File img : list) {
                curr += 1;
                if (img.isDirectory() == false
                        && (img.getName().contains(".bmp") || img.getName().toLowerCase().contains(".jpg")
                                || img.getName().toLowerCase().contains(".png")
                                || img.getName().toLowerCase().contains("jpeg"))) {
                    try {
                        if (dbcondition == null
                                || template
                                        .getJsonData(dbcondition.replace("$IMAGE$",
                                                img.getName().replaceAll("(?mis)" + imagepostfix, "")))
                                        .size() > 0) {
                            BufferedImage bi = ImageIO.read(img); // only used to ensure that this is an image
                            JsonObject jobj = new JsonObject();
                            jobj.add("offenderhash", img.getName().replaceAll("(?mis)" + imagepostfix, "")); // assumes hash is file name + postfix
                            jobj.add("image", img.getName().replaceAll("(?mis)" + imagepostfix, ""));
                            jobj.add("image_path", img.getName());
                            jobj.add("table", table);
                            jobj.add("date", Calendar.getInstance().getTime().toString());
                            imageData.add(jobj.toString());
                        }
                    } catch (IOException e) {
                        log.info(img.getName() + " is not an Image!");
                        e.printStackTrace();
                    } catch (Exception e) {
                        log.info("Error in Posting to Database.");
                        e.printStackTrace();
                    }
                }

                // post if > commitsize
                if (imageData.size() >= commitsize || curr == list.length) {
                    log.info("Posting to DB @ " + Calendar.getInstance().getTime().toString());
                    for (int i = 0; i < numqueries; i++) {
                        if (((i + 1) * size) < imageData.size()) {
                            fjp.execute(new ImagePost(imageData.subList((i * size), ((i + 1) * size))));
                        } else {
                            fjp.execute(new ImagePost(imageData.subList((i * size), imageData.size())));
                        }
                    }

                    int w = 0;
                    while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) {
                        w++;
                    }
                    log.info("Waited for " + w + " cycles");
                    imageData.clear();
                    log.info("Finished Posting to DB @ " + Calendar.getInstance().getTime().toString());
                }
            }
        }

    } else {
        log.error("File Path does Not Exist.Please Check Image Pull!");
    }
    fjp.shutdown();
    fjp = null;
}
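addFromFile buffers image metadata until commitsize entries are collected, fans the buffer out to a ForkJoinPool in numqueries slices, waits for the pool to go quiet, and then reuses the buffer via clear(). A simplified, self-contained sketch of that buffer-and-flush pattern (it copies each slice before submitting, and the sizes and task body are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

// Sketch: accumulate records, flush them to a thread pool in slices, then clear the buffer.
public class BufferedPoster {
    public static void main(String[] args) {
        final int commitSize = 10;
        final int numQueries = 2;
        ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
        ArrayList<String> imageData = new ArrayList<String>();

        for (int record = 0; record < 25; record++) {
            imageData.add("record-" + record);

            boolean last = (record == 24);
            if (imageData.size() >= commitSize || last) {
                int size = (int) Math.ceil((double) imageData.size() / numQueries);
                for (int i = 0; i < numQueries; i++) {
                    int from = i * size;
                    int to = Math.min(from + size, imageData.size());
                    if (from >= to) break;
                    // Copy the slice so it stays valid after the buffer is cleared.
                    final List<String> slice = new ArrayList<String>(imageData.subList(from, to));
                    fjp.execute(() -> System.out.println("Posting " + slice.size() + " records"));
                }
                fjp.awaitQuiescence(10, TimeUnit.SECONDS); // wait for the submitted tasks
                imageData.clear(); // reuse the same buffer for the next round
            }
        }
        fjp.shutdown();
    }
}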

From source file:gdsc.smlm.ij.plugins.CreateData.java

private int runDensityCalculation(ExecutorService threadPool, List<Future<?>> futures,
        final ArrayList<float[]> coords, final Statistics densityStats, final float radius,
        final Rectangle bounds, final int[] allDensity, final int allIndex) {
    final int size = coords.size();
    final float[] xCoords = new float[size];
    final float[] yCoords = new float[size];
    for (int i = 0; i < xCoords.length; i++) {
        float[] xy = coords.get(i);
        xCoords[i] = xy[0];
        yCoords[i] = xy[1];
    }
    futures.add(threadPool.submit(new Runnable() {
        public void run() {
            incrementProgress();
            final DensityManager dm = new DensityManager(xCoords, yCoords, bounds);
            final int[] density = dm.calculateDensity(radius, true);
            addDensity(densityStats, density);

            // Store the density for each result. This does not need to be synchronised 
            // since the indices in different threads are unique.
            for (int i = 0, index = allIndex; i < density.length; i++, index++)
                allDensity[index] = density[i];
        }
    }));
    coords.clear();
    return size;
}
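Because the Runnable only captures the primitive xCoords/yCoords copies, the coords list can be cleared immediately after submission without racing with the background task. A small sketch of that copy-then-clear hand-off, using a plain ExecutorService and stub data in place of the original density calculation:

import java.util.ArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Sketch: snapshot list contents into arrays for an async task, then clear the list.
public class CopyThenClear {
    public static void main(String[] args) {
        ExecutorService threadPool = Executors.newFixedThreadPool(2);
        ArrayList<float[]> coords = new ArrayList<float[]>();
        coords.add(new float[] { 1f, 2f });
        coords.add(new float[] { 3f, 4f });

        final float[] xCoords = new float[coords.size()];
        final float[] yCoords = new float[coords.size()];
        for (int i = 0; i < coords.size(); i++) {
            xCoords[i] = coords.get(i)[0];
            yCoords[i] = coords.get(i)[1];
        }

        threadPool.submit(() -> {
            // Works only on the copies, so the original list is free to change.
            System.out.println("Processing " + xCoords.length + " points, first x=" + xCoords[0]
                    + ", y=" + yCoords[0]);
        });

        coords.clear(); // safe: the task never reads the list
        threadPool.shutdown();
    }
}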