Example usage for java.util ArrayList clear

List of usage examples for java.util ArrayList clear

Introduction

On this page you can find example usage for java.util.ArrayList.clear().

Prototype

public void clear() 

Document

Removes all of the elements from this list.
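
A minimal, self-contained sketch of the call (the class name ClearExample is only illustrative): clear() removes every element in place, leaving the same list instance empty and ready to be reused, which is the pattern most of the examples below rely on.

import java.util.ArrayList;
import java.util.List;

public class ClearExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<String>();
        names.add("alpha");
        names.add("beta");
        System.out.println(names.size());    // 2

        // Removes all of the elements; the list object itself stays usable.
        names.clear();
        System.out.println(names.isEmpty()); // true

        // The same instance can be refilled without creating a new list.
        names.add("gamma");
        System.out.println(names);           // [gamma]
    }
}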

Usage

From source file:ffx.autoparm.PME_2.java

/**
 * <p>
 * polargrp</p>
 */
public static void polargrp() {
    int index;

    ArrayList<ArrayList<Integer>> temp_groups1 = new ArrayList<ArrayList<Integer>>();
    ArrayList<ArrayList<Integer>> temp_groups2 = new ArrayList<ArrayList<Integer>>();
    ArrayList<ArrayList<Integer>> temp_groups3 = new ArrayList<ArrayList<Integer>>();
    ArrayList<ArrayList<Integer>> temp_groups4 = new ArrayList<ArrayList<Integer>>();

    ArrayList<Integer> polarizationGroup = new ArrayList<Integer>();

    ArrayList<Integer> list = new ArrayList<Integer>();
    int nlist = 0;
    ArrayList<Integer> keep = new ArrayList<Integer>();
    //int nkeep = 0;
    ArrayList<Integer> mask = new ArrayList<Integer>();

    ArrayList<Integer> jg;
    ArrayList<Integer> ig;
    int kk;
    int jj;
    int start;
    int stop;
    boolean done;

    for (Atom ai : atoms) {
        ArrayList<Integer> group = new ArrayList<Integer>();
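        // clear() empties the per-atom scratch list so it can be reused on the next iteration.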
        polarizationGroup.clear();
        index = ai.getXYZIndex() - 1;
        group.add(index);
        //polarizationGroup.add(ai.getType());
        PolarizeType polarizeType = ai.getPolarizeType();
        if (polarizeType != null) {
            if (polarizeType.polarizationGroup != null) {
                for (int i : polarizeType.polarizationGroup) {
                    if (!polarizationGroup.contains(i)) {
                        polarizationGroup.add(i);
                    }
                }
            }
        }
        for (Bond bi : ai.getBonds()) {
            Atom aj = bi.get1_2(ai);
            int tj = aj.getType();
            for (int g : polarizationGroup) {
                if (g == tj) {
                    Integer index2 = aj.getXYZIndex() - 1;
                    group.add(index2);
                }
            }
        }
        Collections.sort(group);
        temp_groups1.add(group);
    }

    //Next part of ip11 creation
    for (int n = 0; n < nAtoms; n++) {
        list.add(n, -1);
    }

    for (int i = 0; i < nAtoms; i++) {
        ig = temp_groups1.get(i);
        done = false;
        start = 1;
        stop = ig.size();
        for (int j = start - 1; j < stop; j++) {
            jj = ig.get(j);
            if (jj < i) {
                done = true;
                jg = temp_groups1.get(jj);
                for (int k = 0; k < jg.size(); k++) {
                    if (k > ig.size() - 1) {
                        for (int s = ig.size(); s < k + 1; s++) {
                            ig.add(0);
                        }
                        ig.set(k, jg.get(k));
                    } else {
                        ig.set(k, jg.get(k));
                    }
                }
            } else {
                list.set(jj, i);
            }
        }
        while (!done) {
            done = true;
            for (int j = start - 1; j < stop; j++) {
                jj = ig.get(j);
                jg = temp_groups1.get(jj);
                for (int k = 0; k < jg.size(); k++) {
                    kk = jg.get(k);
                    if (list.get(kk) != i) {
                        ig.add(kk);
                        list.set(kk, i);
                    }
                }
            }
            if (ig.size() != stop) {
                done = false;
                start = stop + 1;
                stop = ig.size();
            }
        }
        Collections.sort(ig);
    }

    //final part of ip11 array creation
    for (int n = 0; n < nAtoms; n++) {
        ArrayList<Integer> group = temp_groups1.get(n);
        Collections.sort(group);
        //System.out.println(group);
        ip11[n] = new int[group.size()];
        int j = 0;
        for (int k : group) {
            ip11[n][j++] = k;
        }
    }

    //start ip12 creation
    for (int n = 0; n < nAtoms; n++) {
        mask.add(n, -1);
    }
    for (int i = 0; i < nAtoms; i++) {
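        // clear() resets the shared scratch lists before processing each atom, avoiding reallocation.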
        list.clear();
        keep.clear();
        ArrayList<Integer> group = new ArrayList<Integer>();
        ig = temp_groups1.get(i);
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            list.add(jj);
            mask.set(jj, i);
        }
        for (int j = 0; j < list.size(); j++) {
            jj = list.get(j);
            Atom ajj = atoms[jj];
            for (int k = 0; k < ajj.getBonds().size(); k++) {
                kk = ajj.getBonds().get(k).get1_2(ajj).getXYZIndex() - 1;
                //System.out.println(mask.get(kk)+" "+i);
                if (mask.get(kk) != i) {
                    keep.add(kk);
                }
            }
        }
        nlist = 0;
        list.clear();
        for (int j = 0; j < keep.size(); j++) {
            jj = keep.get(j);
            jg = temp_groups1.get(jj);
            for (int k = 0; k < jg.size(); k++) {
                kk = jg.get(k);
                //System.out.println((j+1)+" "+(jj+1)+" "+(k+1)+" "+(kk+1));
                nlist++;
                //list.set(nlist, kk);
                if (nlist - 1 < list.size()) {
                    list.set(nlist - 1, kk);
                } else {
                    list.add(kk);
                }
            }
        }
        Collections.sort(list);
        for (int j = 0; j < list.size(); j++) {
            group.add(j, list.get(j));
        }
        temp_groups2.add(group);
    }
    //final part of ip12 array creation
    for (int n = 0; n < nAtoms; n++) {
        ArrayList<Integer> group = temp_groups2.get(n);
        Collections.sort(group);
        //System.out.println(group);
        ip12[n] = new int[group.size()];
        int j = 0;
        for (int k : group) {
            ip12[n][j++] = k;
        }
    }

    //start ip13 creation
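    // clear() resets the mask list before it is refilled with one entry per atom.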
    mask.clear();
    for (int n = 0; n < nAtoms; n++) {
        mask.add(n, -1);
    }
    for (int i = 0; i < nAtoms; i++) {
        list.clear();
        ArrayList<Integer> group = new ArrayList<Integer>();
        ig = temp_groups1.get(i);
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            mask.set(jj, i);
        }
        ig = temp_groups2.get(i);
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            mask.set(jj, i);
        }
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            jg = temp_groups2.get(jj);
            for (int k = 0; k < jg.size(); k++) {
                kk = jg.get(k);
                if (mask.get(kk) != i) {
                    list.add(kk);
                }
            }
        }
        Collections.sort(list);
        for (int j = 0; j < list.size(); j++) {
            group.add(j, list.get(j));
        }
        temp_groups3.add(group);
    }
    //final part of ip13 array creation
    for (int n = 0; n < nAtoms; n++) {
        ArrayList<Integer> group = temp_groups3.get(n);
        Collections.sort(group);
        //System.out.println(group);
        ip13[n] = new int[group.size()];
        int j = 0;
        for (int k : group) {
            ip13[n][j++] = k;
        }
    }

    //start ip14 creation
    mask.clear();
    for (int n = 0; n < nAtoms; n++) {
        mask.add(n, -1);
    }
    for (int i = 0; i < nAtoms; i++) {
        list.clear();
        ArrayList<Integer> group = new ArrayList<Integer>();
        ig = temp_groups1.get(i);
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            mask.set(jj, i);
        }
        ig = temp_groups2.get(i);
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            mask.set(jj, i);
        }
        ig = temp_groups3.get(i);
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            mask.set(jj, i);
        }
        for (int j = 0; j < ig.size(); j++) {
            jj = ig.get(j);
            jg = temp_groups2.get(jj);
            for (int k = 0; k < jg.size(); k++) {
                kk = jg.get(k);
                if (mask.get(kk) != i) {
                    list.add(kk);
                }
            }
        }
        Collections.sort(list);
        for (int j = 0; j < list.size(); j++) {
            group.add(j, list.get(j));
        }
        temp_groups4.add(group);
    }
    //final part of ip14 array creation
    for (int n = 0; n < nAtoms; n++) {
        ArrayList<Integer> group = temp_groups4.get(n);
        Collections.sort(group);
        //System.out.println(group);
        ip14[n] = new int[group.size()];
        int j = 0;
        for (int k : group) {
            ip14[n][j++] = k;
        }
    }
}

From source file:cloudproject.test.GetContainerInfo.java

/**
 * Parse information from a file, and also optionally print information about what
 * formats, containers and codecs fit into that file.
 *
 * @param filename The file to open, or null if we just want generic options.
 * @throws IOException if file cannot be opened.
 * @throws InterruptedException if process is interrupted while querying.
 */
private static void getInfo(String filename) throws InterruptedException, IOException {

    // In Humble, all objects have special constructors named 'make'.
    // A Demuxer opens up media containers, then parses and de-multiplexes the
    // streams of media data within those containers.
    final Demuxer demuxer = Demuxer.make();

    // We open the demuxer by pointing it at a URL.
    demuxer.open(filename, null, false, true, null, null);

    // Once we've opened a demuxer, Humble can make a guess about the
    // DemuxerFormat. Humble supports over 100 media container formats.
    final DemuxerFormat format = demuxer.getFormat();
    System.out.printf("URL: '%s' (%s: %s)\n", demuxer.getURL(), format.getLongName(), format.getName());

    // Many programs that make containers, such as iMovie or Adobe Elements, will
    // insert meta-data about the container. Here we extract that meta data and print it.
    KeyValueBag metadata = demuxer.getMetaData();
    System.out.println("MetaData:");
    for (String key : metadata.getKeys())
        System.out.printf("  %s: %s\n", key, metadata.getValue(key));

    System.out.println("\n");

    // There are a few other key pieces of information that are interesting for
    // most containers: the duration, the starting time, and the estimated bit-rate.
    // This code extracts all three.
    final String formattedDuration = formatTimeStamp(demuxer.getDuration());
    System.out.printf("Duration: %s, start: %f, bitrate: %d kb/s\n", formattedDuration,
            demuxer.getStartTime() == Global.NO_PTS ? 0 : demuxer.getStartTime() / 1000000.0,
            demuxer.getBitRate() / 1000);

    System.out.println("\n");

    String output = "/Users/lxb200709/Documents/TransCloud/videosource/elephants dream_00_cv.WebM";

    // We're forcing the output container format to MP4 for this demo.
    final Muxer muxer = Muxer.make(output, null, "mp4");

    //final MuxerFormat format_muxer = MuxerFormat.guessFormat("mp4", null, null);

    /**
     * Create bit stream filters if we are asked to.
     */
    final BitStreamFilter vf = BitStreamFilter.make("dump_extra");
    final BitStreamFilter af = BitStreamFilter.make("aac_adtstoasc");

    // Finally, a container consists of several different independent streams of
    // data called Streams. In Humble there are two objects that represent streams:
    // DemuxerStream (when you are reading) and MuxerStreams (when you are writing).

    // First find the number of streams in this container.
    int ns = demuxer.getNumStreams();

    final Decoder[] decoders = new Decoder[ns];

    MediaPicture picture = null;
    MediaAudio samples = null;

    // Now, let's iterate through each of them.
    for (int i = 0; i < ns; i++) {

        DemuxerStream stream = demuxer.getStream(i);

        metadata = stream.getMetaData();
        // Language is usually embedded as metadata in a stream.
        final String language = metadata.getValue("language");

        // We will only be able to make a decoder for streams we can actually
        // decode, so the caller should check for null.
        decoders[i] = stream.getDecoder();

        System.out.printf(" Stream #0.%1$d (%2$s): %3$s\n", i, language,
                decoders[i] != null ? decoders[i].toString() : "unknown coder");
        System.out.println("  Metadata:");
        for (String key : metadata.getKeys())
            System.out.printf("    %s: %s\n", key, metadata.getValue(key));

        if (decoders[i].getCodecType() == Type.MEDIA_VIDEO) {
            System.out.printf("    frame rate: %s\n", stream.getFrameRate());
            System.out.printf("    frame number: %s\n", stream.getNumFrames());
            System.out.printf("    stream tb: %s\n", stream.getTimeBase());

            //Open the video decoder
            decoders[i].open(null, null);
        }

        if (decoders[i].getCodecType() == Type.MEDIA_AUDIO) {
            decoders[i].open(null, null);

        }
        System.out.println("\n");

        muxer.addNewStream(decoders[i]);

    }

    muxer.open(null, null);

    final MediaPacket packet = MediaPacket.make();
    ArrayList<MediaPicture> keyFrameList = new ArrayList<MediaPicture>();
    ArrayList<MediaPicture> keyFrameListInOnePacket = new ArrayList<MediaPicture>();
    ArrayList<String> frameList = new ArrayList<String>();
    ArrayList<Long> gopDuration = new ArrayList<Long>();
    long gopSize = 0;
    long previousKeyFramePosition = 0;
    long currentKeyFramePosition = 0;
    long gopPts = 0;
    long previousKeyFramePts = 0;
    long currentKeyFramePts = 0;
    long gopDts = 0;
    long previousKeyFrameDts = 0;
    long currentKeyFrameDts = 0;
    long gopPosition = 0;

    int packetCount = 0;

    while (demuxer.read(packet) >= 0) {
        /**
         * Now we have a packet, but we can only write packets from streams that had decoders we knew how to handle.
         */
        final Decoder d = decoders[packet.getStreamIndex()];

        if (d != null && d.getCodecType() == Type.MEDIA_VIDEO) {
            packetCount++;
            System.out.println("\npacket number: " + packetCount);
            System.out.println("packet position: " + packet.getPosition());
            System.out.println("packet duration: " + packet.getDuration());
            System.out.println("packet size: " + packet.getSize());
            System.out.println("packet dts: " + packet.getDts());
            System.out.println("packet pts: " + packet.getPts());
            System.out.println("this is a video packet");

            picture = MediaPicture.make(d.getWidth(), d.getHeight(), d.getPixelFormat());
            picture.setTimeBase(demuxer.getStream(packet.getStreamIndex()).getFrameRate());

            int offset = 0;
            int bytesDecoded = 0;

            while (offset < packet.getSize()) {
                bytesDecoded += d.decode(picture, packet, offset);
                if (bytesDecoded < 0)
                    throw new RuntimeException("got error decoding video");

                offset += bytesDecoded;

                if (bytesDecoded >= 0) {
                    if (picture.isComplete()) {

                        if (picture.getType() == MediaPicture.Type.PICTURE_TYPE_I) {
                            //Each time a new GOP starts, create a new packet
                            final MediaPacket packetGOP = MediaPacket.make();
                            keyFrameList.add(picture);
                            keyFrameListInOnePacket.add(picture);
                            System.out.println("A I frame is created");
                            frameList.add("I");

                            //Calculate the GOP size: the previous GOP size is the current I frame position
                            //minus the previous I frame position.
                            currentKeyFramePosition = packet.getPosition();
                            gopSize = currentKeyFramePosition - previousKeyFramePosition;
                            gopPosition = previousKeyFramePosition;
                            previousKeyFramePosition = currentKeyFramePosition;

                            //Calculate the GOP pts (deadline). It should be the pts of the first frame
                            //in this GOP, which is usually the key frame.
                            gopPts = previousKeyFramePts;
                            currentKeyFramePts = packet.getPts();
                            previousKeyFramePts = currentKeyFramePts;

                            gopDts = previousKeyFrameDts;
                            currentKeyFrameDts = packet.getDts();
                            previousKeyFrameDts = currentKeyFrameDts;

                            /*packetGOP.setKeyPacket(true);
                            packetGOP.setTimeBase(packet.getTimeBase());
                            packetGOP.setDuration(gopDuration.size());
                            packetGOP.setPts(gopPts);
                            packetGOP.setDts(gopDts);
                            packetGOP.setPosition(gopPosition);
                            //  packetGOP.setComplete(true);
                                    
                            gopDuration.clear();
                                    
                            if (vf != null && d.getCodecType() == Type.MEDIA_VIDEO)
                                vf.filter(packetGOP, null);
                             else if (af != null && d.getCodecType() == Type.MEDIA_AUDIO)
                               af.filter(packetGOP, null);
                                     
                             System.out.println("*******Writing packetGOP to muxer container*****");*/
                            muxer.write(packet, true);
                        }
                        if (picture.getType() == MediaPicture.Type.PICTURE_TYPE_P) {
                            System.out.println("A P frame is created");
                            frameList.add("P");

                        }

                        if (picture.getType() == MediaPicture.Type.PICTURE_TYPE_B) {
                            System.out.println("A B frame is created");
                            frameList.add("B");

                        }

                    }

                }
            }
        }

        /*     
             if(d.getCodecType() == Type.MEDIA_AUDIO) { 
                        
                System.out.println("this is a audio packet");
                samples = MediaAudio.make(
                   d.getFrameSize(),
                   d.getSampleRate(),
                   d.getChannels(),
                   d.getChannelLayout(),
                   d.getSampleFormat());
                int offset = 0;
                 int bytesRead = 0;
                 do {
                   bytesRead += d.decodeAudio(samples, packet, offset);
                   if (samples.isComplete()) {
                 
                   }
                   offset += bytesRead;
                 } while (offset < packet.getSize());
                        
             }*/

        if (packet.isComplete() && d != null && d.getCodecType() == Type.MEDIA_VIDEO) {

            if (packet.isKeyPacket()) {
                System.out.println("This is a keypacket");
            }

            //Calculate the total GOP duration 
            gopDuration.add(packet.getDuration());

            //   System.out.printf("****Find %d I frames in this packet****", keyFrameListInOnePacket.size());

            //System.out.println("\n");

            for (MediaPicture pic : keyFrameListInOnePacket) {
                System.out.println(
                        "\nI frame #" + keyFrameListInOnePacket.indexOf(pic) + " pts: " + pic.getPts());
            }
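            // clear() empties the per-packet key-frame list once it has been printed, ready for the next packet.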
            keyFrameListInOnePacket.clear();
            System.out.println("\n");

            // System.out.println(frameList);
            // System.out.println("\n");
        }

    }

    for (int i = 0; i < ns; i++) {
        do {
            decoders[i].decode(picture, null, 0);
            if (picture.isComplete()) {

            }
        } while (picture.isComplete());

    }
    // It is good practice to close demuxers when you're done to free
    // up file handles. Humble will EVENTUALLY detect if nothing else
    // references this demuxer and close it then, but get in the habit
    // of cleaning up after yourself, and your future girlfriend/boyfriend
    // will appreciate it.
    muxer.close();
    demuxer.close();
}

From source file:com.zoffcc.applications.aagtl.FieldnotesUploader.java

public Boolean upload_v2() {
    this.downloader.login();
    String page = this.downloader.getUrlData(this.URL);
    String viewstate = "";
    Pattern p = Pattern
            .compile("<input type=\"hidden\" name=\"__VIEWSTATE\" id=\"__VIEWSTATE\" value=\"([^\"]+)\" />");
    Matcher m = p.matcher(page);
    m.find();
    viewstate = m.group(1);

    //System.out.println("viewstate=" + viewstate);
    // got viewstate

    InputStream fn_is = null;
    String raw_upload_data = "";
    try {
        fn_is = new ByteArrayInputStream(
                ("GC2BNHP,2010-11-07T14:00Z,Write note,\"bla bla\"").getBytes("UTF-8"));
        raw_upload_data = "GC2BNHP,2010-11-07T20:50Z,Write note,\"bla bla\"".getBytes("UTF-8").toString();
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }

    String cookies_string = this.downloader.getCookies();

    ArrayList<InputStream> files = new ArrayList<InputStream>();
    files.add(fn_is);

    Hashtable<String, String> ht = new Hashtable<String, String>();
    ht.put("ctl00$ContentBody$btnUpload", "Upload Field Note");
    ht.put("ctl00$ContentBody$chkSuppressDate", "");
    //   ht.put("ctl00$ContentBody$FieldNoteLoader", "geocache_visits.txt");
    ht.put("__VIEWSTATE", viewstate);

    HttpData data = HttpRequest.post(this.URL, ht, files, cookies_string);
    //System.out.println(data.content);

    String boundary = "----------ThIs_Is_tHe_bouNdaRY_$";
    String crlf = "\r\n";

    URL url = null;
    try {
        url = new URL(this.URL);
    } catch (MalformedURLException e2) {
        e2.printStackTrace();
    }
    HttpURLConnection con = null;
    try {
        con = (HttpURLConnection) url.openConnection();
    } catch (IOException e2) {
        e2.printStackTrace();
    }
    con.setDoInput(true);
    con.setDoOutput(true);
    con.setUseCaches(false);
    try {
        con.setRequestMethod("POST");
    } catch (java.net.ProtocolException e) {
        e.printStackTrace();
    }

    con.setRequestProperty("Cookie", cookies_string);
    //System.out.println("Cookie: " + cookies_string[0] + "=" + cookies_string[1]);

    con.setRequestProperty("User-Agent", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)");
    con.setRequestProperty("Pragma", "no-cache");
    //con.setRequestProperty("Connection", "Keep-Alive");
    String content_type = String.format("multipart/form-data; boundary=%s", boundary);
    con.setRequestProperty("Content-Type", content_type);

    DataOutputStream dos = null;
    try {
        dos = new DataOutputStream(con.getOutputStream());
    } catch (IOException e) {
        e.printStackTrace();
    }

    String raw_data = "";

    //
    raw_data = raw_data + "--" + boundary + crlf;
    raw_data = raw_data
            + String.format("Content-Disposition: form-data; name=\"%s\"", "ctl00$ContentBody$btnUpload")
            + crlf;
    raw_data = raw_data + crlf;
    raw_data = raw_data + "Upload Field Note" + crlf;
    //

    //
    raw_data = raw_data + "--" + boundary + crlf;
    raw_data = raw_data
            + String.format("Content-Disposition: form-data; name=\"%s\"", "ctl00$ContentBody$chkSuppressDate")
            + crlf;
    raw_data = raw_data + crlf;
    raw_data = raw_data + "" + crlf;
    //

    //
    raw_data = raw_data + "--" + boundary + crlf;
    raw_data = raw_data + String.format("Content-Disposition: form-data; name=\"%s\"", "__VIEWSTATE") + crlf;
    raw_data = raw_data + crlf;
    raw_data = raw_data + viewstate + crlf;
    //

    //
    raw_data = raw_data + "--" + boundary + crlf;
    raw_data = raw_data + String.format("Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"",
            "ctl00$ContentBody$FieldNoteLoader", "geocache_visits.txt") + crlf;
    raw_data = raw_data + String.format("Content-Type: %s", "text/plain") + crlf;
    raw_data = raw_data + crlf;
    raw_data = raw_data + raw_upload_data + crlf;
    //

    //
    raw_data = raw_data + "--" + boundary + "--" + crlf;
    raw_data = raw_data + crlf;

    try {
        this.SendPost(this.URL, raw_data, cookies_string);
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    //System.out.println(raw_data);

    try {
        dos.writeBytes(raw_data);
        //dos.writeChars(raw_data);
        dos.flush();
    } catch (IOException e) {
        e.printStackTrace();
    }

    HttpData ret2 = new HttpData();
    BufferedReader rd = null;
    try {
        rd = new BufferedReader(new InputStreamReader(con.getInputStream()), HTMLDownloader.large_buffer_size);
        String line;
        while ((line = rd.readLine()) != null) {
            ret2.content += line + "\r\n";
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    //get headers
    Map<String, List<String>> headers = con.getHeaderFields();
    Set<Entry<String, List<String>>> hKeys = headers.entrySet();
    for (Iterator<Entry<String, List<String>>> i = hKeys.iterator(); i.hasNext();) {
        Entry<String, List<String>> m99 = i.next();

        //System.out.println("HEADER_KEY" + m99.getKey() + "=" + m99.getValue());
        ret2.headers.put(m99.getKey(), m99.getValue().toString());
        if (m99.getKey().equals("set-cookie"))
            ret2.cookies.put(m99.getKey(), m99.getValue().toString());
    }
    try {
        dos.close();
        rd.close();
    } catch (IOException e) {
        e.printStackTrace();
    }

    //System.out.println(ret2.content);

    //System.out.println("FFFFFFFFFFFFFFFFFFFFFFFFFFFF");
    ClientHttpRequest client_req;
    try {
        client_req = new ClientHttpRequest(this.URL);
        String[] cookies_string2 = this.downloader.getCookies2();
        for (int jk = 0; jk < cookies_string2.length; jk++) {
            System.out.println(cookies_string2[jk * 2] + "=" + cookies_string2[(jk * 2) + 1]);
            client_req.setCookie(cookies_string2[jk * 2], cookies_string2[(jk * 2) + 1]);
        }
        client_req.setParameter("ctl00$ContentBody$btnUpload", "Upload Field Note");
        client_req.setParameter("ctl00$ContentBody$FieldNoteLoader", "geocache_visits.txt", fn_is);
        InputStream response = client_req.post();
        //System.out.println(this.convertStreamToString(response));
    } catch (IOException e) {
        e.printStackTrace();
    }

    //ArrayList<InputStream> files = new ArrayList();
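    // clear() empties the reused upload list before the stream is added again for the next request.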
    files.clear();
    files.add(fn_is);

    Hashtable<String, String> ht2 = new Hashtable<String, String>();
    ht2.put("ctl00$ContentBody$btnUpload", "Upload Field Note");
    ht2.put("ctl00$ContentBody$chkSuppressDate", "");
    //   ht.put("ctl00$ContentBody$FieldNoteLoader", "geocache_visits.txt");
    ht2.put("__VIEWSTATE", viewstate);

    HttpData data3 = HttpRequest.post(this.URL, ht2, files, cookies_string);
    //System.out.println(data3.content);

    //      String the_page2 = this.downloader.get_reader_mpf(this.URL, raw_data, null, true, boundary);
    //System.out.println("page2=\n" + the_page2);

    Boolean ret = false;
    return ret;
}

From source file:com.clavain.munin.MuninNode.java

/**
 * Will load the plugin list from munin-node
 */
public boolean loadPlugins() {
    setLoadedPlugins(new CopyOnWriteArrayList<MuninPlugin>());
    String l_lastProceeded = "";

    try {
        Socket cs = new Socket();
        cs.setKeepAlive(false);
        cs.setSoLinger(true, 0);
        cs.setReuseAddress(true);
        cs.setSoTimeout(com.clavain.muninmxcd.socketTimeout);
        if (!str_via.equals("unset")) {
            cs.connect(new InetSocketAddress(this.getStr_via(), this.getPort()),
                    com.clavain.muninmxcd.socketTimeout);
        } else {
            cs.connect(new InetSocketAddress(this.getHostname(), this.getPort()),
                    com.clavain.muninmxcd.socketTimeout);
        }

        if (p.getProperty("kill.sockets").equals("true")) {
            SocketCheck sc = new SocketCheck(cs, getUnixtime());
            sc.setHostname(this.getHostname());
            com.clavain.muninmxcd.v_sockets.add(sc);
        }
        PrintStream os = new PrintStream(cs.getOutputStream());
        BufferedReader in = new BufferedReader(new InputStreamReader(cs.getInputStream()));

        String s = in.readLine();

        if (s != null) {
            // Set version
            os.println("version");
            Thread.sleep(150);
            s = in.readLine();

            String version = s.substring(s.indexOf(":") + 1, s.length()).trim();
            this.str_muninVersion = version;

            if (authpw != null) {
                // if authpw is set, verify
                if (!authpw.trim().equals("")) {
                    os.println("config muninmxauth");
                    Thread.sleep(150);
                    String apw = in.readLine();
                    s = in.readLine();
                    if (!apw.trim().equals(this.getAuthpw())) {
                        logger.error("Invalid muninmxauth password for host: " + this.getHostname());
                        cs.close();
                        return false;
                    }
                }
            }
            // check anyway if muninmxauth plugin is present
            else {
                os.println("config muninmxauth");
                Thread.sleep(100);
                String apw = in.readLine();
                if (!apw.trim().equals("# Unknown service")) {
                    logger.error(
                            "no auth password given, but muninmxauth plugin present on " + this.getHostname());
                    cs.close();
                    return false;
                }
                s = in.readLine();
            }

            // get list of available plugins
            if (str_via.equals("unset")) {
                os.println("list");
            } else {
                os.println("list " + str_hostname);
            }

            Thread.sleep(250);
            s = in.readLine();

            // if response is empty and host is not via, do a list $hostname
            if (s.trim().equals("") && str_via.equals("unset")) {
                logger.info("Plugin Response Empty on " + this.getHostname()
                        + " trying to load with list $hostname");
                os.println("list " + this.getHostname());
                Thread.sleep(250);
                s = in.readLine();
            }

            String l_tmp;
            StringTokenizer l_st = new StringTokenizer(s, " ");

            // create plugin
            MuninPlugin l_mp = new MuninPlugin();
            // negative support
            ArrayList<String> tmp_negatives = new ArrayList<String>();

            while (l_st.hasMoreTokens()) {

                String l_strPlugin = l_st.nextToken();

                // check for track_pkg and muninmx essentials
                if (l_strPlugin.equals("muninmx_trackpkg")) {
                    this.setTrack_pkg(true);
                    continue;
                }

                // got essentials?
                if (l_strPlugin.equals("muninmx_essentials")) {
                    this.setEssentials(true);
                    continue;
                }

                if (isPluginIgnored(l_strPlugin.toUpperCase())) {
                    continue;
                }

                l_mp.setPluginName(l_strPlugin);

                os.println("config " + l_strPlugin);

                // create graphs for plugin
                int l_iGraphsFound = 0;
                int l_iTmp = 0;
                MuninGraph l_mg = new MuninGraph();
                l_mg.setQueryInterval(this.getQueryInterval());
                while ((l_tmp = in.readLine()) != null) {
                    if (l_tmp.startsWith(".")) {
                        break;
                    }
                    // collect graphs only for plugin
                    String l_strName;
                    String l_strType;
                    String l_strValue;

                    if (!l_tmp.contains("graph_") && !l_tmp.trim().equals("") && !l_tmp.contains("host_name")
                            && !l_tmp.contains("multigraph") && !l_tmp.trim().equals("graph no")
                            && !l_tmp.trim().equals("# Bad exit")
                            && !l_tmp.trim().contains("info Currently our peer")
                            && !l_tmp.trim().startsWith("#")
                            && !l_tmp.trim().contains("Bonding interface errors")) {
                        l_lastProceeded = l_tmp;
                        l_strName = l_tmp.substring(0, l_tmp.indexOf("."));
                        l_strType = l_tmp.substring(l_tmp.indexOf(".") + 1, l_tmp.indexOf(" "));
                        l_strValue = l_tmp.substring(l_tmp.indexOf(" ") + 1, l_tmp.length());
                        //System.err.println("Name: " + l_strName + " Type: " + l_strType + " Value: " + l_strValue);

                        if (l_strType.equals("label")) {
                            l_iTmp++;

                            if (l_iTmp > 1) {
                                l_mp.addGraph(l_mg);
                                l_mg = new MuninGraph();
                                l_mg.setQueryInterval(this.getQueryInterval());
                            }
                            l_mg.setGraphName(l_strName);
                            l_mg.setGraphLabel(l_strValue);
                        } else if (l_strType.equals("draw")) {
                            l_mg.setGraphDraw(l_strValue);
                        } else if (l_strType.equals("type")) {
                            l_mg.setGraphType(l_strValue);
                        } else if (l_strType.equals("info")) {
                            l_mg.setGraphInfo(l_strValue);
                        } else if (l_strType.equals("negative")) {
                            // add to temporary negative list to set negatives later
                            tmp_negatives.add(l_strValue);
                        }

                        //System.out.println(l_strName); 
                        //System.out.println(l_strType);
                        //System.out.println(l_strValue);
                    } else {
                        // set plugin title
                        if (l_tmp.contains("graph_title")) {
                            l_mp.setPluginTitle(l_tmp.substring(12, l_tmp.length()));
                        }
                        // set plugin info, if any
                        if (l_tmp.contains("graph_info")) {
                            l_mp.setPluginInfo(l_tmp.substring(11, l_tmp.length()));
                        }
                        // set graph category
                        if (l_tmp.contains("graph_category")) {
                            l_mp.setPluginCategory(l_tmp.substring(15, l_tmp.length()));
                        }
                        // set graph vlabel
                        if (l_tmp.contains("graph_vlabel")) {
                            l_mp.setPluginLabel(l_tmp.substring(13, l_tmp.length()));
                        }
                        // set graph line draw mode
                        if (l_tmp.contains("graph_mxdraw")) {
                            l_mp.setStr_LineMode(l_tmp.substring(13, l_tmp.length()));
                        }
                    }

                }

                // add to pluginlist
                l_mp.addGraph(l_mg);

                Iterator it = l_mp.getGraphs().iterator();
                while (it.hasNext()) {
                    MuninGraph l_mpNg = (MuninGraph) it.next();
                    if (tmp_negatives.contains(l_mpNg.getGraphName())) {
                        l_mpNg.setNegative(true);
                    }
                }

                // add plugin if it got valid graphs and add nodeid (req. for alerts)
                if (l_mp.getGraphs().size() > 0) {
                    l_mp.set_NodeId(this.getNode_id());
                    getLoadedPlugins().add(l_mp);
                }
                // flush temporary negatives
                tmp_negatives.clear();
                l_mp = null;
                l_mp = new MuninPlugin();
                //String l_strGraphTitle = s.substring(s.indexOf("graph_title") + 11,s.length());
                //System.out.println(" - " + l_strGraphTitle);
            }
            cs.close();
            in.close();
            os.close();
            last_plugin_load = getUnixtime();
            //System.out.println(s);
        } else {
            cs.close();
            in.close();
            os.close();
            logger.warn("Error loading plugins on " + str_hostname + " (" + this.getNode_id()
                    + "). Check connectivity or munin-node");
        }
        /*
        for (MuninPlugin l_mn : getLoadedPlugins()) {
        i_GraphCount = i_GraphCount + l_mn.getGraphs().size();
        logger.debug(l_mn.getGraphs().size() + " graphs found for plugin: " + l_mn.getPluginName().toUpperCase() + " on node: " + this.getNodename());
        }*/
    } catch (Exception ex) {
        logger.error("Error loading plugins on " + str_hostname + " (" + this.getNode_id() + ") : "
                + ex.getMessage());
        ex.printStackTrace();
        return false;
    }

    return true;
}

From source file:com.almalence.plugins.capture.video.VideoCapturePlugin.java

protected void doExportVideo() {
    boolean onPause = this.onPause;
    this.onPause = false;
    boolean isDro = this.modeDRO();

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && (documentFileSaved != null || !isDro)) {
        DocumentFile fileSaved = VideoCapturePlugin.documentFileSaved;
        ArrayList<DocumentFile> filesListToExport = documentFilesList;
        String resultName = fileSaved.getName();
        DocumentFile resultFile = fileSaved;

        if (filesListToExport.size() > 0) {
            int inputFileCount = filesListToExport.size();
            if (!onPause)
                inputFileCount++;

            DocumentFile[] inputFiles = new DocumentFile[inputFileCount];

            for (int i = 0; i < filesListToExport.size(); i++) {
                inputFiles[i] = filesListToExport.get(i);
            }

            // If video recording hadn't been paused before STOP was
            // pressed, the last recorded file is not in the list with the
            // other files, so it has to be added to the list manually.
            if (!onPause) {
                inputFiles[inputFileCount - 1] = fileSaved;
            }

            resultFile = appendNew(inputFiles);

            // Remove merged files, except first one, because it stores the
            // result of merge.
            for (int i = 0; i < filesListToExport.size(); i++) {
                DocumentFile currentFile = filesListToExport.get(i);
                currentFile.delete();
            }

            // If video recording hadn't been paused before STOP was
            // pressed, the last recorded file is not in the list with the
            // other files, and should be deleted manually.
            if (!onPause)
                fileSaved.delete();

            String tmpName = resultFile.getName();
            if (resultFile.renameTo(resultName))
                ;

            // Make sure that there won't be a duplicate broken file
            // from phone memory showing up in the gallery.
            String args[] = { tmpName };
            ApplicationScreen.instance.getContentResolver().delete(Video.Media.EXTERNAL_CONTENT_URI,
                    Video.Media.DISPLAY_NAME + "=?", args);
        }

        String name = resultFile.getName();
        String data = null;
        // If we are able to get a File object, then get the path from it. The gallery
        // doesn't show the file if it's stored in phone memory, so
        // we need to insert the new file into the gallery manually.
        File file = Util.getFileFromDocumentFile(resultFile);
        if (file != null) {
            data = file.getAbsolutePath();
        } else {
            // This case should typically happen for files saved to SD
            // card.
            data = Util.getAbsolutePathFromDocumentFile(resultFile);
        }

        if (data != null) {
            values.put(VideoColumns.DISPLAY_NAME, name);
            values.put(VideoColumns.DATA, data);
            Uri uri = ApplicationScreen.instance.getContentResolver().insert(Video.Media.EXTERNAL_CONTENT_URI,
                    values);
            ApplicationScreen.getMainContext().sendBroadcast(new Intent(ACTION_NEW_VIDEO, uri));
        }
    } else {
        File fileSaved = VideoCapturePlugin.fileSaved;
        ArrayList<File> filesListToExport = filesList;

        File firstFile = fileSaved;

        if (filesListToExport.size() > 0) {
            firstFile = filesListToExport.get(0);

            int inputFileCount = filesListToExport.size();
            if (!onPause)
                inputFileCount++;

            File[] inputFiles = new File[inputFileCount];

            for (int i = 0; i < filesListToExport.size(); i++) {
                inputFiles[i] = filesListToExport.get(i);
            }

            if (!onPause)
                inputFiles[inputFileCount - 1] = fileSaved;

            File resultFile = append(inputFiles);

            for (int i = 0; i < filesListToExport.size(); i++) {
                File currentFile = filesListToExport.get(i);
                currentFile.delete();
            }

            if (resultFile != null) {
                if (!resultFile.getAbsoluteFile().equals(fileSaved.getAbsoluteFile())) {
                    fileSaved.delete();
                    resultFile.renameTo(fileSaved);
                }
            }
        }

        filesListToExport.clear();

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && isDro) {
            DocumentFile outputFile = getOutputMediaDocumentFile();
            File file = Util.getFileFromDocumentFile(outputFile);

            if (file != null) {
                // Don't do anything with outputFile. It's useless, remove
                // it.
                outputFile.delete();
                Uri uri = ApplicationScreen.instance.getContentResolver()
                        .insert(Video.Media.EXTERNAL_CONTENT_URI, values);
                ApplicationScreen.getMainContext().sendBroadcast(new Intent(ACTION_NEW_VIDEO, uri));
            } else {
                // Copy the result file from phone memory to the selected
                // folder on the SD card.
                InputStream is = null;
                int len;
                byte[] buf = new byte[4096];
                try {
                    OutputStream os = ApplicationScreen.instance.getContentResolver()
                            .openOutputStream(outputFile.getUri());
                    is = new FileInputStream(firstFile);
                    while ((len = is.read(buf)) > 0) {
                        os.write(buf, 0, len);
                    }
                    is.close();
                    os.close();
                    firstFile.delete();

                    // Make sure that there won't be a duplicate broken file
                    // from phone memory showing up in the gallery.
                    String args[] = { firstFile.getAbsolutePath() };
                    ApplicationScreen.instance.getContentResolver().delete(Video.Media.EXTERNAL_CONTENT_URI,
                            Video.Media.DATA + "=?", args);

                    String data = Util.getAbsolutePathFromDocumentFile(outputFile);
                    if (data != null) {
                        values.put(VideoColumns.DATA, data);
                        Uri uri = ApplicationScreen.instance.getContentResolver()
                                .insert(Video.Media.EXTERNAL_CONTENT_URI, values);
                        ApplicationScreen.getMainContext().sendBroadcast(new Intent(ACTION_NEW_VIDEO, uri));
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        } else {
            Uri uri = ApplicationScreen.instance.getContentResolver().insert(Video.Media.EXTERNAL_CONTENT_URI,
                    values);
            ApplicationScreen.getMainContext().sendBroadcast(new Intent(ACTION_NEW_VIDEO, uri));
        }
    }

    try {
        Thread.sleep(500);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    ApplicationScreen.getMessageHandler().sendEmptyMessage(ApplicationInterface.MSG_EXPORT_FINISHED);

}

From source file:com.krawler.spring.hrms.rec.job.hrmsRecJobController.java

public ModelAndView jobsearch(HttpServletRequest request, HttpServletResponse response) {
    KwlReturnObject result = null;
    JSONObject jobj = new JSONObject();
    JSONObject jobj1 = new JSONObject();
    String jobtype = "Internal";
    int count = 0;
    String status = "";
    String userid = request.getParameter("userid");
    String ss = request.getParameter("ss");
    int start = 0;
    int limit = 15;
    HashMap<String, Object> requestParams = new HashMap<String, Object>();
    ArrayList filter_names = new ArrayList(), filter_values = new ArrayList();
    if (request.getParameter("start") != null) {
        start = Integer.parseInt(request.getParameter("start"));
        limit = Integer.parseInt(request.getParameter("limit"));
    }

    try {
        List lst = null;
        if (StringUtil.isNullOrEmpty(request.getParameter("position"))) {
            filter_names.add("!jobtype");
            filter_names.add("company.companyID");
            filter_names.add("delflag");
            filter_names.add("<=startdate");
            filter_names.add(">=enddate");

            filter_values.add(jobtype);
            filter_values.add(sessionHandlerImplObj.getCompanyid(request));
            filter_values.add(0);
            filter_values.add(new Date());
            filter_values.add(new Date());

        } else {
            filter_names.add("position.id");
            filter_names.add("!jobtype");
            filter_names.add("company.companyID");
            filter_names.add("delflag");
            filter_names.add("<=startdate");
            filter_names.add(">=enddate");

            filter_values.add(request.getParameter("position"));
            filter_values.add(jobtype);
            filter_values.add(sessionHandlerImplObj.getCompanyid(request));
            filter_values.add(0);
            filter_values.add(new Date());
            filter_values.add(new Date());
        }

        requestParams.put("filter_names", filter_names);
        requestParams.put("filter_values", filter_values);
        requestParams.put("searchcol", new String[] { "jobid" });
        requestParams.put("ss", ss);
        requestParams.put("allflag", false);
        requestParams.put("start", start);
        requestParams.put("limit", limit);
        result = hrmsRecJobDAOObj.getPositionmain(requestParams);
        lst = result.getEntityList();
        count = result.getRecordTotalCount();
        for (int ctr = 0; ctr < count; ctr++) {
            Positionmain extmt = (Positionmain) lst.get(ctr);
            JSONObject tmpObj = new JSONObject();
            tmpObj.put("jid", extmt.getPositionid());
            //                status = getappPositionstatus(userid,extmt.getPositionid(), session, request);
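            // clear() removes the previous query's criteria so the filter lists can be reused for this record.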
            filter_names.clear();
            filter_values.clear();
            filter_names.add("configjobapplicant.id");
            filter_names.add("position.positionid");
            filter_names.add("delflag");
            filter_values.add(userid);
            filter_values.add(extmt.getPositionid());
            filter_values.add(0);
            requestParams.clear();
            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);
            result = hrmsRecJobDAOObj.getPositionstatus(requestParams);
            Allapplications app = null;
            if (StringUtil.checkResultobjList(result)) {
                app = (Allapplications) result.getEntityList().get(0);
                status = app.getStatus();
            } else {
                status = "none";
            }

            if (status.equalsIgnoreCase("none")) {
                tmpObj.put("status", 0);
                tmpObj.put("selectionstatus", messageSource.getMessage("hrms.recruitment.not.applied", null,
                        RequestContextUtils.getLocale(request)));
            } else {
                tmpObj.put("status", 1);
                tmpObj.put("applicationid", app.getId());
                tmpObj.put("selectionstatus", status);
            }
            tmpObj.put("jobname", extmt.getPosition().getValue());
            tmpObj.put("jobpositionid", extmt.getJobid());
            tmpObj.put("jdescription", extmt.getDetails());
            requestParams.clear();
            requestParams.put("request", request);
            DateFormat df = kwlCommonTablesDAOObj.getUserDateFormatter(
                    sessionHandlerImplObj.getDateFormatID(request),
                    sessionHandlerImplObj.getUserTimeFormat(request),
                    sessionHandlerImplObj.getTimeZoneDifference(request));
            tmpObj.put("jstartdate", df.format(extmt.getStartdate()));
            tmpObj.put("jenddate", df.format(extmt.getEnddate()));

            tmpObj.put("jdepartment", extmt.getDepartmentid().getValue());
            tmpObj.put("posmasterid", extmt.getPosition().getId());
            jobj.append("data", tmpObj);
        }
        if (jobj.isNull("data")) {
            jobj.put("data", new com.krawler.utils.json.JSONArray());
        }
        jobj.put("count", count);
        jobj1.put("data", jobj.toString());
        jobj1.put("valid", true);
    } catch (Exception e) {

    } finally {
        return new ModelAndView("jsonView", "model", jobj1.toString());
    }
}

From source file:diffhunter.Indexer.java

public void Make_Index(Database hashdb, String file_name, String read_gene_location)
        throws FileNotFoundException, IOException {
    Set_Parameters();
    //System.out.print("Sasa");
    ConcurrentHashMap<String, Map<Integer, Integer>> dic_gene_loc_count = new ConcurrentHashMap<>();
    ArrayList<String> lines_from_bed_file = new ArrayList<>();
    BufferedReader br = new BufferedReader(new FileReader(file_name));

    String line = br.readLine();
    List<String> toks = Arrays.asList(line.split("\t"));
    lines_from_bed_file.add(line);
    String last_Seen_chromosome = toks.get(0).replace("chr", "");
    line = br.readLine();
    lines_from_bed_file.add(line);
    toks = Arrays.asList(line.split("\t"));
    String new_chromosome = toks.get(0).replace("chr", "");

    while (((line = br.readLine()) != null) || lines_from_bed_file.size() > 0) {
        if (line != null) {
            toks = Arrays.asList(line.split("\t"));
            new_chromosome = toks.get(0).replace("chr", "");
        }
        // process the line.
        if (line == null || !new_chromosome.equals(last_Seen_chromosome)) {
            System.out.println("Processing chromosome" + "\t" + last_Seen_chromosome);
            last_Seen_chromosome = new_chromosome;
            lines_from_bed_file.parallelStream().forEach(content -> {

                List<String> inner_toks = Arrays.asList(content.split("\t"));
                // WARNING: the STRAND column index should be changed if the input format differs.
                String strand = inner_toks.get(5);
                String chromosome_ = inner_toks.get(0).replace("chr", "");
                if (!dic_Loc_gene.get(strand).containsKey(chromosome_)) {
                    return;
                }
                Integer start_loc = Integer.parseInt(inner_toks.get(1));
                Integer end_loc = Integer.parseInt(inner_toks.get(2));
                List<Interval<String>> res__ = dic_Loc_gene.get(strand).get(chromosome_).getIntervals(start_loc,
                        end_loc);
                //IntervalTree<String> pot_gene_name=new IntervalTree<>(res__);
                //                        for (int z = 0; z < pot_gene_name.Intervals.Count; z++)
                //{
                for (int z = 0; z < res__.size(); z++) {

                    dic_gene_loc_count.putIfAbsent(res__.get(z).getData(), new HashMap<>());
                    String gene_symbol = res__.get(z).getData();
                    Integer temp_gene_start_loc = dic_genes.get(gene_symbol).start_loc;
                    Integer temp_gene_end_loc = dic_genes.get(gene_symbol).end_loc;
                    if (start_loc < temp_gene_start_loc) {
                        start_loc = temp_gene_start_loc;
                    }
                    if (end_loc > temp_gene_end_loc) {
                        end_loc = temp_gene_end_loc;
                    }
                    synchronized (dic_synchrinzer_genes.get(gene_symbol)) {
                        for (int k = start_loc; k <= end_loc; k++) {
                            Integer value_inside = 0;
                            value_inside = dic_gene_loc_count.get(gene_symbol).get(k);
                            dic_gene_loc_count.get(gene_symbol).put(k,
                                    value_inside == null ? 1 : (value_inside + 1));
                        }
                    }
                }
            });
            /*                    List<string> keys_ = dic_gene_loc_count.Keys.ToList();
             List<string> alt_keys = new List<string>();// dic_gene_loc_count.Keys.ToList();
             for (int i = 0; i < keys_.Count; i++)
             {
             Dictionary<int, int> dicccc_ = new Dictionary<int, int>();
             dic_gene_loc_count[keys_[i]] = new Dictionary<int, int>(dic_gene_loc_count[keys_[i]].Where(x => x.Value >= 2).ToDictionary(x => x.Key, x => x.Value));
             if (dic_gene_loc_count[keys_[i]].Count == 0)
             {
                    
             dic_gene_loc_count.TryRemove(keys_[i], out dicccc_);
             continue;
             }
             hashdb.Put(Get_BDB(keys_[i]), Get_BDB_Dictionary(dic_gene_loc_count[keys_[i]]));
             alt_keys.Add(keys_[i]);
             dic_gene_loc_count.TryRemove(keys_[i], out dicccc_);
             }*/
            ArrayList<String> keys_ = new ArrayList<>(dic_gene_loc_count.keySet());
            ArrayList<String> alt_keys = new ArrayList<>();
            for (int i = 0; i < keys_.size(); i++) {

                //LinkedHashMap<Integer, Integer> tmep_map = new LinkedHashMap<>(dic_gene_loc_count.get(keys_.get(i)));
                LinkedHashMap<Integer, Integer> tmep_map = new LinkedHashMap<>();
                /*tmep_map = */
                dic_gene_loc_count.get(keys_.get(i)).entrySet().stream().filter(p -> p.getValue() >= 2)
                        .sorted(Comparator.comparing(E -> E.getKey()))
                        .forEach((entry) -> tmep_map.put(entry.getKey(), entry.getValue()));//.collect(Collectors.toMap(p -> p.getKey(), p -> p.getValue()));
                if (tmep_map.isEmpty()) {
                    dic_gene_loc_count.remove(keys_.get(i));
                    continue;
                }

                //Map<Integer, Integer> tmep_map1 = new LinkedHashMap<>();
                //tmep_map1=sortByKey(tmep_map);
                //tmep_map.entrySet().stream().sorted(Comparator.comparing(E -> E.getKey())).forEach((entry) -> tmep_map1.put(entry.getKey(), entry.getValue()));
                //BerkeleyDB_Box box=new BerkeleyDB_Box();
                hashdb.put(null, BerkeleyDB_Box.Get_BDB(keys_.get(i)),
                        BerkeleyDB_Box.Get_BDB_Dictionary(tmep_map));
                alt_keys.add(keys_.get(i));
                dic_gene_loc_count.remove(keys_.get(i));
                //dic_gene_loc_count.put(keys_.get(i),tmep_map);
            }

            hashdb.sync();
            int a = 1111;
            /*                    hashdb.Sync();
             File.AppendAllLines("InputDB\\" + Path.GetFileNameWithoutExtension(file_name) + "_genes.txt", alt_keys);
             //total_lines_processed_till_now += lines_from_bed_file.Count;
             //worker.ReportProgress(total_lines_processed_till_now / count_);
             lines_from_bed_file.Clear();
             if (!reader.EndOfStream)
             {
             lines_from_bed_file.Add(_line_);
             }
             last_Seen_chromosome = new_choromosome;*/
            lines_from_bed_file.clear();
            if (line != null) {
                lines_from_bed_file.add(line);
            }
            Path p = Paths.get(file_name);
            file_name = p.getFileName().toString();

            BufferedWriter output = new BufferedWriter(new FileWriter((Paths
                    .get(read_gene_location, FilenameUtils.removeExtension(file_name) + ".txt").toString()),
                    true));
            for (String alt_key : alt_keys) {
                output.append(alt_key);
                output.newLine();
            }
            output.close();
            /*if (((line = br.readLine()) != null))
            {
            lines_from_bed_file.add(line);
            toks=Arrays.asList(line.split("\t"));
            new_chromosome=toks.get(0).replace("chr", "");
            }*/
            //last_Seen_chromosome=new_chromosome;
        } else if (new_chromosome.equals(last_Seen_chromosome)) {
            lines_from_bed_file.add(line);
        }

    }
    br.close();
    hashdb.sync();
    hashdb.close();

}
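
The snippet above clears and reuses the same ArrayList (lines_from_bed_file) each time the chromosome changes, carrying the line that triggered the switch into the next pass. Below is a minimal, self-contained sketch of that clear-and-reseed pattern; all names and data here are illustrative and not taken from the original project.

import java.util.ArrayList;

public class BufferedChromosomeReader {
    public static void main(String[] args) {
        ArrayList<String> buffer = new ArrayList<>();
        String lastSeen = "1";
        String[] lines = { "chr1\t10\t20", "chr1\t30\t40", "chr2\t5\t15" };
        for (String line : lines) {
            String chromosome = line.split("\t")[0].replace("chr", "");
            if (!chromosome.equals(lastSeen)) {
                flush(lastSeen, buffer); // process the finished chromosome
                buffer.clear();          // reuse the same list for the next chromosome
                lastSeen = chromosome;
            }
            buffer.add(line);
        }
        flush(lastSeen, buffer);
    }

    // Stand-in for the per-chromosome processing done in the original code.
    private static void flush(String chromosome, ArrayList<String> lines) {
        System.out.println("chr" + chromosome + ": " + lines.size() + " lines");
    }
}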

From source file:com.all4tec.sa.maven.proguard.ProGuardMojo.java

public void execute() throws MojoExecutionException, MojoFailureException {

    log = getLog();

    if (skip) {
        log.info("Bypass ProGuard processing because \"proguard.skip=true\"");
        return;
    }

    boolean mainIsJar = mavenProject.getPackaging().equals("jar");
    boolean mainIsPom = mavenProject.getPackaging().equals("pom");

    File inJarFile = new File(outputDirectory, injar);
    if (mainIsJar && (!inJarFile.exists())) {
        if (injarNotExistsSkip) {
            log.info("Bypass ProGuard processing because \"injar\" dos not exist");
            return;
        }
        throw new MojoFailureException("Can't find file " + inJarFile);
    }

    if (mainIsPom && (!inJarFile.exists()) && injarNotExistsSkip) {
        log.info("Bypass ProGuard processing because \"injar\" dos not exist");
        return;
    }

    if (!outputDirectory.exists()) {
        if (!outputDirectory.mkdirs()) {
            throw new MojoFailureException("Can't create " + outputDirectory);
        }
    }

    File outJarFile;
    boolean sameArtifact;

    if (attach) {
        outjar = nameNoType(injar);
        if (useArtifactClassifier()) {
            outjar += "-" + attachArtifactClassifier;
        }
        outjar += "." + attachArtifactType;
    }

    if ((outjar != null) && (!outjar.equals(injar))) {
        sameArtifact = false;
        outJarFile = (new File(outputDirectory, outjar)).getAbsoluteFile();
        if (outJarFile.exists()) {
            if (!deleteFileOrDirectory(outJarFile)) {
                throw new MojoFailureException("Can't delete " + outJarFile);
            }
        }
    } else {
        sameArtifact = true;
        outJarFile = inJarFile.getAbsoluteFile();
        File baseFile;
        if (inJarFile.isDirectory()) {
            baseFile = new File(outputDirectory, nameNoType(injar) + "_proguard_base");
        } else {
            baseFile = new File(outputDirectory, nameNoType(injar) + "_proguard_base.jar");
        }
        if (baseFile.exists()) {
            if (!deleteFileOrDirectory(baseFile)) {
                throw new MojoFailureException("Can't delete " + baseFile);
            }
        }
        if (inJarFile.exists()) {
            if (!inJarFile.renameTo(baseFile)) {
                throw new MojoFailureException("Can't rename " + inJarFile);
            }
        }
        inJarFile = baseFile;
    }

    ArrayList<String> args = new ArrayList<String>();

    if (log.isDebugEnabled()) {
        List dependency = mavenProject.getCompileArtifacts();
        for (Iterator i = dependency.iterator(); i.hasNext();) {
            Artifact artifact = (Artifact) i.next();
            log.debug("--- compile artifact " + artifact.getGroupId() + ":" + artifact.getArtifactId() + ":"
                    + artifact.getType() + ":" + artifact.getClassifier() + " Scope:" + artifact.getScope());
        }
        for (Iterator i = mavenProject.getArtifacts().iterator(); i.hasNext();) {
            Artifact artifact = (Artifact) i.next();
            log.debug("--- artifact " + artifact.getGroupId() + ":" + artifact.getArtifactId() + ":"
                    + artifact.getType() + ":" + artifact.getClassifier() + " Scope:" + artifact.getScope());
        }
        for (Iterator i = mavenProject.getDependencies().iterator(); i.hasNext();) {
            Dependency artifact = (Dependency) i.next();
            log.debug("--- dependency " + artifact.getGroupId() + ":" + artifact.getArtifactId() + ":"
                    + artifact.getType() + ":" + artifact.getClassifier() + " Scope:" + artifact.getScope());
        }
    }

    Set inPath = new HashSet();
    boolean hasInclusionLibrary = false;
    if (assembly != null) {
        for (Iterator iter = assembly.inclusions.iterator(); iter.hasNext();) {
            Inclusion inc = (Inclusion) iter.next();
            if (!inc.library) {
                File file = getClasspathElement(getDependancy(inc, mavenProject), mavenProject);
                inPath.add(file.toString());
                log.debug("--- ADD injars:" + inc.artifactId);
                StringBuffer filter = new StringBuffer(fileToString(file));
                filter.append("(!META-INF/MANIFEST.MF");
                if (!addMavenDescriptor) {
                    filter.append(",");
                    filter.append("!META-INF/maven/**");
                }
                if (inc.filter != null) {
                    filter.append(",").append(inc.filter);
                }
                filter.append(")");
                args.add("-injars");
                args.add(filter.toString());
            } else {
                hasInclusionLibrary = true;
                log.debug("--- ADD libraryjars:" + inc.artifactId);
                // This may not be CompileArtifacts, maven 2.0.6 bug
                File file = getClasspathElement(getDependancy(inc, mavenProject), mavenProject);
                inPath.add(file.toString());
                args.add("-libraryjars");
                args.add(fileToString(file));
            }
        }
    }

    if ((!mainIsPom) && inJarFile.exists()) {
        args.add("-injars");
        StringBuffer filter = new StringBuffer(fileToString(inJarFile));
        if ((inFilter != null) || (!addMavenDescriptor)) {
            filter.append("(");
            boolean coma = false;

            if (!addMavenDescriptor) {
                coma = true;
                filter.append("!META-INF/maven/**");
            }

            if (inFilter != null) {
                if (coma) {
                    filter.append(",");
                }
                filter.append(inFilter);
            }

            filter.append(")");
        }
        args.add(filter.toString());
    }
    args.add("-outjars");
    args.add(fileToString(outJarFile));

    if (!obfuscate) {
        args.add("-dontobfuscate");
    }

    if (proguardInclude != null) {
        if (proguardInclude.exists()) {
            args.add("-include");
            args.add(fileToString(proguardInclude));
            log.debug("proguardInclude " + proguardInclude);
        } else {
            log.debug("proguardInclude config does not exists " + proguardInclude);
        }
    }

    if (includeDependency) {
        List dependency = this.mavenProject.getCompileArtifacts();
        for (Iterator i = dependency.iterator(); i.hasNext();) {
            Artifact artifact = (Artifact) i.next();
            // dependency filter
            if (isExclusion(artifact)) {
                continue;
            }
            File file = getClasspathElement(artifact, mavenProject);

            if (inPath.contains(file.toString())) {
                log.debug("--- ignore libraryjars since one in injar:" + artifact.getArtifactId());
                continue;
            }
            log.debug("--- ADD libraryjars:" + artifact.getArtifactId());
            args.add("-libraryjars");
            args.add(fileToString(file));
        }
    }

    if (libs != null) {
        for (Iterator i = libs.iterator(); i.hasNext();) {
            Object lib = i.next();
            args.add("-libraryjars");
            args.add(fileNameToString(lib.toString()));
        }
    }

    args.add("-printmapping");
    args.add(fileToString((new File(outputDirectory, "proguard_map.txt").getAbsoluteFile())));

    args.add("-printseeds");
    args.add(fileToString((new File(outputDirectory, "proguard_seeds.txt").getAbsoluteFile())));

    if (log.isDebugEnabled()) {
        args.add("-verbose");
    }

    if (options != null) {
        for (int i = 0; i < options.length; i++) {
            args.add(options[i]);
        }
    }

    // Check whether args should be written to a ProGuard configuration file instead of passed inline:
    // if the total size of the args exceeds 32k, the process launch will fail
    File vTempFile = null;
    if (writeCommandLineToFile) {
        log.info("Transform command line in file configuration");
        vTempFile = createFileConfiguration(args, mavenProject, outputDirectory);

        // Remove all args, and add just path to Proguard configuration file just created
        args.clear();
        args.add("@" + vTempFile.getAbsolutePath());
        log.info("Configuration file created : " + vTempFile.getAbsolutePath());
    }

    log.info("execute ProGuard " + args.toString());

    proguardMain(getProguardJar(this), args, this);

    if ((assembly != null) && (hasInclusionLibrary)) {

        log.info("creating assembly");

        File baseFile = new File(outputDirectory, nameNoType(injar) + "_proguard_result.jar");
        if (baseFile.exists()) {
            if (!baseFile.delete()) {
                throw new MojoFailureException("Can't delete " + baseFile);
            }
        }
        File archiverFile = outJarFile.getAbsoluteFile();
        if (!outJarFile.renameTo(baseFile)) {
            throw new MojoFailureException("Can't rename " + outJarFile);
        }

        MavenArchiver archiver = new MavenArchiver();
        archiver.setArchiver(jarArchiver);
        archiver.setOutputFile(archiverFile);
        archive.setAddMavenDescriptor(addMavenDescriptor);

        try {
            jarArchiver.addArchivedFileSet(baseFile);

            for (Iterator iter = assembly.inclusions.iterator(); iter.hasNext();) {
                Inclusion inc = (Inclusion) iter.next();
                if (inc.library) {
                    File file;
                    Artifact artifact = getDependancy(inc, mavenProject);
                    file = getClasspathElement(artifact, mavenProject);
                    if (file.isDirectory()) {
                        getLog().info("merge project: " + artifact.getArtifactId() + " " + file);
                        jarArchiver.addDirectory(file);
                    } else {
                        getLog().info("merge artifact: " + artifact.getArtifactId());
                        jarArchiver.addArchivedFileSet(file);
                    }
                }
            }

            archiver.createArchive(mavenProject, archive);

        } catch (Exception e) {
            throw new MojoExecutionException("Unable to create jar", e);
        }

    }

    if (attach && !sameArtifact) {
        if (useArtifactClassifier()) {
            projectHelper.attachArtifact(mavenProject, attachArtifactType, attachArtifactClassifier,
                    outJarFile);
        } else {
            projectHelper.attachArtifact(mavenProject, attachArtifactType, null, outJarFile);
        }
    }
}
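
The args.clear() call above swaps a long argument list for a single "@" reference to a configuration file when the accumulated ProGuard options risk exceeding the command-line length limit. The following is a minimal sketch of that idea with hypothetical file names; it is not the plugin's actual createFileConfiguration implementation.

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;

public class ArgsToConfigFile {
    public static void main(String[] args) throws IOException {
        ArrayList<String> proguardArgs = new ArrayList<>();
        proguardArgs.add("-injars");
        proguardArgs.add("target/app.jar");
        proguardArgs.add("-dontobfuscate");

        // Write one option per line into a temporary configuration file.
        File config = File.createTempFile("proguard", ".conf");
        Files.write(config.toPath(), proguardArgs);

        // Replace every option with a single reference to the configuration file.
        proguardArgs.clear();
        proguardArgs.add("@" + config.getAbsolutePath());
        System.out.println(proguardArgs);
    }
}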

From source file:org.telegram.android.MessagesController.java

private boolean processUpdatesQueue(int type, int state) {
    ArrayList<TLRPC.Updates> updatesQueue = null;
    if (type == 0) {
        updatesQueue = updatesQueueSeq;
        Collections.sort(updatesQueue, new Comparator<TLRPC.Updates>() {
            @Override
            public int compare(TLRPC.Updates updates, TLRPC.Updates updates2) {
                return AndroidUtilities.compare(getUpdateSeq(updates), getUpdateSeq(updates2));
            }
        });
    } else if (type == 1) {
        updatesQueue = updatesQueuePts;
        Collections.sort(updatesQueue, new Comparator<TLRPC.Updates>() {
            @Override
            public int compare(TLRPC.Updates updates, TLRPC.Updates updates2) {
                return AndroidUtilities.compare(updates.pts, updates2.pts);
            }
        });
    } else if (type == 2) {
        updatesQueue = updatesQueueQts;
        Collections.sort(updatesQueue, new Comparator<TLRPC.Updates>() {
            @Override
            public int compare(TLRPC.Updates updates, TLRPC.Updates updates2) {
                return AndroidUtilities.compare(updates.qts, updates2.qts);
            }
        });
    }
    if (!updatesQueue.isEmpty()) {
        boolean anyProceed = false;
        if (state == 2) {
            TLRPC.Updates updates = updatesQueue.get(0);
            if (type == 0) {
                MessagesStorage.lastSeqValue = getUpdateSeq(updates);
            } else if (type == 1) {
                MessagesStorage.lastPtsValue = updates.pts;
            } else if (type == 2) {
                MessagesStorage.lastQtsValue = updates.qts;
            }
        }
        for (int a = 0; a < updatesQueue.size(); a++) {
            TLRPC.Updates updates = updatesQueue.get(a);
            int updateState = isValidUpdate(updates, type);
            if (updateState == 0) {
                processUpdates(updates, true);
                anyProceed = true;
                updatesQueue.remove(a);
                a--;
            } else if (updateState == 1) {
                if (getUpdatesStartTime(type) != 0
                        && (anyProceed || getUpdatesStartTime(type) + 1500 > System.currentTimeMillis())) {
                    FileLog.e("tmessages", "HOLE IN UPDATES QUEUE - will wait more time");
                    if (anyProceed) {
                        setUpdatesStartTime(type, System.currentTimeMillis());
                    }
                    return false;
                } else {
                    FileLog.e("tmessages", "HOLE IN UPDATES QUEUE - getDifference");
                    setUpdatesStartTime(type, 0);
                    updatesQueue.clear();
                    getDifference();
                    return false;
                }
            } else {
                updatesQueue.remove(a);
                a--;
            }
        }
        updatesQueue.clear();
        FileLog.e("tmessages", "UPDATES QUEUE PROCEED - OK");
    }
    setUpdatesStartTime(type, 0);
    return true;
}
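
Here updatesQueue.clear() serves two purposes: dropping the whole queue when a hole is detected (before falling back to getDifference) and emptying it once every queued update has been applied. Below is a minimal sketch of that drain-or-reset pattern, with purely illustrative types and values.

import java.util.ArrayList;

public class UpdateQueueDemo {
    public static void main(String[] args) {
        ArrayList<Integer> queue = new ArrayList<>();
        queue.add(1);
        queue.add(2);
        queue.add(5); // gap: updates 3 and 4 are missing
        int lastApplied = 0;
        for (int i = 0; i < queue.size(); i++) {
            int update = queue.get(i);
            if (update == lastApplied + 1) {
                lastApplied = update; // apply updates strictly in order
                queue.remove(i);
                i--;
            } else {
                queue.clear();        // hole in the queue: drop everything and resync
                System.out.println("gap detected, requesting full difference");
                return;
            }
        }
        queue.clear();                // all queued updates applied
        System.out.println("queue processed, last applied = " + lastApplied);
    }
}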

From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java

/**
 * Batched version of putCheckpointState. This is a complicated method with
 * very different behaviors for updates and inserts. If update is true, it
 * attempts to batch-update all the checkpoints and for those
 * updates/inserts that failed, it attempts to individually update/insert
 * them through
 * {@link #putCheckpointState(String, int, Set, int, Ballot, String, int)}.
 * It is still possible that only a subset of the updates succeed, but that
 * is okay as checkpoint failure is not fatal except in the case of initial
 * checkpoint insertion.
 * 
 * If update is false, it means that this is a batch-insertion of initial
 * checkpoints, and it is critical that this batch operation is atomic. If
 * the batch operation only partly succeeds, it should throw an exception so
 * that the caller cannot proceed any further with the batch insertion, but
 * it should also roll back the changes.
 * 
 * The reason batched creation of initial checkpoints should be atomic is
 * that otherwise, the checkpoints that did get written are essentially
 * created paxos instances, but there is no easy way for the caller to know
 * that they got created and this could lead to nasty surprises later. If
 * the caller always follows up failed batch creations with sequential
 * creation, then the rollback is not critical as the sequential creation
 * will simply "recover" from the checkpoint if any left behind during a
 * previous failed batch creation. If the caller chooses to keep re-issuing
 * the batch creation and expects to eventually succeed (assuming that the
 * instances in the batch didn't actually exist a priori), then rolling back
 * failed batch creations like below will not help in the event of crashes.
 * So, the caller should really just issue sequential creation requests if a
 * batch creation fails or times out for any reason.
 * 
 * Note: this method just needs to be atomic, i.e., all or none, but not
 * synchronized. Synchronizing it will invert the invariant that messageLog
 * is always locked before (because of the getMinLogFile invocation)
 * SQLPaxosLogger.
 * 
 * @param tasks
 * @param update
 */
@Override
public boolean putCheckpointState(CheckpointTask[] tasks, boolean update) {
    if (isClosed() || DISABLE_CHECKPOINTING)
        return false;

    boolean batchSuccess = true;
    boolean[] committed = new boolean[tasks.length];
    long t1 = System.currentTimeMillis();
    String insertCmd = "insert into " + getCTable()
            + " (version,members,slot,ballotnum,coordinator,state,create_time, min_logfile, paxos_id) values (?,?,?,?,?,?,?,?,?)";

    String updateCmd = "update " + getCTable()
            + " set version=?,members=?, slot=?, ballotnum=?, coordinator=?, state=?, create_time=?, min_logfile=? where paxos_id=?";
    String cmd = update ? updateCmd : insertCmd;
    PreparedStatement insertCP = null;
    Connection conn = null;
    String minLogfile = null;
    ArrayList<Integer> batch = new ArrayList<Integer>();
    try {
        for (int i = 0; i < tasks.length; i++) {
            CheckpointTask task = tasks[i];
            assert (task != null);
            assert (update || task.slot == 0);
            if ((task.slot == 0) == update) {
                this.putCheckpointState(task.paxosID, task.version, (task.members), task.slot, task.ballot,
                        task.state, task.gcSlot, task.createTime);
                committed[i] = true;
                continue;
            }
            if (conn == null) {
                conn = this.getDefaultConn();
                conn.setAutoCommit(false);
                insertCP = conn.prepareStatement(cmd);
            }
            insertCP.setInt(1, task.version);
            insertCP.setString(2, Util.toJSONString(task.members));
            insertCP.setInt(3, task.slot);
            insertCP.setInt(4, task.ballot.ballotNumber);
            insertCP.setInt(5, task.ballot.coordinatorID);
            if (getCheckpointBlobOption()) {
                Blob blob = conn.createBlob();
                blob.setBytes(1, task.state.getBytes(CHARSET));
                insertCP.setBlob(6, blob);
            } else
                insertCP.setString(6, task.state);
            insertCP.setLong(7, task.createTime);
            insertCP.setString(8, minLogfile = this.getSetGCAndGetMinLogfile(task.paxosID, task.version,
                    task.slot - task.gcSlot < 0 ? task.slot : task.gcSlot));
            insertCP.setString(9, task.paxosID);
            insertCP.addBatch();
            batch.add(i);
            incrTotalCheckpoints();
            if (shouldLogCheckpoint(1))
                log.log(Level.INFO, "{0} checkpointed> ({1}:{2}, {3}{4}, {5}, ({6}, {7}) [{8}]) {9}",
                        new Object[] { this, task.paxosID, task.version, (task.members), task.slot, task.ballot,
                                task.gcSlot, minLogfile,
                                Util.truncate(task.state, TRUNCATED_STATE_SIZE, TRUNCATED_STATE_SIZE),
                                (tasks.length > 1 ? "(batched=" + tasks.length + ")" : "") });

            if ((i + 1) % MAX_DB_BATCH_SIZE == 0 || (i + 1) == tasks.length) {
                int[] executed = insertCP.executeBatch();
                conn.commit();
                insertCP.clearBatch();
                for (int j = 0; j < executed.length; j++)
                    batchSuccess = batchSuccess && (committed[batch.get(j)] = (executed[j] > 0));
                batch.clear();
            }
        }
        if (ENABLE_INSTRUMENTATION && Util.oneIn(10))
            DelayProfiler.updateDelay("checkpoint", t1, tasks.length);
    } catch (SQLException | UnsupportedEncodingException sqle) {
        log.log(Level.SEVERE, "{0} SQLException while batched checkpointing", new Object[] { this });
        sqle.printStackTrace();
    } finally {
        cleanup(insertCP);
        cleanup(conn);
    }

    if (!batchSuccess) {
        if (update) {
            for (int i = 0; i < tasks.length; i++)
                if (!committed[i])
                    this.putCheckpointState(tasks[i].paxosID, tasks[i].version, tasks[i].members, tasks[i].slot,
                            tasks[i].ballot, tasks[i].state, tasks[i].gcSlot);
        } else {
            // rollback
            for (int i = 0; i < tasks.length; i++)
                if (committed[i])
                    this.deleteCheckpoint(tasks[i].paxosID, tasks[i].version, tasks[i].members, tasks[i].slot,
                            tasks[i].ballot, tasks[i].state, tasks[i].gcSlot);

            throw new PaxosInstanceCreationException(
                    "Rolled back failed batch-creation of " + tasks.length + " paxos instances");
        }
    }

    for (CheckpointTask task : tasks)
        this.deleteOutdatedMessages(task.paxosID, task.version, task.ballot, task.slot,
                task.ballot.ballotNumber, task.ballot.coordinatorID, task.gcSlot);
    return true;
}
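
In the batched checkpoint code, the ArrayList named batch only tracks which task indices went into the current JDBC batch; after executeBatch() and commit() the results are mapped back through those indices and batch.clear() resets the bookkeeping for the next chunk. Below is a minimal, database-free sketch of that index-tracking pattern; the flush helper is a stand-in for executeBatch() plus commit() and is not part of the original class.

import java.util.ArrayList;
import java.util.Arrays;

public class BatchTrackerDemo {
    static final int BATCH_SIZE = 3;

    public static void main(String[] args) {
        boolean[] committed = new boolean[7];
        ArrayList<Integer> batch = new ArrayList<>();
        for (int i = 0; i < committed.length; i++) {
            batch.add(i); // stand-in for insertCP.addBatch()
            if ((i + 1) % BATCH_SIZE == 0 || (i + 1) == committed.length) {
                int[] executed = flush(batch.size()); // stand-in for executeBatch() + commit()
                for (int j = 0; j < executed.length; j++) {
                    committed[batch.get(j)] = executed[j] > 0;
                }
                batch.clear(); // start the next batch with fresh index bookkeeping
            }
        }
        System.out.println("committed rows: " + Arrays.toString(committed));
    }

    // Pretends that every row in the batch succeeded.
    private static int[] flush(int size) {
        int[] executed = new int[size];
        Arrays.fill(executed, 1);
        return executed;
    }
}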