Example usage for java.util ArrayList clear

List of usage examples for java.util ArrayList clear

Introduction

On this page you can find example usages of the java.util.ArrayList clear method.

Prototype

public void clear() 

Source Link

Document

Removes all of the elements from this list.

Usage

From source file:com.hygenics.parser.GetImages.java

/**
 * Pulls image URLs from the database in id-ranged chunks, downloads the images in
 * parallel on a {@link ForkJoinPool}, and (when {@code addtoDB} is set) posts the
 * results back to the database in batches of {@code commitsize}.
 *
 * <p>Iterates over the id space starting at {@code offset}; when an iteration finds
 * fewer images than expected it rewinds {@code offset} and retries up to
 * {@code iterations} times (always retries while {@code tillfound} is set).
 */
private void getImages() {
    log.info("Setting Up Pull");
    String[] proxyarr = (proxies == null) ? null : proxies.split(",");

    // remove stale output from a previous run
    if (cleanup) {
        cleanupDir(fpath);
    }

    // accept all cookies so hosts that require a session cookie still serve images
    CookieManager cm = new CookieManager();
    cm.setCookiePolicy(CookiePolicy.ACCEPT_ALL);
    CookieHandler.setDefault(cm);

    int numimages = 0;
    int iter = 0;
    int found = 0;

    // set proxy if needed
    if (proxyuser != null) {
        proxy(proxyhost, proxyport, https, proxyuser, proxypass);
    }

    ArrayList<String> postImages = new ArrayList<String>();
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    Set<Callable<String>> pulls = new HashSet<Callable<String>>(commitsize);
    Set<Callable<ArrayList<String>>> sqls = new HashSet<Callable<ArrayList<String>>>(numqueries);
    List<Future<String>> imageFutures;

    ArrayList<String> images;
    // FIX: cast before dividing -- integer division made Math.ceil a no-op
    int chunksize = (int) Math.ceil((double) commitsize / numqueries);
    log.info("Chunksize: " + chunksize);
    if (baseurl != null || baseurlcolumn != null) {
        do {
            log.info("Offset: " + offset);
            log.info("Getting Images");
            images = new ArrayList<String>(commitsize);
            log.info("Getting Columns");
            // build one ranged query per worker so the id space is covered in chunks
            for (int n = 0; n < numqueries; n++) {
                String tempsql = sql + " WHERE " + idString + " >= " + offset + " AND " + idString + " < "
                        + (offset + chunksize);

                if (conditions != null) {
                    tempsql += conditions;
                }

                sqls.add(new QueryDatabase(
                        ((extracondition != null) ? tempsql + " " + extracondition : tempsql)));

                offset += chunksize;
            }

            List<Future<ArrayList<String>>> futures = fjp.invokeAll(sqls);
            // FIX: re-create the set so queries from previous iterations are not re-run
            sqls = new HashSet<Callable<ArrayList<String>>>(numqueries);

            // invokeAll returns completed futures, so get() does not block here
            for (Future<ArrayList<String>> f : futures) {
                try {
                    ArrayList<String> fjson = f.get();
                    if (fjson.size() > 0) {
                        images.addAll(fjson);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // FIX: restore interrupt status
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
            log.info(Integer.toString(images.size()) + " image links found. Pulling.");

            ArrayList<String> tempproxies = new ArrayList<String>();
            if (proxyarr != null) {
                for (String proxy : proxyarr) {
                    tempproxies.add(proxy.trim());
                }
            }

            if (maxproxies > 0) {
                maxproxies -= 1; // 0 and 1 are equivalent conditions since checks use >=
            }

            // download each image, rotating through the proxy pool
            for (int num = 0; num < images.size(); num++) {
                String icols = images.get(num);
                // FIX: parenthesize the product -- the original cast applied only to
                // Math.random(), which truncated to 0 and always chose the first proxy
                int proxnum = (int) (Math.random() * tempproxies.size());
                String proxy = tempproxies.isEmpty() ? null : tempproxies.get(proxnum);

                pulls.add(new ImageGrabber(icols, proxy));

                // a proxy is used at most once per batch
                if (proxy != null) {
                    tempproxies.remove(proxy);
                }

                // flush the batch at the end of the list, at commitsize, or when
                // the proxy pool is exhausted
                if (num + 1 == images.size() || pulls.size() >= commitsize || tempproxies.size() == 0) {
                    if (tempproxies.size() == 0 && proxies != null) {
                        // refill the proxy pool for the next batch
                        tempproxies = new ArrayList<String>(proxyarr.length);
                        for (String p : proxyarr) {
                            tempproxies.add(p.trim());
                        }
                    }

                    imageFutures = fjp.invokeAll(pulls);
                    // FIX: the original condition (isQuiescent() && active > 0) is a
                    // contradiction and never waited; wait until the pool quiets down
                    while (!fjp.isQuiescent() && fjp.getActiveThreadCount() > 0) {
                        Thread.yield();
                    }

                    for (Future<String> f : imageFutures) {
                        try {
                            String add = f.get();
                            if (add != null) {
                                postImages.add(add);
                            }
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt(); // FIX: restore interrupt status
                            e.printStackTrace();
                        } catch (ExecutionException e) {
                            e.printStackTrace();
                        }
                    }
                    imageFutures = null; // garbage-collect eligible
                    pulls = new HashSet<Callable<String>>(commitsize);
                }

                // commit a full batch of results to the database
                if (postImages.size() >= commitsize && addtoDB) {
                    numimages += postImages.size();
                    found += postImages.size();
                    postToDatabase(fjp, postImages);
                    postImages.clear();
                }
            }

            // commit any remaining results from this iteration
            if (postImages.size() > 0 && addtoDB) {
                numimages += postImages.size();
                found += postImages.size();
                postToDatabase(fjp, postImages);
                postImages.clear();
            }

            // handle iteration specs: retry the same offset range if images are missing
            iter += 1;
            log.info("Iteration: " + iter);
            if ((iter < iterations && found < images.size()) || tillfound) {
                log.info("Not All Images Obtained Trying Iteration " + iter + " of " + iterations);
                offset -= commitsize;
            } else {
                log.info("Images Obtained in " + iter + " iterations. Continuing.");
                iter = 0;
            }

        } while (images.size() > 0 && iter < iterations);

        // FIX: the original condition was inverted (shut down only if already shut
        // down), leaking the pool's threads
        if (!fjp.isShutdown()) {
            fjp.shutdown();
        }
    }

    log.info("Complete. Check for Errors \n " + numimages + " Images Found");
}

/**
 * Splits {@code postImages} into up to {@code numqueries} slices, posts each slice
 * to the database on the pool, and waits for the pool to go quiet.
 *
 * @param fjp        pool the {@code ImagePost} tasks run on
 * @param postImages downloaded image results to persist
 */
private void postToDatabase(ForkJoinPool fjp, List<String> postImages) {
    log.info("Posting to Database");
    log.info("Found " + postImages.size() + " images");
    int size = postImages.size() / numqueries;
    for (int n = 0; n < numqueries; n++) {
        if (((n + 1) * size) < postImages.size() && (n + 1) < numqueries) {
            fjp.execute(new ImagePost(postImages.subList(n * size, (n + 1) * size)));
        } else {
            // FIX: upper bound is size(), not size() - 1 -- the original silently
            // dropped the last image of every batch; break so the tail is posted once
            fjp.execute(new ImagePost(postImages.subList(n * size, postImages.size())));
            break;
        }
    }
    // wait for the ImagePost tasks submitted via execute() to finish
    while (!fjp.isQuiescent()) {
        Thread.yield();
    }
}

From source file:ldbc.snb.datagen.generator.CommentGenerator.java

/**
 * Generates up to {@code numComments} comments replying to {@code post} (or to
 * previously generated comments on it) and exports each one via {@code exporter}.
 *
 * <p>Each comment picks a random reply target from the candidate list, then a random
 * forum member who joined early enough to have seen that target. Roughly one third of
 * comments get generated long-form text with tags; the rest are short replies (or,
 * in rich-RDF mode, occasionally a GIF). Long comments become reply candidates
 * themselves, so comment threads can nest.
 *
 * <p>NOTE(review): the sequence of draws from {@code randomFarm} determines the
 * output, so statement order here must not be changed.
 *
 * @param startId first message id to assign; ids are consumed sequentially
 * @return the next unused id (returned early if no member is eligible to reply)
 * @throws IOException if the exporter fails
 */
public long createComments(RandomGeneratorFarm randomFarm, final Forum forum, final Post post, long numComments,
        long startId, PersonActivityExporter exporter) throws IOException {
    long nextId = startId;
    ArrayList<Message> replyCandidates = new ArrayList<Message>();
    replyCandidates.add(post); // the post itself is the first valid reply target

    Properties prop = new Properties();
    prop.setProperty("type", "comment");
    for (int i = 0; i < numComments; ++i) {
        int replyIndex = randomFarm.get(RandomGeneratorFarm.Aspect.REPLY_TO).nextInt(replyCandidates.size());
        Message replyTo = replyCandidates.get(replyIndex);
        // only members who joined at least deltaTime before the target message may reply
        ArrayList<ForumMembership> validMemberships = new ArrayList<ForumMembership>();
        for (ForumMembership fM : forum.memberships()) {
            if (fM.creationDate() + DatagenParams.deltaTime <= replyTo.creationDate()) {
                validMemberships.add(fM);
            }
        }
        if (validMemberships.size() == 0) {
            return nextId;
        }
        ForumMembership member = validMemberships.get(
                randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX).nextInt(validMemberships.size()));
        TreeSet<Integer> tags = new TreeSet<Integer>();
        String content = "";
        String gif = "";

        boolean isShort = false;
        if (randomFarm.get(RandomGeneratorFarm.Aspect.REDUCED_TEXT).nextDouble() > 0.6666) {
            // long-form comment: inherit ~half the parent's tags plus related tags

            ArrayList<Integer> currentTags = new ArrayList<Integer>();
            Iterator<Integer> it = replyTo.tags().iterator();
            while (it.hasNext()) {
                Integer tag = it.next();
                if (randomFarm.get(RandomGeneratorFarm.Aspect.TAG).nextDouble() > 0.5) {
                    tags.add(tag);
                }
                currentTags.add(tag);
            }

            // add tags topically related to a random subset of the parent's tags
            for (int j = 0; j < (int) Math.ceil(replyTo.tags().size() / 2.0); ++j) {
                int randomTag = currentTags
                        .get(randomFarm.get(RandomGeneratorFarm.Aspect.TAG).nextInt(currentTags.size()));
                tags.add(Dictionaries.tagMatrix
                        .getRandomRelated(randomFarm.get(RandomGeneratorFarm.Aspect.TOPIC), randomTag));
            }
            content = this.generator.generateText(member.person(), tags, prop);
        } else {
            // short comment: canned text, or (rich RDF only, ~20%) a GIF instead
            isShort = true;
            if (!richRdf || randomFarm.get(RandomGeneratorFarm.Aspect.COMMENT_ISGIF).nextDouble() > 0.8) {
                int index = randomFarm.get(RandomGeneratorFarm.Aspect.TEXT_SIZE).nextInt(shortComments_.length);
                content = shortComments_[index];
            } else {
                int index = randomFarm.get(RandomGeneratorFarm.Aspect.COMMENT_GIF).nextInt(gifs_.length);
                gif = gifs_[index];
            }
        }

        // creation date follows a power law after the parent message (plus deltaTime)
        long creationDate = Dictionaries.dates.powerlawCommDateDay(
                randomFarm.get(RandomGeneratorFarm.Aspect.DATE),
                replyTo.creationDate() + DatagenParams.deltaTime);
        /*if( creationDate <= Dictionaries.dates.getEndDateTime() )*/ {
            Comment comment = new Comment(SN.formId(SN.composeId(nextId++, creationDate)), creationDate,
                    member.person(), forum.id(), content, tags,
                    Dictionaries.ips.getIP(randomFarm.get(RandomGeneratorFarm.Aspect.IP),
                            randomFarm.get(RandomGeneratorFarm.Aspect.DIFF_IP),
                            randomFarm.get(RandomGeneratorFarm.Aspect.DIFF_IP_FOR_TRAVELER),
                            member.person().ipAddress(), creationDate),
                    Dictionaries.browsers.getPostBrowserId(
                            randomFarm.get(RandomGeneratorFarm.Aspect.DIFF_BROWSER),
                            randomFarm.get(RandomGeneratorFarm.Aspect.BROWSER), member.person().browserId()),
                    post.messageId(), replyTo.messageId(), gif);
            if (richRdf) {
                comment.richRdf(true);
                if (randomFarm.get(RandomGeneratorFarm.Aspect.COMMENT_MENTIONED).nextDouble() > 0.6) {
                    TreeSet<Long> t = new TreeSet<Long>();
                    // The user mentions one or more (up to 4) members of the forum;
                    // duplicates collapse in the TreeSet, so fewer may result
                    t.add(validMemberships
                            .get(randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_COMMENT_MENTIONED)
                                    .nextInt(validMemberships.size()))
                            .person().accountId());
                    double probabilityForNumberOfMentions = randomFarm
                            .get(RandomGeneratorFarm.Aspect.COMMENT_MENTIONED_NUM).nextDouble();
                    if (probabilityForNumberOfMentions > 0.5)
                        t.add(validMemberships.get(
                                randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_COMMENT_MENTIONED)
                                        .nextInt(validMemberships.size()))
                                .person().accountId());
                    if (probabilityForNumberOfMentions > 0.75)
                        t.add(validMemberships.get(
                                randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_COMMENT_MENTIONED)
                                        .nextInt(validMemberships.size()))
                                .person().accountId());
                    if (probabilityForNumberOfMentions > 0.95)
                        t.add(validMemberships.get(
                                randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_COMMENT_MENTIONED)
                                        .nextInt(validMemberships.size()))
                                .person().accountId());
                    comment.mentioned(t);
                }
                // ~5% of comments get an explicit visibility flag
                if (randomFarm.get(RandomGeneratorFarm.Aspect.COMMENT_VISIBILITY).nextDouble() > 0.95) {
                    if (comment.mentioned() == null || randomFarm
                            .get(RandomGeneratorFarm.Aspect.COMMENT_VISIBILITY_TF).nextDouble() > 0.5)
                        comment.setPublic(true);
                    else
                        comment.setPublic(false);
                }
                if (randomFarm.get(RandomGeneratorFarm.Aspect.COMMENT_LINK).nextDouble() > 0.57) {
                    comment.link("http://ld.bc/" + RandomStringUtils.random(6, true, false));
                }
            }
            if (richRdf && randomFarm.get(RandomGeneratorFarm.Aspect.COMMENT_COUNTRY).nextDouble() > 0.02)
                comment.countryKnown(false);
            // only long comments can be replied to in turn
            if (!isShort)
                replyCandidates.add(new Comment(comment));
            exporter.export(comment);
            // ~10% of sufficiently long comments also receive likes
            if (comment.content().length() > 10
                    && randomFarm.get(RandomGeneratorFarm.Aspect.NUM_LIKE).nextDouble() <= 0.1) {
                likeGenerator_.generateLikes(randomFarm.get(RandomGeneratorFarm.Aspect.NUM_LIKE), forum,
                        comment, Like.LikeType.COMMENT, exporter);
            }
        }
    }
    replyCandidates.clear();
    return nextId;
}

From source file:com.juick.android.MessagesFragment.java

/**
 * Prepares {@code list} for display off the UI thread, then runs {@code then} on the
 * UI thread.
 *
 * <p>Two jobs are done on a background thread: (1) each message's read status and
 * reply count are filled in from the database service; (2) if the "text_accelerated"
 * preference is set, each message is pre-rendered by drawing it into a throwaway
 * {@link Canvas} subclass whose {@code drawTextRun} override records the draw calls
 * as replayable {@code CanvasPainter}s, so later list drawing can skip text layout.
 */
public void lastPrepareMessages(final List<JuickMessage> list, final Runnable then) {
    databaseGetter.getService(new Utils.ServiceGetter.Receiver<DatabaseService>() {
        @Override
        public void withService(final DatabaseService service) {
            new Thread("processLastReads cont") {
                @Override
                public void run() {
                    for (JuickMessage juickMessage : list) {
                        // we can use service inside here, because getMessageReadStatus0 is thread-safe
                        final DatabaseService.MessageReadStatus messageReadStatus0 = service
                                .getMessageReadStatus0(juickMessage.getMID());
                        if (messageReadStatus0.read) {
                            juickMessage.read = true;
                            juickMessage.readComments = messageReadStatus0.nreplies;
                        }
                    }

                    final Activity act = getActivity();

                    if (act != null) {
                        if (sp.getBoolean("text_accelerated", false)) {
                            // accelerate text draw

                            // tiny ALPHA_8 bitmap: we only record draw calls, the pixels are discarded
                            final Bitmap bm = Bitmap.createBitmap(10, 10, Bitmap.Config.ALPHA_8);
                            // busy-wait until this fragment's view has been measured
                            while (MessagesFragment.this.getView().getMeasuredWidth() == 0) {
                                try {
                                    Thread.sleep(100);
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                            }

                            // off-screen TextView styled like a real list item, used for layout
                            TextView tv = new TextView(act);
                            tv.setTextSize(JuickMessagesAdapter.getTextSize(act));
                            MainActivity.restyleChildrenOrWidget(tv);
                            final int outerViewWidth = MessagesFragment.this.getView().getMeasuredWidth();
                            final int width = outerViewWidth - 6; // NOTE(review): 6px margin, presumably list padding
                            tv.setWidth(width);
                            tv.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                                    ViewGroup.LayoutParams.WRAP_CONTENT));
                            // collects the recorded draw operations for the message being rendered
                            final ArrayList<JuickMessagesAdapter.CanvasPainter> stuff = new ArrayList<JuickMessagesAdapter.CanvasPainter>();

                            Canvas canvas = new Canvas(bm) {
                                @Override
                                public void drawText(char[] text, int index, int count, float x, float y,
                                        Paint paint) {
                                    super.drawText(text, index, count, x, y, paint);
                                }

                                @Override
                                public void drawText(String text, float x, float y, Paint paint) {
                                    super.drawText(text, x, y, paint);
                                }

                                @Override
                                public void drawText(String text, int start, int end, float x, float y,
                                        Paint paint) {
                                    super.drawText(text, start, end, x, y, paint);
                                }

                                @Override
                                public void drawText(CharSequence text, int start, int end, float x, float y,
                                        Paint paint) {
                                    super.drawText(text, start, end, x, y, paint);
                                }

                                @Override
                                public void drawVertices(VertexMode mode, int vertexCount, float[] verts,
                                        int vertOffset, float[] texs, int texOffset, int[] colors,
                                        int colorOffset, short[] indices, int indexOffset, int indexCount,
                                        Paint paint) {
                                    super.drawVertices(mode, vertexCount, verts, vertOffset, texs, texOffset,
                                            colors, colorOffset, indices, indexOffset, indexCount, paint);
                                }

                                @Override
                                public void drawPosText(char[] text, int index, int count, float[] pos,
                                        Paint paint) {
                                    super.drawPosText(text, index, count, pos, paint);
                                }

                                @Override
                                public void drawPosText(String text, float[] pos, Paint paint) {
                                    super.drawPosText(text, pos, paint);
                                }

                                @Override
                                public void drawTextOnPath(char[] text, int index, int count, Path path,
                                        float hOffset, float vOffset, Paint paint) {
                                    super.drawTextOnPath(text, index, count, path, hOffset, vOffset, paint);
                                }

                                @Override
                                public void drawTextOnPath(String text, Path path, float hOffset, float vOffset,
                                        Paint paint) {
                                    super.drawTextOnPath(text, path, hOffset, vOffset, paint);
                                }

                                // not an @Override: hidden API on some Android versions; invoked
                                // reflectively at replay time via the drawTextRun Method handle
                                public void drawTextRun(final CharSequence text, final int start, final int end,
                                        final int contextStart, final int contextEnd, final float x,
                                        final float y, final int dir, final Paint paint) {
                                    stuff.add(new JuickMessagesAdapter.CanvasPainter() {

                                        // snapshot the Paint -- the caller may mutate it after this call
                                        Paint paintClone = getPaintFromCache(paint);

                                        @Override
                                        public void paintOnCanvas(Canvas c, Paint workPaint) {
                                            try {
                                                drawTextRun.invoke(c, text, start, end, contextStart,
                                                        contextEnd, x, y, dir, paintClone);
                                            } catch (Throwable t) {
                                            }
                                        }
                                    });
                                }

                                @Override
                                public boolean getClipBounds(Rect bounds) {
                                    // report an effectively unbounded clip so no text is culled
                                    bounds.top = 0;
                                    bounds.bottom = 100000;
                                    bounds.left = 0;
                                    bounds.right = width;
                                    return true;
                                }
                            };

                            // render each message once into the recording canvas
                            for (JuickMessage juickMessage : list) {
                                JuickMessagesAdapter.ParsedMessage pm = (JuickMessagesAdapter.ParsedMessage) juickMessage.parsedText;
                                if (pm != null && pm.textContent != null)
                                    tv.setText(pm.textContent);
                                stuff.clear();
                                tv.measure(View.MeasureSpec.makeMeasureSpec(width, View.MeasureSpec.EXACTLY),
                                        View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));
                                tv.draw(canvas);
                                if (stuff.size() > 0) {
                                    // copy the painters: "stuff" is reused for the next message
                                    juickMessage.parsedUI = new JuickMessagesAdapter.RenderedText(
                                            outerViewWidth, tv.getMeasuredHeight(),
                                            new ArrayList<JuickMessagesAdapter.CanvasPainter>(stuff));
                                } else {
                                    juickMessage.parsedUI = null;
                                }
                            }
                        }
                        act.runOnUiThread(then);
                    }
                }
            }.start();
        }
    });
}

From source file:com.sentaroh.android.SMBExplorer.SMBExplorerMain.java

/**
 * Retrieves the file list for the current remote (SMB) directory on a background
 * thread and refreshes the remote file-list view when it completes.
 *
 * <p>Shows a cancellable progress view while the {@link RetrieveFileList} worker
 * runs; the {@link NotifyEvent} listener is invoked with the result. On success the
 * adapter is repopulated (preserving scroll position) and the file-list cache is
 * updated; on failure or cancel an error dialog is shown and the view is reset.
 *
 * @param user SMB user name for the connection
 * @param pass SMB password for the connection
 */
private void refreshRemoteFileList(String user, String pass) {
    final ArrayList<FileListItem> remoteFileList = new ArrayList<FileListItem>();

    final ThreadCtrl tc = new ThreadCtrl();
    tc.setEnabled(); // allow the worker to run; cancel button disables it

    showRemoteProgressView();

    mRemoteProgressMsg.setText(R.string.msgs_progress_spin_dlg_title3);

    mRemoteProgressCancel.setEnabled(true);
    mRemoteProgressCancel.setText("Cancel");
    mRemoteProgressCancel.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            // signal the worker to stop; it reports the cancel through tc
            sendDebugLogMsg(1, "W", "Filelist is cancelled.");
            tc.setDisabled();
            mRemoteProgressCancel.setEnabled(false);
            mRemoteProgressCancel.setText("Cancelling");
        }
    });

    NotifyEvent ne = new NotifyEvent(this);
    ne.setListener(new NotifyEventListener() {
        @Override
        public void positiveResponse(Context c, Object[] o) {
            hideRemoteProgressView();
            if (tc.isThreadResultSuccess()) {
                // remember scroll position so the refreshed list does not jump
                int fv = remoteFileListView.getFirstVisiblePosition();
                int top = 0;
                if (remoteFileListView.getChildAt(0) != null)
                    top = remoteFileListView.getChildAt(0).getTop();
                remoteFileListAdapter.setDataList(remoteFileList);
                remoteFileListAdapter.sort();
                remoteFileListAdapter.notifyDataSetChanged();
                remoteFileListView.setSelectionFromTop(fv, top);
                // replace any stale cache entry for this directory with a fresh one
                removeFileListCache(buildFullPath(remoteBase, remoteDir), remoteFileListCache);

                FileListCacheItem dhi = new FileListCacheItem();
                dhi.profile_name = remoteFileListDirSpinner.getSelectedItem().toString();
                dhi.base = remoteBase;
                dhi.directory = buildFullPath(remoteBase, remoteDir);
                dhi.file_list = remoteFileList;
                dhi.directory_history.addAll(remoteDirHist);
                putFileListCache(dhi, remoteFileListCache);
                remoteCurrFLI = dhi;

                setEmptyFolderView();
                updateFileListCacheByMove();
                removeDeletedFileListCache(remoteCurrFLI, remoteFileListCache);
            } else {
                // failed or cancelled: report the reason and reset the remote view
                String err = "";
                if (tc.isThreadResultCancelled())
                    err = "Filelist was cancelled";
                else
                    err = tc.getThreadMessage();
                showDialogMsg("E", getString(R.string.msgs_remote_file_list_create_error), err);
                remoteFileListDirSpinner.setSelection(0);
                remoteBase = "";
                if (remoteFileListAdapter != null) {
                    remoteFileList.clear();
                }
                setEmptyFolderView();
                mIsMovedListAvailable = false;
            }
        }

        @Override
        public void negativeResponse(Context c, Object[] o) {
            hideRemoteProgressView();
        }
    });
    Thread th = new Thread(new RetrieveFileList(mGp, tc, RetrieveFileList.OPCD_FILE_LIST,
            formatRemoteSmbUrl(remoteBase + "/" + remoteDir + "/"), remoteFileList, user, pass, ne));
    th.start();
}

From source file:com.krawler.spring.hrms.common.hrmsCommonController.java

/**
 * Builds a JSON listing of every non-deleted user in the requesting company,
 * enriched with HRMS details: employee-profile status, join date, department,
 * role, designation, formatted employee id, salary, and each user's assigned
 * managers and reviewers.
 *
 * @param request  servlet request; recognised parameters: "searchJson"
 *                 (advanced-search filters), "ss" (simple search), "combo",
 *                 "lid" (ignored here), "stdate"/"enddate" (join-date range),
 *                 plus paging parameters consumed by StringUtil.checkpaging.
 * @param response servlet response (unused; kept for the standard controller
 *                 signature).
 * @return a "jsonView" ModelAndView whose model is a JSON string of the form
 *         {"data":{"data":[...],"count":N},"valid":true}; on failure the
 *         partially built (possibly empty) JSON object is still returned.
 */
public ModelAndView getAllUserDetailsHrms(HttpServletRequest request, HttpServletResponse response) {
    KwlReturnObject kmsg = null;
    JSONObject jobj = new JSONObject();
    JSONArray jarr = new JSONArray();
    JSONObject countobj = new JSONObject();
    JSONObject jobj1 = new JSONObject();
    try {
        String Searchjson = request.getParameter("searchJson");
        String companyid = sessionHandlerImplObj.getCompanyid(request);
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        // NOTE(review): kept as raw ArrayLists to stay source-compatible with
        // the DAO helper signatures (getMyAdvanceSearchparams etc.).
        ArrayList filter_names = new ArrayList(
                Arrays.asList("ua.user.company.companyID", "ua.user.deleteflag"));
        ArrayList filter_values = new ArrayList(Arrays.asList(companyid, 0));
        requestParams.put("ss", StringUtil.checkForNull(request.getParameter("ss")));
        requestParams.put("allflag", false);
        requestParams.put("searchcol",
                new String[] { "u.firstName", "u.lastName", "ua.role.name", "u.emailID" });
        if (request.getParameter("combo") != null) {
            requestParams.put("combo", request.getParameter("combo"));
            requestParams.put("allflag", true);
        } else {
            requestParams.put("combo", "");
        }
        StringUtil.checkpaging(requestParams, request);
        // Optional join-date range filter. NOTE(review): the deprecated
        // Date(String) round-trip normalises the input to yyyy/MM/dd; kept
        // as-is to preserve the accepted input formats.
        SimpleDateFormat df = new SimpleDateFormat("yyyy/MM/dd");
        if (!StringUtil.isNullOrEmpty(request.getParameter("stdate"))) {
            filter_names.add(">=emp.joindate");
            filter_values.add(new Date(df.format(new Date(request.getParameter("stdate")))));
            filter_names.add("<=emp.joindate");
            filter_values.add(new Date(df.format(new Date(request.getParameter("enddate")))));
        }

        if (!StringUtil.isNullOrEmpty(Searchjson)) {
            getMyAdvanceSearchparams(Searchjson, filter_names);
            insertParamAdvanceSearchString(filter_values, Searchjson);
        }
        requestParams.put("filter_names", filter_names);
        requestParams.put("filter_values", filter_values);

        kmsg = hrmsCommonDAOObj.getUserDetailsHrms(requestParams);
        List lst = kmsg.getEntityList();
        jarr = kwlCommonTablesDAOObj.getDetailsJson(lst, 0, "com.krawler.common.admin.User");

        for (int ctr = 0; ctr < jarr.length(); ctr++) {
            jobj = jarr.getJSONObject(ctr);
            Object[] row = (Object[]) lst.get(ctr);
            User u = (User) jobj.get("instance");
            Useraccount ua = (Useraccount) kwlCommonTablesDAOObj
                    .getObject("com.krawler.common.admin.Useraccount", row[0].toString());
            // row[1] carries the employee-profile id; absent -> "Incomplete".
            if (row[1] != null) {
                Empprofile e = (Empprofile) kwlCommonTablesDAOObj.getObject("com.krawler.hrms.ess.Empprofile",
                        row[1].toString());
                if (!StringUtil.isNullOrEmpty(e.getStatus())) {
                    jobj.put("status", e.getStatus());
                } else {
                    jobj.put("status", "Pending");
                }
                jobj.put("joindate", (e.getJoindate() == null ? ""
                        : sessionHandlerImplObj.getDateFormatter(request).format(e.getJoindate())));
            } else {
                jobj.put("status", "Incomplete");
            }
            jobj.put("department", (ua.getDepartment() == null ? "" : ua.getDepartment().getId()));
            jobj.put("departmentname", (ua.getDepartment() == null ? "" : ua.getDepartment().getValue()));
            jobj.put("role", (ua.getRole() == null ? "" : ua.getRole().getID()));
            // Resolve the role display name. Every access is null-guarded:
            // the previous revision dereferenced ua.getRole() in the fallback
            // branch and threw a NullPointerException for role-less users.
            String name = "";
            if (ua.getRole() != null) {
                if (ua.getRole().getCompany() != null) {
                    name = ua.getRole().getName();
                } else {
                    // Company-less roles are built-ins; resolve a localized label.
                    name = messageSource.getMessage("hrms.common.role." + ua.getRole().getID(), null,
                            ua.getRole().getName(), RequestContextUtils.getLocale(request));
                }
            }
            jobj.put("rolename", name);
            jobj.put("username", u.getUserLogin().getUserName());
            jobj.put("fullname", u.getFirstName() + " " + (u.getLastName() == null ? "" : u.getLastName()));
            jobj.put("lastlogin",
                    (u.getUserLogin().getLastActivityDate() == null ? ""
                            : sessionHandlerImplObj.getDateFormatter(request)
                                    .format(u.getUserLogin().getLastActivityDate())));
            jobj.put("designation", ua.getDesignationid() == null ? "" : ua.getDesignationid().getValue());
            jobj.put("designationid", ua.getDesignationid() == null ? "" : ua.getDesignationid().getId());
            jobj.put("templateid", ua.getTemplateid() != null ? ua.getTemplateid() : "");
            jobj.put("salary", ua.getSalary());
            jobj.put("accno", ua.getAccno());
            jobj.put("frequency", u.getFrequency());
            // Employee id: either the standard generated format or the
            // account's custom format seeded with the standard id.
            requestParams.clear();
            requestParams.put("companyid", sessionHandlerImplObj.getCompanyid(request));
            requestParams.put("empid", ua.getEmployeeid());
            KwlReturnObject result;
            if (ua.getEmployeeIdFormat() == null) {
                jobj.put("employeeid", ua.getEmployeeid() == null ? ""
                        : profileHandlerDAOObj.getEmpidFormatEdit(requestParams).getEntityList().get(0));
            } else {
                requestParams.put("standardEmpId", profileHandlerDAOObj.getEmpidFormatEdit(requestParams)
                        .getEntityList().get(0).toString());
                requestParams.put("employeeIdFormat", ua.getEmployeeIdFormat());
                jobj.put("employeeid", profileHandlerDAOObj.getNewEmployeeIdFormat(requestParams));
            }

            // Active (managerstatus == 1), non-deleted manager assignments.
            requestParams.clear();
            filter_names.clear();
            filter_values.clear();
            filter_names.add("assignemp.userID");
            filter_values.add(u.getUserID());

            filter_names.add("assignman.deleteflag");
            filter_values.add(0);

            filter_names.add("managerstatus");
            filter_values.add(1);

            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);

            result = hrmsCommonDAOObj.getAssignmanager(requestParams);
            List lst1 = result.getEntityList();
            Iterator itr1 = lst1.iterator();

            if (itr1.hasNext()) {
                while (itr1.hasNext()) {
                    Assignmanager asm = (Assignmanager) itr1.next();
                    if (asm.getAssignman() != null) {
                        jobj.append("managerid", asm.getAssignman().getUserID());
                        jobj.append("manager",
                                asm.getAssignman().getFirstName() + " " + asm.getAssignman().getLastName());
                    }
                }
            } else {
                jobj.put("manager", " ");
                jobj.put("managerid", " ");
            }

            // Active (reviewerstatus == 1), non-deleted reviewer assignments.
            requestParams.clear();
            filter_names.clear();
            filter_values.clear();
            filter_names.add("employee.userID");
            filter_values.add(u.getUserID());

            filter_names.add("reviewer.deleteflag");
            filter_values.add(0);

            filter_names.add("reviewerstatus");
            filter_values.add(1);

            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);

            result = hrmsCommonDAOObj.getAssignreviewer(requestParams);
            lst1 = result.getEntityList();
            itr1 = lst1.iterator();

            if (itr1.hasNext()) {
                while (itr1.hasNext()) {
                    Assignreviewer rev = (Assignreviewer) itr1.next();
                    if (rev.getReviewer() != null) {
                        jobj.append("reviewerid", rev.getReviewer().getUserID());
                        jobj.append("reviewer",
                                rev.getReviewer().getFirstName() + " " + rev.getReviewer().getLastName());
                    }
                }
            } else {
                jobj.put("reviewer", " ");
                jobj.put("reviewerid", " ");
            }
            jarr.put(ctr, jobj);
        }

        countobj.put("data", jarr);
        countobj.put("count", kmsg.getRecordTotalCount());
        jobj1.put("data", countobj);
        jobj1.put("valid", true);
    } catch (Exception e) {
        // TODO: route through a proper logger; printStackTrace kept to
        // preserve the existing diagnostic behaviour.
        e.printStackTrace();
    } finally {
        // Intentional return-from-finally: callers always receive a JSON
        // model, even when an exception was caught above.
        return new ModelAndView("jsonView", "model", jobj1.toString());
    }
}

From source file:com.hygenics.parser.BreakMultiple.java

/**
 * Pulls rows from the source table in parallel chunks, splits each row apart
 * with {@link Break} tasks on a fork-join pool, and posts the resulting JSON
 * strings to the target table, looping until a pull returns no rows.
 *
 * Fixes over the previous revision:
 * <ul>
 *   <li>the re-query loop iterated chunk indices 1..qnum (instead of
 *       0..qnum-1 like the initial pull), silently skipping the first chunk
 *       of every pull after the first;</li>
 *   <li>the memory-pause condition used non-short-circuit '|'/'&amp;', so
 *       {@code loops % waitloops} was evaluated even when waitloops == 0 and
 *       threw an ArithmeticException;</li>
 *   <li>busy-wait spin loops after invokeAll() were removed — invokeAll()
 *       already blocks until every submitted task has completed.</li>
 * </ul>
 */
public void run() {
    checkTable();
    rows = new ArrayList<String>();
    log.info("Starting Break");

    // shared pool for both the DB query tasks and the break tasks
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors() * procnum);

    List<Future<ArrayList<String>>> futures;
    Set<Callable<ArrayList<String>>> collect = new HashSet<Callable<ArrayList<String>>>();
    Set<Callable<ArrayList<String>>> qcollect = new HashSet<Callable<ArrayList<String>>>();

    // parsed JSON rows awaiting a database commit
    ArrayList<String> jsons = new ArrayList<String>();

    // initial pull
    queueChunkQueries(qcollect);
    log.info("Getting From DB @" + Calendar.getInstance().getTime().toString());
    futures = fjp.invokeAll(qcollect); // blocks until all query tasks finish
    drainRowFutures(futures);
    qcollect = new HashSet<Callable<ArrayList<String>>>();
    futures = null;

    log.info("Breaking");
    // process while there is still data to process
    while (rows.size() > 0) {
        log.info("Iteration Contains " + rows.size() + " Rows");
        // advance the id window for the next pull
        offset += pullsize;
        log.info("Submitting Tasks");

        for (String r : rows) {
            if (fjp.isShutdown()) {
                fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors() * procnum);
            }
            if (r != null && passesFilters(r)) {
                Map<String, Json> rowmap = Json.read(r).asJsonMap();
                if (rowmap.size() > 0) {
                    collect.add(newBreakTask(rowmap));
                }
            }
        }

        log.info("SUBMITTED " + collect.size() + " tasks");
        futures = fjp.invokeAll(collect); // blocks until all break tasks finish

        jsons.clear();
        log.info("Getting Strings");
        try {
            for (Future<ArrayList<String>> p : futures) {
                ArrayList<String> retlist = p.get();
                if (retlist != null) {
                    if (retlist.size() > 0) {
                        jsons.addAll(retlist);
                    }
                    // commit in batches to cap memory use
                    if (jsons.size() >= commit_size) {
                        boolean split = jsons.size() > SPLITSIZE;
                        log.info("Split " + (split ? "True" : "False") + ": Sending to DB @ "
                                + Calendar.getInstance().getTime().toString());
                        postToDb(jsons, split);
                        jsons = new ArrayList<String>();
                        log.info("Posted to DB @ " + Calendar.getInstance().getTime().toString());
                    }
                }
                p.cancel(true);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }

        futures = null;
        collect = new HashSet<Callable<ArrayList<String>>>();

        // flush whatever is left for this iteration (the original posted
        // unconditionally here, so an empty post is preserved)
        boolean split = jsons.size() > SPLITSIZE;
        log.info("Split " + (split ? "True" : "False") + ": Sending to DB @"
                + Calendar.getInstance().getTime().toString());
        postToDb(jsons, split);
        jsons = new ArrayList<String>();
        log.info("Posted to DB @ " + Calendar.getInstance().getTime().toString());

        rows = new ArrayList<String>();

        // pause when memory is low, or every waitloops iterations when set;
        // waitloops is checked first so 'loops % 0' can never be evaluated
        if (Runtime.getRuntime().freeMemory() < 500000
                || (waitloops != 0 && (loops % waitloops) == 0)) {
            log.info("Paused Free Memory Left: " + Runtime.getRuntime().freeMemory());
            System.gc();
            Runtime.getRuntime().gc();

            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }

            while (Runtime.getRuntime().freeMemory() < 500000) {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                }
            }

            log.info("Restart Free Memory Left: " + Runtime.getRuntime().freeMemory());
        }

        // pull the next window of rows from multiple threads
        queueChunkQueries(qcollect);
        futures = fjp.invokeAll(qcollect);
        drainRowFutures(futures);
        futures = null;
        qcollect = new HashSet<Callable<ArrayList<String>>>(4);

        Runtime.getRuntime().gc();
        System.gc();
    }

    // final flush (skipped when nothing is pending)
    if (jsons.size() > SPLITSIZE) {
        log.info("Split True: Sending to DB @" + Calendar.getInstance().getTime().toString());
        postToDb(jsons, true);
        jsons = new ArrayList<String>();
    } else if (jsons.size() > 0) {
        log.info("Split False: Sending to DB @" + Calendar.getInstance().getTime().toString());
        postToDb(jsons, false);
        jsons = new ArrayList<String>();
    }

    Runtime.getRuntime().gc();
    System.gc();

    log.info("Shutting Down Forkjoin Pool");
    if (fjp.isShutdown() == false) {
        fjp.shutdownNow();
    }
}

/** Queues one chunked SELECT per query thread covering (offset, offset + pullsize]. */
private void queueChunkQueries(Set<Callable<ArrayList<String>>> qcollect) {
    for (int i = 0; i < qnum; i++) {
        String condition = " WHERE " + idcolumn + " > "
                + Integer.toString(offset + (Math.round(pullsize / qnum) * i)) + " AND " + idcolumn
                + " <= " + Integer.toString(offset + (Math.round(pullsize / qnum) * (i + 1)));

        if (extracondition != null) {
            condition += " " + extracondition.trim();
        }

        qcollect.add(new GetFromDB((select + condition), template));
        log.info("SELECTING " + select + " " + condition);
    }
}

/** Adds every non-empty query result to {@code rows}, cancelling each future afterwards. */
private void drainRowFutures(List<Future<ArrayList<String>>> futures) {
    for (Future<ArrayList<String>> f : futures) {
        try {
            ArrayList<String> part = f.get();
            if (part != null && part.size() > 0) {
                rows.addAll(part);
            }
            f.cancel(true);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }
}

/**
 * Row filter: the row must contain {@code mustcontain} (when set) and must
 * not contain {@code cannotcontain} (when set).
 */
private boolean passesFilters(String r) {
    return (mustcontain == null || r.contains(mustcontain))
            && (cannotcontain == null || !r.contains(cannotcontain));
}

/** Builds a {@link Break} task for one parsed row map. */
private Break newBreakTask(Map<String, Json> rowmap) {
    return new Break(unescape, repeatkeys, template, rowmap.get(rowcolumn).asString(), token,
            replacementPattern, positions, (Calendar.getInstance().getTime().toString()), targettable,
            rowmap.get("offenderhash").asString(), maxpos, genhash);
}

From source file:base.BasePlayer.FileRead.java

/**
 * Checks whether {@code vardraw} forms a candidate compound pair with any
 * variant already stored on {@code gene}: for each gene variant, collect the
 * unaffected (healthy) carriers of that allele (discarding the allele when a
 * homozygous affected sample is seen); {@code vardraw} pairs with it when
 * some unaffected sample carries {@code vardraw} but none of those carriers
 * appear in the healthy-carrier set. Matching pairs are appended (without
 * duplicates) to {@code gene.compounds}. As a side effect, annotation-track
 * samples encountered in either variant get {@code inheritance} set to true.
 *
 * @return true when {@code gene.compounds} is non-empty afterwards.
 */
static boolean checkCompoundGene(Gene gene, VarNode vardraw) {

    // No variants collected on this gene: nothing can pair up.
    if (gene.varnodes.size() < 1) {
        gene.compounds.clear();
        return false;
    }

    for (int i = 0; i < gene.varnodes.size(); i++) {

        for (int v = 0; v < gene.varnodes.get(i).vars.size(); v++) {

            Entry<String, ArrayList<SampleNode>> alleleEntry = gene.varnodes.get(i).vars.get(v);

            // Unaffected carriers of this gene allele.
            ArrayList<Sample> healthyCarriers = new ArrayList<Sample>();
            for (SampleNode node : alleleEntry.getValue()) {
                if (node.getSample() == null) {
                    continue;
                }
                if (node.getSample().annotation) {
                    node.inheritance = true;
                    continue;
                }
                if (!node.getSample().affected) {
                    healthyCarriers.add(node.getSample());
                } else if (node.isHomozygous()) {
                    // A homozygous affected sample rules this allele out.
                    healthyCarriers.clear();
                    break;
                }
            }

            if (healthyCarriers.isEmpty()) {
                continue;
            }

            for (int var = 0; var < vardraw.vars.size(); var++) {
                Entry<String, ArrayList<SampleNode>> drawEntry = vardraw.vars.get(var);

                // null: no unaffected carrier seen; true: shares a healthy
                // carrier with the gene allele; false: disjoint carriers.
                Boolean sharesHealthyCarrier = null;
                for (SampleNode node : drawEntry.getValue()) {
                    if (node.getSample() == null) {
                        continue;
                    }
                    if (node.getSample().annotation) {
                        node.inheritance = true;
                        continue;
                    }
                    if (!node.getSample().affected) {
                        if (healthyCarriers.contains(node.getSample())) {
                            sharesHealthyCarrier = true;
                            break;
                        }
                        sharesHealthyCarrier = false;
                    }
                }

                if (sharesHealthyCarrier != null && !sharesHealthyCarrier) {
                    if (!gene.compounds.contains(vardraw)) {
                        gene.compounds.add(vardraw);
                    }
                    if (!gene.compounds.contains(gene.varnodes.get(i))) {
                        gene.compounds.add(gene.varnodes.get(i));
                    }
                }
            }
        }
    }

    return gene.compounds.size() != 0;
}

From source file:com.slamd.admin.AdminServlet.java

/**
 * Handles the work of exporting information about multiple jobs.
 *
 * @param  requestInfo  The state information for this request.
 * @param  jobIDs       The job IDs of the jobs to export.
 */
static void handleMassExport(RequestInfo requestInfo, String[] jobIDs) {
    logMessage(requestInfo, "In handleMassExport()");

    // The user must have permission to export jobs to access this section.
    if (!requestInfo.mayExportJobData) {
        logMessage(requestInfo, "No mayViewJob permission granted");
        generateAccessDeniedBody(requestInfo, "You do not have permission to " + "export job information.");
        return;
    }

    // Get the important state variables for this request.
    HttpServletRequest request = requestInfo.request;
    String servletBaseURI = requestInfo.servletBaseURI;
    StringBuilder htmlBody = requestInfo.htmlBody;
    StringBuilder infoMessage = requestInfo.infoMessage;

    String category = request.getParameter(Constants.SERVLET_PARAM_VIEW_CATEGORY);

    // Make sure that at least one job ID was specified.  If not, then print
    // an appropriate error message and go back to viewing the completed jobs.
    if ((jobIDs == null) || (jobIDs.length < 1)) {
        infoMessage.append("You must specify at least one job to export.<BR>" + EOL);
        handleViewJob(requestInfo, category, null, null);
        return;
    }

    // Retrieve all of the jobs and see if they are all of the same type.  If
    // so, then we'll be able to be pretty specific about the kinds of data
    // that can be exported.  Otherwise, it will only be possible to choose
    // what gets exported in a more generic manner.
    ArrayList<ArrayList<Job>> jobTypeList = new ArrayList<ArrayList<Job>>();
    ArrayList<String> jobIDList = new ArrayList<String>();
    for (int i = 0; i < jobIDs.length; i++) {
        Job job = null;
        try {
            job = configDB.getJob(jobIDs[i]);
            if (job == null) {
                infoMessage.append("Unable to retrieve job " + jobIDs[i] + " -- job not found.<BR>" + EOL);
            } else {
                // Make sure that the job has statistics available.
                if (!job.hasStats()) {
                    infoMessage.append("Skipping job " + jobIDs[i] + " because it does not have any statistics "
                            + "available.<BR>" + EOL);
                    continue;
                }

                // Get the job type for this job and see if it is the same as any
                // of the other jobs that we have already seen.
                jobIDList.add(jobIDs[i]);
                boolean categorized = false;
                for (int j = 0; j < jobTypeList.size(); j++) {
                    ArrayList<Job> jobList = jobTypeList.get(j);
                    Job job2 = jobList.get(0);
                    if (job2.getJobClassName().equals(job.getJobClassName())) {
                        jobList.add(job);
                        categorized = true;
                        break;
                    }
                }
                if (!categorized) {
                    ArrayList<Job> jobList = new ArrayList<Job>();
                    jobList.add(job);
                    jobTypeList.add(jobList);
                }
            }
        } catch (Exception e) {
            infoMessage.append("Unable to retrieve job " + jobIDs[i] + " -- " + e + "<BR>" + EOL);
        }
    }

    // Make sure that at least one job was placed in the job list.
    if (jobTypeList.isEmpty()) {
        infoMessage.append("Unable to perform the requested comparison because "
                + "there was not at least one job meeting the " + "necessary criteria.<BR>" + EOL);
        handleViewJob(requestInfo, category, null, null);
        return;
    }

    jobIDs = new String[jobIDList.size()];
    jobIDList.toArray(jobIDs);

    // Sort the job information based on the actual start times for the jobs.
    // Since there should not be that many jobs to compare, and since it is
    // likely that they will already be sorted anyway, then a selection sort
    // should be the fastest and simplest way to do it.
    for (int i = 0; i < jobTypeList.size(); i++) {
        ArrayList<Job> jobList = jobTypeList.get(i);
        long[] startTimes = new long[jobList.size()];
        Job[] jobs = new Job[jobList.size()];
        for (int j = 0; j < jobs.length; j++) {
            jobs[j] = jobList.get(j);
            startTimes[j] = jobs[j].getActualStartTime().getTime();
        }
        for (int j = 0; j < jobs.length; j++) {
            int slot = -1;
            long minStartTime = startTimes[j];

            for (int k = j + 1; k < jobs.length; k++) {
                if (startTimes[k] < minStartTime) {
                    slot = k;
                    minStartTime = startTimes[k];
                }
            }

            if (slot > 0) {
                Job tempJob = jobs[slot];
                long tempStartTime = startTimes[slot];

                jobs[slot] = jobs[j];
                startTimes[slot] = startTimes[j];
                jobs[j] = tempJob;
                startTimes[j] = tempStartTime;
            }
        }

        jobList.clear();
        for (int j = 0; j < jobs.length; j++) {
            jobList.add(jobs[j]);
        }
    }

    // Determine whether the user has chosen the kinds of information to be
    // exported.
    String confirmedStr = request.getParameter(Constants.SERVLET_PARAM_CONFIRMED);

    if ((confirmedStr != null) && confirmedStr.equals(Constants.CONFIG_VALUE_TRUE)) {
        // Determine the kinds of information that will be included in the output.
        String value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_JOB_ID);
        boolean includeJobID = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_INCLUDE_LABELS);
        boolean includeLabels = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_DESCRIPTION);
        boolean includeDescription = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_START_TIME);
        boolean includeStartTime = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_STOP_TIME);
        boolean includeStopTime = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_DURATION);
        boolean includeDuration = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_CLIENTS);
        boolean includeClients = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_THREADS);
        boolean includeThreads = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_INTERVAL);
        boolean includeInterval = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_PARAMETERS);
        boolean includeAllParams = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_STATISTICS);
        boolean includeAllStats = (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                || value.equalsIgnoreCase("off")));

        ArrayList<Parameter> includeParameters = new ArrayList<Parameter>();
        ArrayList<StatTracker> includeStats = new ArrayList<StatTracker>();
        if (jobTypeList.size() == 1) {
            ArrayList<Job> jobList = jobTypeList.get(0);
            Job job = jobList.get(0);
            Parameter[] stubs = job.getParameterStubs().getParameters();
            for (int i = 0; i < stubs.length; i++) {
                value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_PARAM_PREFIX + stubs[i].getName());
                if (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                        || value.equalsIgnoreCase("off"))) {
                    includeParameters.add(stubs[i]);
                }
            }

            String[] trackerNames = job.getStatTrackerNames();
            for (int i = 0; i < trackerNames.length; i++) {
                value = request.getParameter(Constants.SERVLET_PARAM_EXPORT_STAT_PREFIX + trackerNames[i]);
                if (!((value == null) || value.equals("0") || value.equalsIgnoreCase("false")
                        || value.equalsIgnoreCase("off"))) {
                    // There must be at least one job with this stat tracker in order to
                    // export this information.
                    for (int j = 0; j < jobList.size(); j++) {
                        StatTracker[] trackers = jobList.get(j).getStatTrackers(trackerNames[i]);
                        if ((trackers != null) && (trackers.length > 0)) {
                            includeStats.add(trackers[0]);
                            break;
                        }
                    }
                }
            }
        }

        // Get the writer to use to send data to the client.
        PrintWriter writer = null;
        try {
            writer = requestInfo.response.getWriter();
        } catch (IOException ioe) {
            infoMessage.append("ERROR:  Unable to write the data -- " + ioe + "<BR>" + EOL);
            htmlBody.append(
                    "<SPAN CLASS=\"" + Constants.STYLE_MAIN_HEADER + "\">Error Saving Data</SPAN>" + EOL);
            htmlBody.append("<BR><BR>" + EOL);
            htmlBody.append("The attempt to save the data failed." + EOL);
            htmlBody.append("See the error message above for additional " + "information");
            return;
        }

        // Indicate that the output from this will not be HTML.
        requestInfo.generateHTML = false;
        requestInfo.response.setContentType("application/x-slamd-job-export");
        requestInfo.response.addHeader("Content-Disposition", "filename=\"slamd_job_export_data.txt\"");

        // Iterate through all the different kinds of jobs to include in the
        // export.
        for (int i = 0; i < jobTypeList.size(); i++) {
            // Get the job information as an array.
            ArrayList<Job> jobList = jobTypeList.get(i);
            Job[] jobs = new Job[jobList.size()];
            jobList.toArray(jobs);

            // If there are multiple types of jobs to retrieve, then get the types
            // of parameters to include for this particular job type.
            if (includeAllParams && (jobTypeList.size() > 1)) {
                includeParameters.clear();
                Parameter[] stubs = jobs[0].getParameterStubs().getParameters();
                for (int j = 0; j < stubs.length; j++) {
                    if (!(stubs[j] instanceof PlaceholderParameter)) {
                        includeParameters.add(stubs[j]);
                    }
                }
            }

            // If there are multiple types of jobs to retrieve, then get the types
            // of statistics to include for this particular job type.
            if (includeAllStats && (jobTypeList.size() > 1)) {
                includeStats.clear();
                String[] trackerNames = jobs[0].getStatTrackerNames();
                for (int j = 0; j < trackerNames.length; j++) {
                    for (int k = 0; k < jobs.length; k++) {
                        StatTracker[] trackers = jobs[k].getStatTrackers(trackerNames[j]);
                        if ((trackers != null) && (trackers.length > 0)) {
                            includeStats.add(trackers[0]);
                            break;
                        }
                    }
                }
            }

            // If labels are to be included in the export, then send them out.
            if (includeLabels) {
                writer.println(jobs[0].getJobName() + " Job Data");

                if (includeJobID) {
                    writer.print("Job ID\t");
                }

                if (includeDescription) {
                    writer.print("Description\t");
                }

                if (includeStartTime) {
                    writer.print("Start Time\t");
                }

                if (includeStopTime) {
                    writer.print("Stop Time\t");
                }

                if (includeDuration) {
                    writer.print("Duration\t");
                }

                if (includeClients) {
                    writer.print("Number of Clients\t");
                }

                if (includeThreads) {
                    writer.print("Threads per Client\t");
                }

                if (includeInterval) {
                    writer.print("Collection Interval\t");
                }

                for (int j = 0; j < includeParameters.size(); j++) {
                    Parameter p = includeParameters.get(i);
                    writer.print(p.getDisplayName() + '\t');
                }

                for (int j = 0; j < includeStats.size(); j++) {
                    StatTracker tracker = includeStats.get(j);
                    String[] trackerLabels = tracker.getSummaryLabels();
                    for (int k = 0; k < trackerLabels.length; k++) {
                        writer.print(trackerLabels[k] + '\t');
                    }
                }

                writer.println();
            }

            // Write out the requested information for each job.
            for (int j = 0; j < jobs.length; j++) {
                if (includeJobID) {
                    writer.print(jobs[j].getJobID() + '\t');
                }

                if (includeDescription) {
                    writer.print(jobs[j].getJobDescription() + '\t');
                }

                if (includeStartTime) {
                    String formattedTime = displayDateFormat.format(jobs[j].getActualStartTime());
                    writer.print(formattedTime + '\t');
                }

                if (includeStopTime) {
                    String formattedTime = displayDateFormat.format(jobs[j].getActualStopTime());
                    writer.print(formattedTime + '\t');
                }

                if (includeDuration) {
                    writer.print(jobs[j].getActualDuration() + "\t");
                }

                if (includeClients) {
                    writer.print(jobs[j].getNumberOfClients() + "\t");
                }

                if (includeThreads) {
                    writer.print(jobs[j].getThreadsPerClient() + "\t");
                }

                if (includeInterval) {
                    writer.print(jobs[j].getCollectionInterval() + "\t");
                }

                for (int k = 0; k < includeParameters.size(); k++) {
                    Parameter p = includeParameters.get(k);
                    Parameter q = jobs[i].getParameterList().getParameter(p.getName());
                    if (q == null) {
                        writer.print("\t");
                    } else {
                        writer.print(q.getValueString() + '\t');
                    }
                }

                for (int k = 0; k < includeStats.size(); k++) {
                    StatTracker tracker = includeStats.get(k);

                    StatTracker[] trackers = jobs[j].getStatTrackers(tracker.getDisplayName());
                    if ((trackers == null) || (trackers.length == 0)) {
                        for (int l = 0; l < tracker.getSummaryLabels().length; l++) {
                            writer.print("\t");
                        }
                    } else {
                        try {
                            StatTracker t = trackers[0].newInstance();
                            t.aggregate(trackers);
                            String[] values = t.getSummaryData();
                            for (int l = 0; l < values.length; l++) {
                                writer.print(values[l] + '\t');
                            }
                        } catch (Exception e) {
                            for (int l = 0; l < tracker.getSummaryLabels().length; l++) {
                                writer.print("\t");
                            }
                        }
                    }
                }

                writer.println();
            }

            writer.println();
        }
    } else {
        if (jobTypeList.size() == 1) {
            ArrayList<Job> jobList = jobTypeList.get(0);
            Job job = jobList.get(0);

            htmlBody.append("<SPAN CLASS=\"" + Constants.STYLE_MAIN_HEADER + "\">Export Multiple \""
                    + job.getJobName() + "\" Jobs</SPAN>" + EOL);
        } else {
            htmlBody.append("<SPAN CLASS=\"" + Constants.STYLE_MAIN_HEADER
                    + "\">Export Multiple SLAMD Jobs</SPAN>" + EOL);
        }

        htmlBody.append("<BR><BR>" + EOL);
        htmlBody.append(
                "Please select the information that should be included " + "in the job data export." + EOL);

        if (jobTypeList.size() > 1) {
            htmlBody.append("Note that because multiple job types were selected, "
                    + "it is not possible to choose the individual "
                    + "parameter and statistic types that can be exported." + EOL);
            htmlBody.append("Therefore, it is only possible to indicate whether "
                    + "to include all or no parameter information, and all " + "or no statistical information."
                    + EOL);
        }

        htmlBody.append("<BR><BR>" + EOL);

        htmlBody.append("<FORM METHOD=\"POST\" ACTION=\"" + servletBaseURI + "\" CLASS=\""
                + Constants.STYLE_MAIN_FORM + "\">" + EOL);
        htmlBody.append(
                "  " + generateHidden(Constants.SERVLET_PARAM_SECTION, Constants.SERVLET_SECTION_JOB) + EOL);
        htmlBody.append(
                "  " + generateHidden(Constants.SERVLET_PARAM_SUBSECTION, Constants.SERVLET_SECTION_JOB_MASS_OP)
                        + EOL);
        htmlBody.append(
                "  " + generateHidden(Constants.SERVLET_PARAM_SUBMIT, Constants.SUBMIT_STRING_EXPORT) + EOL);

        htmlBody.append(
                "  " + generateHidden(Constants.SERVLET_PARAM_CONFIRMED, Constants.CONFIG_VALUE_TRUE) + EOL);

        for (int i = 0; i < jobIDs.length; i++) {
            htmlBody.append("  " + generateHidden(Constants.SERVLET_PARAM_JOB_ID, jobIDs[i]) + EOL);
        }
        if (requestInfo.debugHTML) {
            htmlBody.append(generateHidden(Constants.SERVLET_PARAM_HTML_DEBUG, "1") + EOL);
        }

        htmlBody.append("  <B>Job Schedule Information to Export</B><BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_JOB_ID
                + "\" CHECKED>Job ID<BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_DESCRIPTION
                + "\" CHECKED>Job Description<BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_START_TIME
                + "\">Job Start Time<BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_STOP_TIME
                + "\">Job Stop Time<BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_DURATION
                + "\">Job Duration<BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_CLIENTS
                + "\">Number of Clients<BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_THREADS
                + "\">Number of Threads per Client<BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_INTERVAL
                + "\">Statistics Collection Interval<BR>" + EOL);

        htmlBody.append("  <BR><BR>" + EOL);
        htmlBody.append("  <B>Job Parameter Information to Export</B><BR>" + EOL);
        if (jobTypeList.size() == 1) {
            ArrayList<Job> jobList = jobTypeList.get(0);
            Job job = jobList.get(0);
            Parameter[] stubs = job.getParameterStubs().getParameters();
            for (int i = 0; i < stubs.length; i++) {
                if (!(stubs[i] instanceof PlaceholderParameter)) {
                    htmlBody.append(
                            "  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_PARAM_PREFIX
                                    + stubs[i].getName() + "\">" + stubs[i].getDisplayName() + "<BR>" + EOL);
                }
            }
        } else {
            htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_PARAMETERS
                    + "\">Export all parameters<BR>" + EOL);
        }

        htmlBody.append("  <BR><BR>" + EOL);
        htmlBody.append("  <B>Statistical Information to Export</B><BR>" + EOL);
        if (jobTypeList.size() == 1) {
            ArrayList<Job> jobList = jobTypeList.get(0);
            Job job = jobList.get(0);
            String[] trackerNames = job.getStatTrackerNames();
            for (int i = 0; i < trackerNames.length; i++) {
                htmlBody.append(
                        "  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_STAT_PREFIX
                                + trackerNames[i] + "\" CHECKED>" + trackerNames[i] + "<BR>" + EOL);
            }
        } else {
            htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_EXPORT_STATISTICS
                    + "\" CHECKED>Export all statistics<BR>" + EOL);
        }

        htmlBody.append("  <BR><BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"CHECKBOX\" NAME=\"" + Constants.SERVLET_PARAM_INCLUDE_LABELS
                + "\" CHECKED>Include Labels in Exported Data<BR>" + EOL);
        htmlBody.append("  <BR>" + EOL);
        htmlBody.append("  <INPUT TYPE=\"SUBMIT\" VALUE=\"Export Data\"><BR>" + EOL);

        htmlBody.append("</FORM>" + EOL);
    }
}

From source file:com.amazon.carbonado.repo.jdbc.JDBCStorableIntrospector.java

/**
 * Uses the given database connection to query database metadata. This is
 * used to bind storables to tables, and properties to columns. Other
 * checks are performed to ensure that storable type matches well with the
 * definition in the database.//from   w ww .j  a v a  2s. com
 */
private static <S extends Storable> JDBCStorableInfo<S> examine(StorableInfo<S> mainInfo, Connection con,
        final String searchCatalog, final String searchSchema, SchemaResolver resolver,
        boolean primaryKeyCheckDisabled) throws SQLException, SupportException {
    final DatabaseMetaData meta = con.getMetaData();

    final String databaseProductName = meta.getDatabaseProductName();
    final String userName = meta.getUserName();

    String[] tableAliases;
    if (mainInfo.getAliasCount() > 0) {
        tableAliases = mainInfo.getAliases();
    } else {
        String name = mainInfo.getStorableType().getSimpleName();
        tableAliases = generateAliases(name);
    }

    // Try to find matching table from aliases.
    String catalog = null, schema = null, tableName = null, tableType = null;
    findName: {
        // The call to getTables may return several matching tables. This
        // map defines the "best" table type we'd like to use. The higher
        // the number the better.
        Map<String, Integer> fitnessMap = new HashMap<String, Integer>();
        fitnessMap.put("LOCAL TEMPORARY", 1);
        fitnessMap.put("GLOBAL TEMPORARY", 2);
        fitnessMap.put("VIEW", 3);
        fitnessMap.put("SYSTEM TABLE", 4);
        fitnessMap.put("TABLE", 5);
        fitnessMap.put("ALIAS", 6);
        fitnessMap.put("SYNONYM", 7);

        for (int i = 0; i < tableAliases.length; i++) {
            ResultSet rs = meta.getTables(searchCatalog, searchSchema, tableAliases[i], null);
            try {
                int bestFitness = 0;
                while (rs.next()) {
                    String type = rs.getString("TABLE_TYPE");
                    Integer fitness = fitnessMap.get(type);
                    if (fitness != null) {
                        String rsSchema = rs.getString("TABLE_SCHEM");

                        if (searchSchema == null) {
                            if (userName != null && userName.equalsIgnoreCase(rsSchema)) {
                                // Favor entities whose schema name matches
                                // the user name.
                                fitness += 7;
                            }
                        }

                        if (fitness > bestFitness) {
                            bestFitness = fitness;
                            catalog = rs.getString("TABLE_CAT");
                            schema = rsSchema;
                            tableName = rs.getString("TABLE_NAME");
                            tableType = type;
                        }
                    }
                }
            } finally {
                rs.close();
            }

            if (tableName != null) {
                // Found a match, so stop checking aliases.
                break;
            }
        }
    }

    if (tableName == null && !mainInfo.isIndependent()) {
        StringBuilder buf = new StringBuilder();
        buf.append("Unable to find matching table name for type \"");
        buf.append(mainInfo.getStorableType().getName());
        buf.append("\" by looking for ");
        appendToSentence(buf, tableAliases);
        buf.append(" with catalog " + searchCatalog + " and schema " + searchSchema);
        throw new MismatchException(buf.toString());
    }

    String qualifiedTableName = tableName;
    String resolvedTableName = tableName;

    // Oracle specific stuff...
    // TODO: Migrate this to OracleSupportStrategy.
    if (tableName != null && databaseProductName.toUpperCase().contains("ORACLE")) {
        if ("TABLE".equals(tableType) && searchSchema != null) {
            // Qualified table name references the schema. Used by SQL statements.
            qualifiedTableName = searchSchema + '.' + tableName;
        } else if ("SYNONYM".equals(tableType)) {
            // Try to get the real schema. This call is Oracle specific, however.
            String select = "SELECT TABLE_OWNER,TABLE_NAME " + "FROM ALL_SYNONYMS "
                    + "WHERE OWNER=? AND SYNONYM_NAME=?";
            PreparedStatement ps = con.prepareStatement(select);
            ps.setString(1, schema); // in Oracle, schema is the owner
            ps.setString(2, tableName);
            try {
                ResultSet rs = ps.executeQuery();
                try {
                    if (rs.next()) {
                        schema = rs.getString("TABLE_OWNER");
                        resolvedTableName = rs.getString("TABLE_NAME");
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
        }
    }

    // Gather information on all columns such that metadata only needs to
    // be retrieved once.
    Map<String, ColumnInfo> columnMap = new TreeMap<String, ColumnInfo>(String.CASE_INSENSITIVE_ORDER);

    if (resolvedTableName != null) {
        ResultSet rs = meta.getColumns(catalog, schema, resolvedTableName, null);
        rs.setFetchSize(1000);
        try {
            while (rs.next()) {
                ColumnInfo info = new ColumnInfo(rs);
                columnMap.put(info.columnName, info);
            }
        } finally {
            rs.close();
        }
    }

    // Make sure that all properties have a corresponding column.
    Map<String, ? extends StorableProperty<S>> mainProperties = mainInfo.getAllProperties();
    Map<String, String> columnToProperty = new HashMap<String, String>();
    Map<String, JDBCStorableProperty<S>> jProperties = new LinkedHashMap<String, JDBCStorableProperty<S>>(
            mainProperties.size());

    ArrayList<String> errorMessages = new ArrayList<String>();

    for (StorableProperty<S> mainProperty : mainProperties.values()) {
        if (mainProperty.isDerived() || mainProperty.isJoin() || tableName == null) {
            jProperties.put(mainProperty.getName(), new JProperty<S>(mainProperty, primaryKeyCheckDisabled));
            continue;
        }

        String[] columnAliases;
        if (mainProperty.getAliasCount() > 0) {
            columnAliases = mainProperty.getAliases();
        } else {
            columnAliases = generateAliases(mainProperty.getName());
        }

        JDBCStorableProperty<S> jProperty = null;
        boolean addedError = false;

        findName: for (int i = 0; i < columnAliases.length; i++) {
            ColumnInfo columnInfo = columnMap.get(columnAliases[i]);
            if (columnInfo != null) {
                AccessInfo accessInfo = getAccessInfo(mainProperty, columnInfo.dataType,
                        columnInfo.dataTypeName, columnInfo.columnSize, columnInfo.decimalDigits);

                // No access info could be resolved for this property/column pairing,
                // meaning the Java property type cannot be read/written against the
                // column's database type. Record a mismatch error and stop searching.
                if (accessInfo == null) {
                    TypeDesc propertyType = TypeDesc.forClass(mainProperty.getType());
                    String message = "Property \"" + mainProperty.getName() + "\" has type \""
                            + propertyType.getFullName() + "\" which is incompatible with database type \""
                            + columnInfo.dataTypeName + '"';

                    // Include decimal digits when present, to help diagnose numeric
                    // precision mismatches.
                    if (columnInfo.decimalDigits > 0) {
                        message += " (decimal digits = " + columnInfo.decimalDigits + ')';
                    }

                    errorMessages.add(message);
                    addedError = true;
                    break findName;
                }

                // Nullability must agree between column and property, except for
                // independent properties (which are exempt from this check).
                if (columnInfo.nullable) {
                    if (!mainProperty.isNullable() && !mainProperty.isIndependent()) {
                        errorMessages.add(
                                "Property \"" + mainProperty.getName() + "\" must have a Nullable annotation");
                    }
                } else {
                    if (mainProperty.isNullable() && !mainProperty.isIndependent()) {
                        errorMessages.add("Property \"" + mainProperty.getName()
                                + "\" must not have a Nullable annotation");
                    }
                }

                boolean autoIncrement = mainProperty.isAutomatic();
                if (autoIncrement) {
                    // Need to execute a little query to check if column is
                    // auto-increment or not. This information is not available in
                    // the regular database metadata prior to jdk1.6.

                    // "WHERE 1=0" returns no rows; only the ResultSet metadata is
                    // examined, so the query is cheap.
                    PreparedStatement ps = con.prepareStatement(
                            "SELECT " + columnInfo.columnName + " FROM " + tableName + " WHERE 1=0");

                    try {
                        ResultSet rs = ps.executeQuery();
                        try {
                            autoIncrement = rs.getMetaData().isAutoIncrement(1);
                        } finally {
                            rs.close();
                        }
                    } finally {
                        // Nested try/finally guarantees both the statement and the
                        // result set close even if metadata access throws.
                        ps.close();
                    }
                }

                // Successful match: bind property to column with the resolved
                // ResultSet getter, PreparedStatement setter, and adapter.
                jProperty = new JProperty<S>(mainProperty, columnInfo, autoIncrement, primaryKeyCheckDisabled,
                        accessInfo.mResultSetGet, accessInfo.mPreparedStatementSet, accessInfo.getAdapter());

                break findName;
            }
        }

        // Register the matched property, or record why no column could be matched.
        if (jProperty != null) {
            jProperties.put(mainProperty.getName(), jProperty);
            columnToProperty.put(jProperty.getColumnName(), jProperty.getName());
        } else {
            // Independent properties do not require a backing column.
            if (mainProperty.isIndependent()) {
                jProperties.put(mainProperty.getName(),
                        new JProperty<S>(mainProperty, primaryKeyCheckDisabled));
            } else if (!addedError) {
                // Only report "no matching column" if a more specific error was
                // not already added for this property above.
                StringBuilder buf = new StringBuilder();
                buf.append("Unable to find matching database column for property \"");
                buf.append(mainProperty.getName());
                buf.append("\" by looking for ");
                appendToSentence(buf, columnAliases);
                errorMessages.add(buf.toString());
            }
        }
    }

    // Fail fast if any property/column mismatches were recorded above.
    if (errorMessages.size() > 0) {
        throw new MismatchException(mainInfo.getStorableType(), errorMessages);
    }

    // Now verify that primary or alternate keys match.

    if (resolvedTableName != null)
        checkPrimaryKey: {
            ResultSet rs;
            try {
                rs = meta.getPrimaryKeys(catalog, schema, resolvedTableName);
            } catch (SQLException e) {
                // Some drivers/entities cannot report primary keys; log and skip
                // the verification rather than failing the whole introspection.
                getLog().info("Unable to get primary keys for table \"" + resolvedTableName + "\" with catalog "
                        + catalog + " and schema " + schema + ": " + e);
                break checkPrimaryKey;
            }

            // Property names corresponding to the database's primary key columns.
            List<String> pkProps = new ArrayList<String>();

            try {
                while (rs.next()) {
                    String columnName = rs.getString("COLUMN_NAME");
                    String propertyName = columnToProperty.get(columnName);

                    // Every reported primary key column must map to some property.
                    if (propertyName == null) {
                        errorMessages
                                .add("Column \"" + columnName + "\" must be part of primary or alternate key");
                        continue;
                    }

                    pkProps.add(propertyName);
                }
            } finally {
                rs.close();
            }

            if (errorMessages.size() > 0) {
                // Skip any extra checks.
                break checkPrimaryKey;
            }

            if (pkProps.size() == 0) {
                // If no primary keys are reported, don't even bother checking.
                // There's no consistent way to get primary keys, and entities
                // like views and synonyms don't usually report primary keys.
                // A primary key might even be logically defined as a unique
                // constraint.
                break checkPrimaryKey;
            }

            if (matchesKey(pkProps, mainInfo.getPrimaryKey())) {
                // Good. Primary key in database is same as in Storable.
                break checkPrimaryKey;
            }

            // Check if Storable has an alternate key which matches the
            // database's primary key.
            boolean foundAnyAltKey = false;
            for (StorableKey<S> altKey : mainInfo.getAlternateKeys()) {
                if (matchesKey(pkProps, altKey)) {
                    // Okay. Primary key in database matches a Storable
                    // alternate key.
                    foundAnyAltKey = true;

                    // Also check that declared primary key is a strict subset
                    // of the alternate key. If not, keep checking alt keys.

                    if (matchesSubKey(pkProps, mainInfo.getPrimaryKey())) {
                        break checkPrimaryKey;
                    }
                }
            }

            // No fully acceptable match was found; report the most specific error.
            if (foundAnyAltKey) {
                errorMessages.add("Actual primary key matches a declared alternate key, "
                        + "but declared primary key must be a strict subset. "
                        + mainInfo.getPrimaryKey().getProperties() + " is not a subset of " + pkProps);
            } else {
                errorMessages.add("Actual primary key does not match any "
                        + "declared primary or alternate key: " + pkProps);
            }
        }

    // Key mismatch errors are either suppressed (logged as warnings) or fatal,
    // depending on whether primary key checking is disabled.
    if (errorMessages.size() > 0) {
        if (primaryKeyCheckDisabled) {
            for (String errorMessage : errorMessages) {
                getLog().warn("Suppressed error: " + errorMessage);
            }
            errorMessages.clear();
        } else {
            throw new MismatchException(mainInfo.getStorableType(), errorMessages);
        }
    }

    // IndexInfo is empty, as querying for it tends to cause a table analyze to run.
    IndexInfo[] indexInfo = new IndexInfo[0];

    // Quote table names needing it, using the driver-reported quote string. Per
    // DatabaseMetaData.getIdentifierQuoteString, a single space means quoting is
    // not supported, so it is skipped in that case.
    if (needsQuotes(tableName)) {
        String quote = meta.getIdentifierQuoteString();
        if (quote != null && !quote.equals(" ")) {
            tableName = quote + tableName + quote;
            qualifiedTableName = quote + qualifiedTableName + quote;
        }
    }

    return new JInfo<S>(mainInfo, catalog, schema, tableName, qualifiedTableName, indexInfo, jProperties);
}