Example usage for java.util.ArrayList.clear()

List of usage examples for java.util.ArrayList.clear()

Introduction

On this page you can find usage examples for java.util.ArrayList.clear().

Prototype

public void clear() 

Document

Removes all of the elements from this list.
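
As a quick illustration of that behavior, here is a minimal, self-contained sketch (the class and variable names are made up for this example): clear() empties the list in place, so the same ArrayList instance can be refilled and reused, which is the pattern most of the usage examples below rely on.

import java.util.ArrayList;
import java.util.List;

public class ArrayListClearExample {
    public static void main(String[] args) {
        List<String> batch = new ArrayList<String>();
        batch.add("a");
        batch.add("b");
        System.out.println(batch.size());    // prints 2

        // Removes all of the elements from this list; the list itself stays usable.
        batch.clear();
        System.out.println(batch.isEmpty()); // prints true

        // The same instance can be refilled and cleared again, e.g. once per batch.
        batch.add("c");
        batch.clear();
        System.out.println(batch.size());    // prints 0
    }
}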

Usage

From source file:edu.umass.cs.gnsserver.installer.EC2Runner.java

/**
 * Starts a set of EC2 hosts running GNS that we call a runset.
 *
 * @param runSetName
 */
public static void createRunSetMulti(String runSetName) {
    int timeout = AWSEC2.DEFAULTREACHABILITYWAITTIME;
    System.out.println("EC2 User Name: " + ec2UserName);
    System.out.println("AMI Name: " + amiRecordType.toString());
    System.out.println("Datastore: " + dataStoreType.toString());
    //preferences.put(RUNSETNAME, runSetName); // store the last one
    startAllMonitoringAndGUIProcesses();
    attachShutDownHook(runSetName);
    ArrayList<Thread> threads = new ArrayList<Thread>();
    // use threads to do a bunch of installs in parallel
    do {
        hostsThatDidNotStart.clear();
        //StatusModel.getInstance().queueDeleteAllEntries(); // for gui
        int cnt = STARTINGNODENUMBER;
        for (EC2RegionSpec regionSpec : regionsList) {
            int i;
            for (i = 0; i < regionSpec.getCount(); i++) {
                threads.add(new EC2RunnerThread(runSetName, regionSpec.getRegion(), Integer.toString(cnt),
                        i == 0 ? regionSpec.getIp() : null, timeout));
                cnt = cnt + 1;
            }
        }
        for (Thread thread : threads) {
            thread.start();
        }
        // and wait for all of them to complete
        try {
            for (Thread thread : threads) {
                thread.join();
            }
        } catch (InterruptedException e) {
            System.out.println("Problem joining threads: " + e);
        }

        if (!hostsThatDidNotStart.isEmpty()) {
            System.out.println("Hosts that did not start: " + hostsThatDidNotStart.keySet());
            timeout = (int) ((float) timeout * 1.5);
            System.out.println("Maybe kill them all and try again with timeout " + timeout + "ms?");
            if (showDialog("Hosts that did not start: " + hostsThatDidNotStart.keySet()
                    + "\nKill them all and try again with with timeout " + timeout + "ms?"
                    + "\nIf you don't respond in 10 seconds this will happen.", 10000)) {
                System.out.println("Yes, kill them all and try again with timeout " + timeout + "ms.");
                terminateRunSet(runSetName);
            } else {
                terminateRunSet(runSetName);
                System.out.println("No, kill them all and quit.");
                return;
            }
        }

        threads.clear();

        // keep repeating until everything starts
    } while (!hostsThatDidNotStart.isEmpty());

    // got a complete set running... now on to step 2
    System.out.println(hostTable.toString());
    // after we know all the hosts are up we run the last part

    System.out.println("Hosts that did not start: " + hostsThatDidNotStart.keySet());
    // write out a config file that the GNS installer can use for this set of EC2 hosts
    writeGNSINstallerConf(configName);
    removeShutDownHook();
    System.out.println("Finished creation of Run Set " + runSetName);
}

From source file:de.ub0r.android.callmeter.data.RuleMatcher.java

/**
 * Match all unmatched logs.
 *
 * @param context    {@link Context}
 * @param showStatus post status to dialog/handler
 * @return true if a log was matched
 */
static synchronized boolean match(final Context context, final boolean showStatus) {
    Log.d(TAG, "match(ctx, ", showStatus, ")");
    long start = System.currentTimeMillis();
    boolean ret = false;
    load(context);
    final ContentResolver cr = context.getContentResolver();
    final Cursor cursor = cr.query(DataProvider.Logs.CONTENT_URI, DataProvider.Logs.PROJECTION,
            DataProvider.Logs.PLAN_ID + " = " + DataProvider.NO_ID, null, DataProvider.Logs.DATE + " ASC");
    if (cursor != null && cursor.moveToFirst()) {
        final int l = cursor.getCount();
        Handler h;
        if (showStatus) {
            h = Plans.getHandler();
            if (h != null) {
                final Message m = h.obtainMessage(Plans.MSG_BACKGROUND_PROGRESS_MATCHER);
                m.arg1 = 0;
                m.arg2 = l;
                m.sendToTarget();
            }
        }
        try {
            ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
            int i = 1;
            do {
                ret |= matchLog(cr, ops, cursor);
                if (i % PROGRESS_STEPS == 0 || (i < PROGRESS_STEPS && i % CallMeter.TEN == 0)) {
                    h = Plans.getHandler();
                    if (h != null) {
                        final Message m = h.obtainMessage(Plans.MSG_BACKGROUND_PROGRESS_MATCHER);
                        m.arg1 = i;
                        m.arg2 = l;
                        Log.d(TAG, "send progress: ", i, "/", l);
                        m.sendToTarget();
                    } else {
                        Log.d(TAG, "send progress: ", i, " handler=null");
                    }
                    Log.d(TAG, "save logs..");
                    cr.applyBatch(DataProvider.AUTHORITY, ops);
                    ops.clear();
                    Log.d(TAG, "sleeping..");
                    try {
                        Thread.sleep(CallMeter.MILLIS);
                    } catch (InterruptedException e) {
                        Log.e(TAG, "sleep interrupted", e);
                    }
                    Log.d(TAG, "sleep finished");
                }
                ++i;
            } while (cursor.moveToNext());
            if (ops.size() > 0) {
                cr.applyBatch(DataProvider.AUTHORITY, ops);
            }
        } catch (IllegalStateException e) {
            Log.e(TAG, "illegal state in RuleMatcher's loop", e);
        } catch (OperationApplicationException e) {
            Log.e(TAG, "illegal operation in RuleMatcher's loop", e);
        } catch (RemoteException e) {
            Log.e(TAG, "remote exception in RuleMatcher's loop", e);
        }
    }
    try {
        if (cursor != null && !cursor.isClosed()) {
            cursor.close();
        }
    } catch (IllegalStateException e) {
        Log.e(TAG, "illegal state while closing cursor", e);
    }

    if (ret) {
        final SharedPreferences p = PreferenceManager.getDefaultSharedPreferences(context);
        final boolean a80 = p.getBoolean(Preferences.PREFS_ALERT80, true);
        final boolean a100 = p.getBoolean(Preferences.PREFS_ALERT100, true);
        // check for alerts
        if ((a80 || a100) && plans != null && plans.size() > 0) {
            final long now = System.currentTimeMillis();
            int alert = 0;
            Plan alertPlan = null;
            int l = plans.size();
            for (int i = 0; i < l; i++) {
                final Plan plan = plans.valueAt(i);
                if (plan == null) {
                    continue;
                }
                if (plan.nextAlert > now) {
                    Log.d(TAG, "%s: skip alert until: %d now=%d", plan, plan.nextAlert, now);
                    continue;
                }
                int used = DataProvider.Plans.getUsed(plan.type, plan.limitType, plan.billedAmount,
                        plan.billedCost);
                int usedRate = plan.limit > 0 ? (int) ((used * CallMeter.HUNDRED) / plan.limit) : 0;
                if (a100 && usedRate >= CallMeter.HUNDRED) {
                    alert = usedRate;
                    alertPlan = plan;
                } else if (a80 && alert < CallMeter.EIGHTY && usedRate >= CallMeter.EIGHTY) {
                    alert = usedRate;
                    alertPlan = plan;
                }
            }
            if (alert > 0) {
                final NotificationManager mNotificationMgr = (NotificationManager) context
                        .getSystemService(Context.NOTIFICATION_SERVICE);
                final String t = String.format(context.getString(R.string.alerts_message), alertPlan.name,
                        alert);
                NotificationCompat.Builder b = new NotificationCompat.Builder(context);
                b.setSmallIcon(android.R.drawable.stat_notify_error);
                b.setTicker(t);
                b.setWhen(now);
                b.setContentTitle(context.getString(R.string.alerts_title));
                b.setContentText(t);
                Intent i = new Intent(context, Plans.class);
                i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                i.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                b.setContentIntent(PendingIntent.getActivity(context, 0, i, PendingIntent.FLAG_CANCEL_CURRENT));
                mNotificationMgr.notify(0, b.build());
                // set nextAlert to beginning of next day
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.DAY_OF_MONTH, 1);
                cal.set(Calendar.HOUR_OF_DAY, 0);
                cal.set(Calendar.MINUTE, 0);
                cal.set(Calendar.SECOND, 0);
                cal.set(Calendar.MILLISECOND, 0);
                alertPlan.nextAlert = cal.getTimeInMillis();
                final ContentValues cv = new ContentValues();
                cv.put(DataProvider.Plans.NEXT_ALERT, alertPlan.nextAlert);
                cr.update(DataProvider.Plans.CONTENT_URI, cv, DataProvider.Plans.ID + " = ?",
                        new String[] { String.valueOf(alertPlan.id) });
            }
        }
    }
    long end = System.currentTimeMillis();
    Log.i(TAG, "match(): ", end - start, "ms");
    return ret;
}

From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java

public static SampleData interpretCSV(StatsConfig config, List<String[]> rows, ReportTypes pipelineType,
        HashMap<String, Integer> pipelineMetadata) {

    HashMap<Integer, ArrayList<ArrayList<Double>>> samplesPerMeasure = new HashMap<Integer, ArrayList<ArrayList<Double>>>();

    //Only remove first line if it is a header line
    if (rows.size() > 0 && rows.get(0)[6].equals("IsBaseline")) {
        rows.remove(0);
    }

    if (rows.size() > 1) {

        logger.log(Level.INFO, "Extracting samples and metadata from imported data.");
        int selectBestN = config.getSelectBestN();
        String selectByMeasure = config.getSelectByMeasure();

        // Preprocessing: Parse different models (classifier + feature set column) and measures
        ArrayList<String> measures = new ArrayList<String>();
        ArrayList<Pair<String, String>> datasets = new ArrayList<Pair<String, String>>();
        ArrayList<Pair<String, String>> models = new ArrayList<Pair<String, String>>();
        ArrayList<Pair<String, String>> baselineModels = new ArrayList<Pair<String, String>>();

        for (int i = 0; i < rows.size(); i++) {
            String[] columns = rows.get(i);
            String classifier = columns[2];
            if (classifier.equals("0")) {
                classifier = "Aggregated";
            }
            String featureSets = columns[3];
            Pair<String, String> model = Pair.of(classifier, featureSets);
            if (!models.contains(model)) {
                models.add(model);
                if (!baselineModels.contains(model) && Integer.parseInt(columns[6]) == 1) {
                    baselineModels.add(model);
                }
            }
            if (!measures.contains(columns[4])) {
                measures.add(columns[4]);
            }
        }

        //Check: Baseline only allowed when > 2 models are evaluated
        if (models.size() <= 2 && baselineModels.size() > 0) {
            logger.log(Level.WARN,
                    "At least three models are required to make an evaluation against a baseline meaningful. In the dataset, a baseline was specified for only two models. The baseline indicator will be ignored.");
            System.err.println(
                    "At least three models are required to make an evaluation against a baseline meaningful. In the dataset, a baseline was specified for only two models. The baseline indicator will be ignored.");
            baselineModels.clear();
        }

        // Now sort samples according to data
        Collections.sort(rows, new Helpers.LexicographicArrayComparator());
        for (int i = 0; i < rows.size(); i++) {
            String[] columns = rows.get(i);
            Pair<String, String> data = null;
            String trainData = columns[0].trim();
            String testData = columns[1].trim();

            //If this is a CV, numbers after a dot indicate fold UUIDs, so they have to be split off to retain the original dataset name
            if (pipelineType == ReportTypes.CV) {
                trainData = trainData.split("\\.")[0];
                testData = testData.split("\\.")[0];
            }

            if (trainData.equals(testData)) {
                data = Pair.of(trainData, null);
            } else {
                //columns[1] = columns[1].split(".")[0];
                data = Pair.of(trainData, testData);
            }
            if (!datasets.contains(data)) {
                datasets.add(data);
            }
        }

        // Preprocessing: Initialize sample container per measure/model
        for (int i = 0; i < measures.size(); i++) {
            ArrayList<ArrayList<Double>> samplesPerModel = new ArrayList<ArrayList<Double>>();
            for (int j = 0; j < models.size(); j++) {
                samplesPerModel.add(new ArrayList<Double>());
            }
            samplesPerMeasure.put(i, samplesPerModel);
        }

        // Assign samples to different models
        for (int i = 0; i < rows.size(); i++) {
            String[] columns = rows.get(i);
            String classifier = columns[2];
            if (classifier.equals("0")) {
                classifier = "Aggregated";
            }
            String featureSet = columns[3];
            String measure = columns[4];
            double value = Double.parseDouble(columns[5]);

            int measureIndex = measures.indexOf(measure);
            int modelIndex = models.indexOf(Pair.of(classifier, featureSet));

            ArrayList<ArrayList<Double>> sPMeasure = samplesPerMeasure.get(measureIndex);
            sPMeasure.get(modelIndex).add(value);
        }

        // Transform into data format required by the statistical evaluation
        HashMap<String, ArrayList<ArrayList<Double>>> indexedSamples = new HashMap<String, ArrayList<ArrayList<Double>>>();
        HashMap<String, ArrayList<Double>> indexedSamplesAverage = new HashMap<String, ArrayList<Double>>();

        Iterator<Integer> it = samplesPerMeasure.keySet().iterator();
        while (it.hasNext()) {
            int measureIndex = it.next();
            ArrayList<ArrayList<Double>> samplesPerModel = samplesPerMeasure.get(measureIndex);

            ArrayList<Double> sampleAverages = new ArrayList<Double>(models.size());
            for (int modelIndex = 0; modelIndex < models.size(); modelIndex++) {
                ArrayList<Double> sample = samplesPerModel.get(modelIndex);
                double average = 0;
                for (int j = 0; j < sample.size(); j++) {
                    average += sample.get(j);
                }
                average /= sample.size();
                sampleAverages.add(average);
            }
            indexedSamplesAverage.put(measures.get(measureIndex), sampleAverages);
            indexedSamples.put(measures.get(measureIndex), samplesPerMeasure.get(measureIndex));
        }

        // Check if data fulfills general requirements: at least 5 samples for each model, same number of samples per model
        it = samplesPerMeasure.keySet().iterator();
        while (it.hasNext()) {
            Integer measureIndex = it.next();
            ArrayList<ArrayList<Double>> samplesPerModel = samplesPerMeasure.get(measureIndex);
            int s = samplesPerModel.get(0).size();

            for (int i = 1; i < samplesPerModel.size(); i++) {
                if (samplesPerModel.get(i).size() < 5) {
                    logger.log(Level.ERROR, "More than 5 samples are needed per model and measure. Aborting.");
                    System.err.println("More than 5 samples are needed per model and measure. Aborting.");
                    System.exit(1);
                }
                if (samplesPerModel.get(i).size() != s) {
                    logger.log(Level.ERROR,
                            "Different models are not represented by the same number of samples. Aborting.");
                    System.err.println(
                            "Different models are not represented by the same number of samples. Aborting.");
                    System.exit(1);
                }
            }
        }

        // Collect remaining data required for creating a SampleData object
        // Check if data fulfills requirements of the specific PipelineTypes
        int nFolds = 1;
        int nRepetitions = 1;
        switch (pipelineType) {
        case CV:
            if (datasets.size() > 1) {
                System.err.println(
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                return null;
            } else if (datasets.get(0).getValue() != null) {
                System.err.println(
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                return null;
            }
            nFolds = indexedSamples.get(measures.get(0)).get(0).size();
            nRepetitions = 1;
            break;
        case MULTIPLE_CV:
            if (datasets.size() > 1) {
                System.err.println(
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                return null;
            } else if (datasets.get(0).getValue() != null) {
                System.err.println(
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                return null;
            }
            nFolds = pipelineMetadata.get("nFolds");
            nRepetitions = indexedSamples.get(measures.get(0)).get(0).size();
            break;
        case CV_DATASET_LVL:
            nFolds = pipelineMetadata.get("nFolds");
            nRepetitions = 1;
            break;
        case MULTIPLE_CV_DATASET_LVL:
            nFolds = pipelineMetadata.get("nFolds");
            nRepetitions = pipelineMetadata.get("nRepetitions");
            break;
        case TRAIN_TEST_DATASET_LVL:
            nFolds = 1;
            nRepetitions = 1;
            break;
        default:
            System.err.println("Unknown PipelineType. Aborting.");
            logger.log(Level.ERROR, "Unknown PipelineType. Aborting.");
            return null;
        }

        //Reorder data in case of a baseline evaluation (baseline first)
        if (baselineModels.size() == 1) {
            Pair<String, String> baselineModel = baselineModels.get(0);
            int modelIndex = models.indexOf(baselineModel);
            models.remove(modelIndex);
            models.add(0, baselineModel);
            for (String measure : indexedSamples.keySet()) {
                ArrayList<Double> s = indexedSamples.get(measure).get(modelIndex);
                indexedSamples.get(measure).remove(modelIndex);
                indexedSamples.get(measure).add(0, s);
                double a = indexedSamplesAverage.get(measure).get(modelIndex);
                indexedSamplesAverage.get(measure).remove(modelIndex);
                indexedSamplesAverage.get(measure).add(0, a);
            }
        }

        SampleData sampleData = new SampleData(null, indexedSamples, indexedSamplesAverage, datasets, models,
                baselineModels, pipelineType, nFolds, nRepetitions);
        sampleData = Helpers.truncateData(sampleData, selectBestN, selectByMeasure);

        return sampleData;
    }
    return null;
}

From source file:com.hichinaschool.flashcards.async.DeckTask.java

private TaskData doInBackgroundUpdateCardBrowserList(TaskData... params) {
    // Log.i(AnkiDroidApp.TAG, "doInBackgroundSortCards");
    if (params.length == 1) {
        Comparator comparator = params[0].getComparator();
        ArrayList<HashMap<String, String>> card = params[0].getCards();
        Collections.sort(card, comparator);
    } else {
        ArrayList<HashMap<String, String>> allCard = params[0].getCards();
        ArrayList<HashMap<String, String>> cards = params[1].getCards();
        cards.clear();
        HashSet<String> tags = new HashSet<String>();
        for (String s : (HashSet<String>) params[2].getObjArray()[0]) {
            tags.add(s.toLowerCase());
        }
        for (int i = 0; i < allCard.size(); i++) {
            HashMap<String, String> card = allCard.get(i);
            if (Arrays.asList(card.get("tags").toLowerCase().trim().split("\\s")).containsAll(tags)) {
                cards.add(allCard.get(i));
            }
        }
    }
    return null;
}

From source file:org.apache.hadoop.hive.hwi.HWISessionItem.java

/**
 * runQuery iterates the list of queries executing each query.
 */
public void runQuery() {
    FileOutputStream fos = null;
    if (getResultFile() != null) {
        try {
            fos = new FileOutputStream(new File(resultFile));
            ss.out = new PrintStream(fos, true, "UTF-8");
        } catch (java.io.FileNotFoundException fex) {
            l4j.error(getSessionName() + " opening resultfile " + resultFile, fex);
        } catch (java.io.UnsupportedEncodingException uex) {
            l4j.error(getSessionName() + " opening resultfile " + resultFile, uex);
        }
    } else {
        l4j.debug(getSessionName() + " Output file was not specified");
    }
    l4j.debug(getSessionName() + " state is now QUERY_RUNNING.");
    status = WebSessionItemStatus.QUERY_RUNNING;

    // expect one return per query
    queryRet = new ArrayList<Integer>(queries.size());
    for (int i = 0; i < queries.size(); i++) {
        String cmd = queries.get(i);
        String cmd_trimmed = cmd.trim();
        String[] tokens = cmd_trimmed.split("\\s+");
        String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
        CommandProcessor proc = null;
        try {
            proc = CommandProcessorFactory.get(tokens[0]);
        } catch (SQLException e) {
            l4j.error(getSessionName() + " error processing " + cmd, e);
        }
        if (proc != null) {
            if (proc instanceof Driver) {
                Driver qp = (Driver) proc;
                qp.setTryCount(Integer.MAX_VALUE);
                try {
                    queryRet.add(Integer.valueOf(qp.run(cmd).getResponseCode()));
                    ArrayList<String> res = new ArrayList<String>();
                    try {
                        while (qp.getResults(res)) {
                            ArrayList<String> resCopy = new ArrayList<String>();
                            resCopy.addAll(res);
                            resultBucket.add(resCopy);
                            if (resultBucket.size() > resultBucketMaxSize) {
                                resultBucket.remove(0);
                            }
                            for (String row : res) {
                                if (ss != null) {
                                    if (ss.out != null) {
                                        ss.out.println(row);
                                    }
                                } else {
                                    throw new RuntimeException("ss was null");
                                }
                            }
                            res.clear();
                        }

                    } catch (IOException ex) {
                        l4j.error(
                                getSessionName() + " getting results " + getResultFile() + " caused exception.",
                                ex);
                    }
                } catch (CommandNeedRetryException e) {
                    // this should never happen since we Driver.setTryCount(Integer.MAX_VALUE)
                    l4j.error(getSessionName() + " Exception when executing", e);
                } finally {
                    qp.close();
                }
            } else {
                try {
                    queryRet.add(Integer.valueOf(proc.run(cmd_1).getResponseCode()));
                } catch (CommandNeedRetryException e) {
                    // this should never happen if there is no bug
                    l4j.error(getSessionName() + " Exception when executing", e);
                }
            }
        } else {
            // processor was null
            l4j.error(getSessionName() + " query processor was not found for query " + cmd);
        }
    } // end for

    // cleanup
    try {
        if (fos != null) {
            fos.close();
        }
    } catch (IOException ex) {
        l4j.error(getSessionName() + " closing result file " + getResultFile() + " caused exception.", ex);
    }
    status = WebSessionItemStatus.READY;
    l4j.debug(getSessionName() + " state is now READY");
    synchronized (runnable) {
        runnable.notifyAll();
    }
}

From source file:com.flexible.flexibleadapter.common.FlexibleItemAnimator.java

private void runAddAnimation(boolean removalsPending, boolean changesPending, boolean movesPending,
        boolean additionsPending) {
    if (additionsPending) {
        final ArrayList<ViewHolder> additions = new ArrayList<>();
        // Sort addition animations based on their original layout position
        Collections.sort(mPendingAdditions, new Comparator<ViewHolder>() {
            @Override
            public int compare(ViewHolder vh1, ViewHolder vh2) {
                return vh1.getLayoutPosition() - vh2.getLayoutPosition();
            }
        });
        additions.addAll(mPendingAdditions);
        mAdditionsList.add(additions);
        mPendingAdditions.clear();
        Runnable adder = new Runnable() {
            public void run() {
                int index = 0;
                for (ViewHolder holder : additions) {
                    doAnimateAdd(holder, index++);
                }
                additions.clear();
                mAdditionsList.remove(additions);
            }
        };
        if (removalsPending || movesPending || changesPending) {
            long removeDuration = removalsPending ? getRemoveDuration() : 0;
            long moveDuration = movesPending ? getMoveDuration() : 0;
            long changeDuration = changesPending ? getChangeDuration() : 0;
            long totalDelay = removeDuration + Math.max(moveDuration, changeDuration);
            View view = additions.get(0).itemView;
            ViewCompat.postOnAnimationDelayed(view, adder, totalDelay);
        } else {
            adder.run();
        }
    }
}

From source file:civilisation.individu.Humain.java

/**
 * @param tortues the list of turtles to choose from
 * @return the turtle farthest away from the calling turtle
 */
@SuppressWarnings("null")
public Turtle MaxOneOf(ArrayList<Turtle> tortues) {
    ArrayList<Turtle> choix = new ArrayList<Turtle>();
    double max = 0;
    // Keep the turtles at the greatest distance from this one; ties are all retained.
    for (int i = 0; i < tortues.size(); i++) {
        int x = tortues.get(i).xcor();
        int y = tortues.get(i).ycor();
        if (this.distance(x, y) > max) {
            max = this.distance(x, y);
            choix.clear();
            choix.add(tortues.get(i));
        } else if (this.distance(x, y) == max) {
            choix.add(tortues.get(i));
        }
    }
    return this.oneOf(choix);
}

From source file:com.wagos.calendarcard.RecyclePagerAdapter.java

@Override
public final void finishUpdate(ViewGroup container) {
    ArrayList<View> recycledViews = new ArrayList<>();

    // Remove views backing destroyed items from the specified container,
    // and queue them for recycling.
    for (int i = 0; destroyedItems.size() > 0 && i < container.getChildCount(); i++) {
        View v = container.getChildAt(i);
        Iterator<Object> it = destroyedItems.iterator();
        while (it.hasNext()) {
            if (isViewFromObject(v, it.next())) {
                container.removeView(v);
                recycledViews.add(v);
                it.remove();
                break;
            }
        }
    }

    // Render views and attach them to the container. Page views are reused
    // whenever possible.
    for (Object instantiatedItem : instantiatedItems) {
        View convertView = null;
        if (recycledViews.size() > 0)
            convertView = recycledViews.remove(0);

        if (convertView != null) {
            // Re-add existing view before rendering so that we can make change inside getView()
            container.addView(convertView);
            convertView = getView(instantiatedItem, convertView, container);
        } else {
            convertView = getView(instantiatedItem, null, container);
            container.addView(convertView);
        }

        // Set another tag id to not break ViewHolder pattern
        convertView.setTag(R.id.view_data, instantiatedItem);
    }

    instantiatedItems.clear();
    recycledViews.clear();
}

From source file:configuration.Util.java

public static void dictsReset(ArrayList<HashMap> listDicts, HashMap<JCheckBox, JSpinner> DictBoxSpinner,
        HashMap<JCheckBox, JTextField> DictBoxTextField, HashMap<JCheckBox, JComboBox> DictBoxComboBox,
        HashMap<JRadioButton, JSpinner> DictRadioButtonSpinner,
        HashMap<JRadioButton, JTextField> DictRadioButtonTextField) {
    // Empty every component-lookup map, then rebuild listDicts so it references the (now empty) maps.
    DictBoxSpinner.clear();
    DictBoxTextField.clear();
    DictBoxComboBox.clear();
    DictRadioButtonSpinner.clear();
    DictRadioButtonTextField.clear();

    listDicts.clear();
    listDicts.add(DictBoxSpinner);
    listDicts.add(DictBoxTextField);
    listDicts.add(DictBoxComboBox);
    listDicts.add(DictRadioButtonSpinner);
    listDicts.add(DictRadioButtonTextField);
}

From source file:com.krawler.crm.hrmsintegration.bizservice.GoalManagementServiceImpl.java

public JSONObject getGoalHistoryJSON(boolean isexport, String goalid, String companyid, String userid,
        DateFormat dateFormat, String start, String limit) throws ServiceException {
    JSONArray jarr = new JSONArray();
    KwlReturnObject kmsg = null;
    JSONObject jobj = new JSONObject();
    int count = 0;
    List ll = null;
    List lst = null;
    try {

        ArrayList filter_names = new ArrayList();
        ArrayList filter_params = new ArrayList();
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        filter_names.add("c.id");
        filter_params.add(goalid);
        kmsg = hrmsIntDAOObj.getFinalGoals(requestParams, filter_names, filter_params);
        count = kmsg.getRecordTotalCount();
        lst = kmsg.getEntityList();
        Iterator ite = lst.iterator();
        while (ite.hasNext()) {
            Finalgoalmanagement fgmt = (Finalgoalmanagement) ite.next();

            filter_names.clear();
            filter_params.clear();
            requestParams.clear();

            requestParams = getFilter(2, fgmt.getStartdate(), fgmt.getEnddate(), userid, companyid);
            if (!StringUtil.isNullOrEmpty(start)) {
                requestParams.put("start", start);
                requestParams.put("limit", limit);
                requestParams.put("pagingFlag", true);
            }
            filter_names = (ArrayList) requestParams.get("filter_names");
            filter_params = (ArrayList) requestParams.get("filter_params");
            kmsg = crmAccountDAOObj.getAccountOwners(requestParams, filter_names, filter_params);
            ll = kmsg.getEntityList();
            count = kmsg.getRecordTotalCount();
            Iterator itetype2 = ll.iterator();
            while (itetype2.hasNext()) {
                CrmAccount ca = (CrmAccount) itetype2.next();
                JSONObject tmpObj = new JSONObject();
                String[] productInfo = crmAccountHandler.getAccountProducts(crmAccountDAOObj,
                        ca.getAccountid());

                tmpObj.put("accountid", ca.getAccountid());
                tmpObj.put("accountname", ca.getAccountname());
                tmpObj.put("revenue", StringUtil.isNullOrEmpty(ca.getRevenue()) ? "0" : ca.getRevenue());
                tmpObj.put("createdon",
                        isexport ? crmManagerCommon.exportDateNull(ca.getCreatedon(), dateFormat)
                                : crmManagerCommon.dateNull(ca.getCreatedon()));
                tmpObj.put("productid", productInfo[0]);
                tmpObj.put("product", productInfo[1]);
                tmpObj.put("exportmultiproduct", productInfo[2]);
                tmpObj.put("type",
                        (ca.getCrmCombodataByAccounttypeid() != null
                                ? ca.getCrmCombodataByAccounttypeid().getValue()
                                : ""));
                tmpObj.put("typeid", crmManagerCommon.comboNull(ca.getCrmCombodataByAccounttypeid()));
                tmpObj.put("industryid", crmManagerCommon.comboNull(ca.getCrmCombodataByIndustryid()));
                tmpObj.put("industry",
                        (ca.getCrmCombodataByIndustryid() != null ? ca.getCrmCombodataByIndustryid().getValue()
                                : ""));
                tmpObj.put("website", (ca.getWebsite() != null ? ca.getWebsite() : ""));
                jarr.put(tmpObj);
            }
        }
        jobj.put("data", jarr);
        jobj.put("count", count);

    } catch (JSONException e) {
        logger.warn("JSONException exception in getGoalHistoryJSON()", e);
    } catch (ServiceException e) {
        logger.warn("ServiceException exception in getGoalHistoryJSON()", e);
    }
    return jobj;
}