Example usage for java.util ArrayList clear

List of usage examples for java.util ArrayList clear

Introduction

On this page you can find example usage of java.util.ArrayList.clear().

Prototype

public void clear() 

Source Link

Document

Removes all of the elements from this list.

Usage

From source file:com.example.findmygf.MapActivity.java

/**
 * Extracts the text content of every {@code <woeid>} element in the given
 * XML document.
 *
 * @param srcDoc parsed XML document to scan for WOEID elements
 * @return list of WOEID strings, empty when the document contains none
 */
private ArrayList<String> parseWOEID(Document srcDoc) {
    ArrayList<String> listWOEID = new ArrayList<String>();

    NodeList nodeListDescription = srcDoc.getElementsByTagName("woeid");
    // getLength() can never be negative, so the original ">= 0 ... else
    // listWOEID.clear()" branch was dead code; a plain loop naturally
    // handles the empty case (returns an empty list).
    for (int i = 0; i < nodeListDescription.getLength(); i++) {
        listWOEID.add(nodeListDescription.item(i).getTextContent());
    }

    return listWOEID;
}

From source file:ffx.autoparm.Potential2.java

/**
 * <p>gen_pot_grid</p>
 *
 * Builds the electrostatic-potential grid, one [nPoints][4] slice per
 * symmetry operator (columns: x, y, z, potential). Points are read from
 * {@code target_file} when it is readable; otherwise a grid is generated
 * around the atoms and the potential at each point is evaluated.
 *
 * @param target_file a {@link java.io.File} object.
 * @param atoms an array of {@link ffx.potential.bonded.Atom} objects.
 * @param type a int.
 * @return an array of double.
 */
public double[][][] gen_pot_grid(File target_file, Atom atoms[], int type) {
    double pot_grid[][][] = new double[nSymm][][]; // nSymm, nPoints, 4
    ArrayList<Double[]> temp_grid = new ArrayList<Double[]>();
    boolean haveTargetFile = target_file != null && target_file.exists() && target_file.canRead();
    if (haveTargetFile) {
        try {
            // NOTE: the same file is re-read for every symmetry operator;
            // as in the original, this won't work for real nSymm handling.
            for (int symm = 0; symm < nSymm; symm++) {
                read_target_file(target_file, temp_grid, type);
                int points = temp_grid.size();
                pot_grid[symm] = new double[points][4];
                for (int p = 0; p < points; p++) {
                    Double entry[] = temp_grid.get(p);
                    // All four columns (x, y, z, potential) come from the file.
                    for (int c = 0; c < 4; c++) {
                        pot_grid[symm][p][c] = entry[c];
                    }
                }
                temp_grid.clear();
            }
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("Error opening or reading target/cube file");
        }
    } else {
        for (int symm = 0; symm < nSymm; symm++) {
            make_grid(temp_grid, atoms);
            int points = temp_grid.size();
            pot_grid[symm] = new double[points][4];
            for (int p = 0; p < points; p++) {
                Double xyz[] = temp_grid.get(p);
                pot_grid[symm][p][0] = xyz[0];
                pot_grid[symm][p][1] = xyz[1];
                pot_grid[symm][p][2] = xyz[2];
                // Fourth column is computed from the coordinates, not stored.
                pot_grid[symm][p][3] = pme.potpoint(xyz);
            }
            temp_grid.clear();
        }
    }
    return pot_grid;
}

From source file:com.yoctopuce.YoctoAPI.YMessageBox.java

/**
 * Re-reads the SIM slot bitmap and synchronizes the local PDU and message
 * caches with it: cached PDUs whose slot is still occupied are kept,
 * newly occupied slots are fetched from the device, and concatenated SMS
 * parts sharing a signature are reassembled into single messages.
 *
 * @return YAPI.SUCCESS when the caches are up to date
 * @throws YAPI_Exception on communication failure with the device
 */
public int checkNewMessages() throws YAPI_Exception {
    String bitmapStr;
    byte[] prevBitmap;
    byte[] newBitmap;
    int slot;
    int nslots;
    int pduIdx;
    int idx;
    int bitVal;
    int prevBit;
    int i;
    int nsig;
    int cnt;
    String sig;
    ArrayList<YSms> newArr = new ArrayList<YSms>();
    ArrayList<YSms> newMsg = new ArrayList<YSms>();
    ArrayList<YSms> newAgg = new ArrayList<YSms>();
    ArrayList<String> signatures = new ArrayList<String>();
    YSms sms;

    // may throw an exception
    bitmapStr = get_slotsBitmap();
    // An unchanged bitmap means no slot changed since the last poll.
    if (bitmapStr.equals(_prevBitmapStr)) {
        return YAPI.SUCCESS;
    }
    prevBitmap = YAPIContext._hexStrToBin(_prevBitmapStr);
    newBitmap = YAPIContext._hexStrToBin(bitmapStr);
    _prevBitmapStr = bitmapStr;
    // Each bitmap byte covers 8 slots.
    nslots = 8 * (newBitmap).length;
    newArr.clear();
    newMsg.clear();
    signatures.clear();
    nsig = 0;
    // copy known messages
    pduIdx = 0;
    while (pduIdx < _pdus.size()) {
        sms = _pdus.get(pduIdx);
        slot = sms.get_slot();
        // Byte index (slot/8) and bit mask (slot%8) of this slot in the bitmap.
        idx = ((slot) >> (3));
        if (idx < (newBitmap).length) {
            bitVal = ((1) << ((((slot) & (7)))));
            if ((((newBitmap[idx]) & (bitVal))) != 0) {
                // Slot still occupied: keep the cached PDU.
                newArr.add(sms);
                if (sms.get_concatCount() == 0) {
                    // Stand-alone SMS: already a complete message.
                    newMsg.add(sms);
                } else {
                    // Part of a concatenated SMS: record its signature once.
                    sig = sms.get_concatSignature();
                    i = 0;
                    while ((i < nsig) && ((sig).length() > 0)) {
                        if (signatures.get(i).equals(sig)) {
                            // Already known: blank sig so it is not re-added.
                            sig = "";
                        }
                        i = i + 1;
                    }
                    if ((sig).length() > 0) {
                        signatures.add(sig);
                        nsig = nsig + 1;
                    }
                }
            }
        }
        pduIdx = pduIdx + 1;
    }
    // receive new messages
    slot = 0;
    while (slot < nslots) {
        idx = ((slot) >> (3));
        bitVal = ((1) << ((((slot) & (7)))));
        prevBit = 0;
        if (idx < (prevBitmap).length) {
            prevBit = ((prevBitmap[idx]) & (bitVal));
        }
        // Occupied now but not before: this slot holds a new PDU.
        if ((((newBitmap[idx]) & (bitVal))) != 0) {
            if (prevBit == 0) {
                sms = fetchPdu(slot);
                newArr.add(sms);
                if (sms.get_concatCount() == 0) {
                    newMsg.add(sms);
                } else {
                    // Same signature bookkeeping as for cached PDUs above.
                    sig = sms.get_concatSignature();
                    i = 0;
                    while ((i < nsig) && ((sig).length() > 0)) {
                        if (signatures.get(i).equals(sig)) {
                            sig = "";
                        }
                        i = i + 1;
                    }
                    if ((sig).length() > 0) {
                        signatures.add(sig);
                        nsig = nsig + 1;
                    }
                }
            }
        }
        slot = slot + 1;
    }
    _pdus = newArr;
    // append complete concatenated messages
    i = 0;
    while (i < nsig) {
        sig = signatures.get(i);
        // Collect every cached part carrying this signature; cnt is the
        // expected part count taken from the first matching part.
        cnt = 0;
        pduIdx = 0;
        while (pduIdx < _pdus.size()) {
            sms = _pdus.get(pduIdx);
            if (sms.get_concatCount() > 0) {
                if (sms.get_concatSignature().equals(sig)) {
                    if (cnt == 0) {
                        cnt = sms.get_concatCount();
                        newAgg.clear();
                    }
                    newAgg.add(sms);
                }
            }
            pduIdx = pduIdx + 1;
        }
        // Only assemble when every expected part is present.
        if ((cnt > 0) && (newAgg.size() == cnt)) {
            sms = new YSms(this);
            sms.set_parts(newAgg);
            newMsg.add(sms);
        }
        i = i + 1;
    }
    _messages = newMsg;

    return YAPI.SUCCESS;
}

From source file:com.tct.mail.NotificationActionIntentService.java

/**
 * Give the chance to restore the mail's status,here we restore to queue status
 * @param context/* ww w. j  a v a 2s.  c  o m*/
 * @param boxId is the outbox's id
 */
private void cleanFaildMailStatus(Context context, long boxId) {
    Cursor cursor = null;
    ContentResolver resolver = context.getContentResolver();
    ArrayList<Long> pendingSendMails = new ArrayList<Long>();
    ContentValues value = new ContentValues();
    //query the failed mails
    try {
        cursor = resolver.query(EmailContent.Message.CONTENT_URI,
                EmailContent.Message.ID_COLUMN_WITH_STATUS_PROJECTION,
                MessageColumns.SENDING_STATUS + "=?" + " AND " + MessageColumns.MAILBOX_KEY + "=?",
                new String[] { Long.toString(EmailContent.Message.MAIL_IN_FAILED_STATUS),
                        Long.toString(boxId) },
                null);
        while (cursor.moveToNext()) {
            pendingSendMails.add(cursor.getLong(0));
        }
    } catch (Exception e) {
        LogUtils.e(LogUtils.TAG, e, "Exception happen during queue the failed mails in cleanFaildMailStatus ");
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
    //update the mails status
    if (pendingSendMails.size() > 0) {
        for (long id : pendingSendMails) {
            value.clear();
            Uri uri = ContentUris.withAppendedId(EmailContent.Message.CONTENT_URI, id);
            value.put(MessageColumns.SENDING_STATUS, EmailContent.Message.MAIL_IN_QUEUE_STATUS);
            resolver.update(uri, value, null, null);
            LogUtils.d(LogUtils.TAG, "update the mail's status from FAIL to QUEUE,current message id is %d",
                    id);
        }
    }
    pendingSendMails.clear();
}

From source file:com.miz.mizuu.fragments.ShowSeasonsFragment.java

/**
 * Adds every known episode to the visible list and updates the per-season
 * episode counts. A season seen for the first time (count still 0) is also
 * appended to the seasons list.
 */
private void addAllEpisodes() {
    // Snapshot so iteration is independent of the shared allEpisodes list.
    ArrayList<TvShowEpisode> tempEpisodes = new ArrayList<TvShowEpisode>(allEpisodes);

    for (TvShowEpisode episode : tempEpisodes) {
        // Hoist the repeated Integer.valueOf(...) boxing and the repeated
        // count lookups the original did on every branch.
        int season = Integer.valueOf(episode.getSeason());
        int count = seasonEpisodeCount.get(season);
        if (count == 0) {
            // First episode seen for this season.
            seasonEpisodeCount.append(season, 1);
            seasons.add(episode.getSeason());
        } else {
            seasonEpisodeCount.append(season, count + 1);
        }
        shownEpisodes.add(episode);
    }
    // The original cleared and nulled tempEpisodes here; both are no-ops
    // for a local going out of scope, so they were removed.
}

From source file:com.krawler.spring.crm.common.crmManagerDAOImpl.java

/**
 * Returns the user(s) next in the lead-routing rotation for a company:
 * the entry whose ordernum follows the last-used one, wrapping to 0 past
 * the maximum. When no last-used entry matches, falls back to the user(s)
 * at ordernum 0.
 *
 * @param companyid company whose routing rotation is consulted
 * @return matching users; may be empty when no routing users are configured
 * @throws ServiceException propagated from query execution
 */
@Override
public List<User> getNextLeadRoutingUsers(String companyid) throws ServiceException {
    StringBuilder maxNumhql = new StringBuilder(
            "select max(ordernum) from LeadRoutingUsers where user.company.companyID = ?");
    // Parameterized instead of the original raw ArrayList.
    List<Object> filter_params = new ArrayList<Object>();
    filter_params.add(companyid);
    List maxList = executeQuery(maxNumhql.toString(), filter_params.toArray());
    // NOTE(review): max(...) yields a single row that is null when no
    // LeadRoutingUsers exist for this company; get(0).toString() would then
    // NPE — confirm callers guarantee at least one routing row.
    int maxcnt = Integer.parseInt(maxList.get(0).toString());

    StringBuilder hql = new StringBuilder("from User where userID in (select user.userID from LeadRoutingUsers "
            + "where ordernum = (select CASE when (ordernum+1) > ? then 0 else (ordernum+1) END from LeadRoutingUsers "
            + "where lastused = ? and user.company.companyID = ?) and user.company.companyID = ?)");
    // Reuse the parameter list for the rotation query.
    filter_params.clear();
    filter_params.add(maxcnt);
    filter_params.add(true);
    filter_params.add(companyid);
    filter_params.add(companyid);
    List<User> userList = executeQuery(hql.toString(), filter_params.toArray());
    if (userList.isEmpty()) {
        // No rotation entry matched (e.g. nothing flagged lastused): restart
        // the rotation at ordernum 0.
        hql = new StringBuilder("from User where userID in (select user.userID from LeadRoutingUsers "
                + "where ordernum = 0 and user.company.companyID = ?)");
        filter_params.clear();
        filter_params.add(companyid);
        userList = executeQuery(hql.toString(), filter_params.toArray());
    }
    return userList;
}

From source file:com.nuance.expertassistant.ReadExcelFile.java

/**
 * Queries the QA service for an answer to {@code question} and checks
 * whether any returned candidate contains {@code expectedAnswer}.
 *
 * Result layout (4 ints): [found 0/1, matching answer index,
 * evidence+answer text length, total candidate count]; the default
 * [0, -1, -1, -1] is returned on no match or any failure.
 *
 * @param projectID      QA project to query
 * @param question       question text to send
 * @param expectedAnswer substring that marks a candidate as correct
 * @return analysis vector as described above
 */
public ArrayList<Integer> retrieveAnswers(String projectID, String question, String expectedAnswer) {

    ArrayList<Integer> analysis = new ArrayList<Integer>();
    analysis.add(0);
    analysis.add(-1);
    analysis.add(-1);
    analysis.add(-1);

    try {
        String contextID = InvokeQACoreAPI.getContextID(projectID).replaceAll("\n", "");
        System.out.println(" The contextID is :[" + contextID + "]");

        String answerString = InvokeQACoreAPI.getAnswer(contextID, projectID, question);
        JSONArray jsonArray = new JSONArray(answerString);

        for (int idx = 0; idx < jsonArray.length(); idx++) {
            // Cache the nested "answer" object instead of re-fetching it
            // for every field.
            JSONObject answer = jsonArray.getJSONObject(idx).getJSONObject("answer");
            String answerText = answer.getString("text");
            String evidence = html2text(answer.getJSONObject("evidence").getString("text"));
            int confidence = answer.getInt("confidence");
            Double score = answer.getDouble("score");

            System.out.println("ANSWER TEXT (" + idx + "):[" + answerText + "]");
            System.out.println("EVIDENCE TEXT (" + idx + "):[" + evidence + "]");
            System.out.println("CONFIDENCE (" + idx + "):[" + confidence + "]");
            System.out.println("SCORE (" + idx + "):[" + score + "]");

            boolean matched = evidence.contains(expectedAnswer) || answerText.contains(expectedAnswer);
            if (matched) {
                analysis.clear();
                analysis.add(1);
                analysis.add(idx);
                analysis.add(evidence.length() + answerText.length());
                analysis.add(jsonArray.length());

                System.out.println("Analysis is " + analysis.toString());

                return analysis;
            }

            System.out.println("*********************************************");
        }

    } catch (Exception e) {

        System.out.println("Exception :" + e.getMessage());
        e.printStackTrace();
        return analysis;
    }

    return analysis;

}

From source file:org.apache.hadoop.hive.ql.dataToDB.BaseDBExternalDataLoad.java

/**
 * Loads one HDFS file into the external database table, batching rows into
 * multi-row INSERT statements of at most {@code insertsize} rows each.
 *
 * @param file_path HDFS path of the data file to load
 * @param stat      open JDBC statement used to execute the inserts
 * @param delimiter field delimiter; falls back to '\01' when null/empty
 * @throws HiveException on any I/O or SQL failure
 */
private void insertSingleFile(Path file_path, Statement stat, String delimiter) throws HiveException {
    try {
        FSDataInputStream fdis = fs.open(file_path);
        Object reader;
        if (newinsert) {
            reader = new MyLineReader(fdis);
        } else {
            reader = new BufferedReader(new InputStreamReader(fdis));
        }
        String line = "";
        String deli = delimiter;
        if (deli == null || deli.isEmpty()) {
            // Hive's default field delimiter is the \01 control character.
            deli = new String(new char[] { '\01' });
        }
        List<FieldSchema> fss = this.cols;

        int recordcnt = 1;
        String basesql = "insert into " + config.getDbTable() + " values ";
        int insertsize = HiveConf.getIntVar(config.getConf(), HiveConf.ConfVars.HIVEBIROWNUMPERINSERT);
        if (insertsize <= 0 || insertsize >= 100000) {
            insertsize = 10000;
        }
        ArrayList<String> valuelist = new ArrayList<String>(insertsize);
        StringBuffer lineBuffer = new StringBuffer();
        while (readLine(reader, lineBuffer)) {
            line = lineBuffer.toString();
            lineBuffer.setLength(0);
            // Delimiter index positions as computed by getIndexes.
            ArrayList<Integer> arrays = getIndexes(line, deli);
            // Build "(v1,v2,...)" with a StringBuilder instead of repeated
            // String concatenation (the original was O(n^2) per row).
            StringBuilder values = new StringBuilder("(");
            int m = arrays.get(0);
            int n = arrays.get(1);
            int count = 0;
            for (int j = 1; j < arrays.size(); j++) {
                String c = "";
                n = arrays.get(j);
                if (n == (m + 1)) {
                    // Adjacent delimiters: empty field.
                    c = toSQLInsertStr(fss.get(count).getType(), "");
                } else {
                    c = toSQLInsertStr(fss.get(count).getType(), line.substring(m + 1, n));
                }

                m = n;
                if (count > 0) {
                    values.append(',');
                }
                values.append(c);
                count++;
            }
            values.append(')');
            valuelist.add(values.toString());
            if (recordcnt % insertsize == 0) {
                insertValues(stat, basesql, valuelist);
                valuelist.clear();
            }
            if (recordcnt % 10000 == 0 && SessionState.get() != null)
                SessionState.get().ssLog("Load reocord to postgre:" + recordcnt);
            recordcnt++;
        }

        // Flush the final partial batch.
        if (!valuelist.isEmpty()) {
            insertValues(stat, basesql, valuelist);
            valuelist.clear();
        }

        closestream(reader);

    } catch (IOException e) {
        // NOTE(review): wrapping only the message drops the original cause.
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());

    } catch (SQLException e) {
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());
    }
}

From source file:org.apache.hadoop.hive.ql.dataToDB.BaseDBExternalDataLoad.java

/**
 * Loads one HDFS file into the external database table, batching rows into
 * multi-row INSERT statements of at most {@code insertsize} rows each.
 * The field delimiter and column schema come from the configured table.
 *
 * @param file_path HDFS path of the data file to load
 * @param stat      open JDBC statement used to execute the inserts
 * @throws HiveException on any I/O or SQL failure
 */
private void insertSingleFile(Path file_path, Statement stat) throws HiveException {
    try {
        FSDataInputStream fdis = fs.open(file_path);
        Object reader;
        if (newinsert) {
            reader = new MyLineReader(fdis);
        } else {
            reader = new BufferedReader(new InputStreamReader(fdis));
        }
        String line = "";
        String deli = config.getTable().getSerdeParam(Constants.FIELD_DELIM);
        if (deli == null || deli.isEmpty()) {
            // Hive's default field delimiter is the \01 control character.
            deli = new String(new char[] { '\01' });
        }
        List<FieldSchema> fss = config.getTable().getCols();

        int recordcnt = 1;
        String basesql = "insert into " + config.getDbTable() + " values ";
        int insertsize = HiveConf.getIntVar(config.getConf(), HiveConf.ConfVars.HIVEBIROWNUMPERINSERT);
        if (insertsize <= 0 || insertsize >= 100000) {
            insertsize = 10000;
        }
        ArrayList<String> valuelist = new ArrayList<String>(insertsize);
        StringBuffer lineBuffer = new StringBuffer();
        while (readLine(reader, lineBuffer)) {
            line = lineBuffer.toString();
            lineBuffer.setLength(0);
            // Delimiter index positions as computed by getIndexes.
            ArrayList<Integer> arrays = getIndexes(line, deli);
            // Build "(v1,v2,...)" with a StringBuilder instead of repeated
            // String concatenation (the original was O(n^2) per row).
            StringBuilder values = new StringBuilder("(");
            int m = arrays.get(0);
            int n = arrays.get(1);
            int count = 0;
            for (int j = 1; j < arrays.size(); j++) {
                String c = "";
                n = arrays.get(j);
                if (n == (m + 1)) {
                    // Adjacent delimiters: empty field.
                    c = toSQLInsertStr(fss.get(count).getType(), "");
                } else {
                    c = toSQLInsertStr(fss.get(count).getType(), line.substring(m + 1, n));
                }

                m = n;
                if (count > 0) {
                    values.append(',');
                }
                values.append(c);
                count++;
            }
            values.append(')');
            valuelist.add(values.toString());
            if (recordcnt % insertsize == 0) {
                insertValues(stat, basesql, valuelist);
                valuelist.clear();
            }
            if (recordcnt % 10000 == 0 && SessionState.get() != null)
                SessionState.get().ssLog("Load reocord to postgre:" + recordcnt);
            recordcnt++;
        }

        // Flush the final partial batch.
        if (!valuelist.isEmpty()) {
            insertValues(stat, basesql, valuelist);
            valuelist.clear();
        }

        closestream(reader);

    } catch (IOException e) {
        // NOTE(review): wrapping only the message drops the original cause.
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());

    } catch (SQLException e) {
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());
    }
}

From source file:com.pari.nm.modules.jobs.VSEMImporter.java

/**
 * Imports devices from a parsed VSEM file: validates ICF licenses against
 * the device count, processes collection info, then splits the device
 * entities into sub-arrays that are handed off for concurrent processing
 * and awaited via a countdown latch.
 *
 * @param fp       processor whose data provider supplies the parsed entities
 * @param fileName VSEM file to parse
 * @param rootDir  root directory passed to parseDAVData
 * @throws Exception when license validation fails or the file contains no
 *                   valid Network entity / device entities
 */
private void importFromFileProcessor(VSEFileProcessor fp, String fileName, File rootDir)
        throws PariAPIException, MalformedURLException, JellyTagException, UnsupportedEncodingException,
        Exception {
    parseDAVData(fileName, rootDir);
    EntityData repoNetwork; // (review note only; no code changed below)
    EntityDataProviderIf repo = fp.getDataProvider();
    EntityData network = repo.getEntity("Network");
    EntityVector devicesList = (EntityVector) network.getPropValue("DeviceList");
    EntityData[] devices = devicesList.getEntities();
    numDevs = (devices == null) ? 0 : devices.length;

    try {
        String identifier = new String();
        List<ScriptInfo> icfScripts = DeviceScriptManagerImpl.getInstance()
                .getIcfDefinedZipPackages(PackageType.ZipInventory, customerId, "Icf");

        if (icfScripts != null) {
            // NOTE(review): only the first script's identifier is used — the
            // loop breaks immediately; confirm this is intended.
            Iterator<ScriptInfo> itr = icfScripts.iterator();
            while (itr.hasNext()) {
                ScriptInfo script = itr.next();
                identifier = script.getIdentifier();
                break;
            }
            ICManager.getInstance().validateLicense(customerId, identifier, numDevs);
        }

        List<ScriptInfo> icfInvScripts = DeviceScriptManagerImpl.getInstance()
                .getIcfDefinedZipPackages(PackageType.Inventory, customerId, "Icf");

        if (icfInvScripts != null) {
            // Same first-script-only pattern as above.
            Iterator<ScriptInfo> itr = icfInvScripts.iterator();
            while (itr.hasNext()) {
                ScriptInfo script = itr.next();
                identifier = script.getIdentifier();
                break;
            }
            ICManager.getInstance().validateLicense(customerId, identifier, numDevs);
        }
    } catch (Exception e) {
        logger.warn("No sufficient license to run " + " - " + e.getMessage());
        logger.warn("No sufficient license to run import devices.");
        // NOTE(review): re-wrapping drops the original cause/stack trace.
        throw new Exception(e.getMessage());
    }
    // NOTE(review): network is dereferenced above before this null check;
    // a null Network entity would already have thrown NPE.
    if (network == null) {
        throw PariAPIMessages._API_PCBV2_NOT_NOT_VALID.createException(vseFileName, "No Network Entity Found.");
    }
    try {
        handleCollectionInfo(network);
    } catch (Exception e) {
        // Collection info is best-effort; import continues without it.
        logger.warn("Error while getting CollectionInfoList from Network Entity", e);
        if (invoker != null) {
            invoker.logMsg("Unable to process collection information");
        }
    }
    if (devicesList == null || devicesList.isEmpty()) {
        throw PariAPIMessages._API_PCBV2_NOT_NOT_VALID.createException(vseFileName,
                "No Device Entities Found.");
    }
    logger.info("Converting " + numDevs + " devices from VSEM format to PCBV2 format");
    if (numDevs == 0) {
        logger.info("No devices found to import from vsem: " + vseFileName + " . Exiting...");
        if (invoker != null) {
            invoker.logMsg("No device found to import. Exiting...");
        }
        return;
    }
    Map<Integer, EntityData> deviceDataMap = new LinkedHashMap<Integer, EntityData>();
    for (int idx = 0; idx < devices.length; idx++) {
        deviceDataMap.put(idx, devices[idx]);
    }
    // For Memory optimization purposes to scale better, remove the device Entity from the VSE Data
    devicesList.removeAllEntities();
    devices = null;
    int curDevIdx = 0;
    final Map<String/* ipAddr */, Long/* discoveredTime */> deviceDiscTimeMap = new ConcurrentHashMap<String, Long>();
    CustomerInstance custInstance = null;
    int instanceID = -1;
    if (customerId >= 0 && wingInstanceName != null && !wingInstanceName.isEmpty()) {
        try {
            custInstance = CustomerIfImpl.getInstance().getCustomerWingInstanceByName(customerId,
                    wingInstanceName);
        } catch (Exception e1) {
            // Instance lookup failure is non-fatal; discovery times stay empty.
            logger.error(
                    "Error while getting instance id from customer: " + customerId + "/" + wingInstanceName,
                    e1);
        }
    }
    if (custInstance != null && custInstance.getInstanceId() != -1 && deviceDiscTimeMap.isEmpty()) {
        instanceID = custInstance.getInstanceId();
        // Preload discovery timestamps so device processing can reuse them.
        Map<String, Long> tempMap = InventoryDBHelper.getDiscoveryTimeForAllDevices(customerId, instanceID);
        if (tempMap != null) {
            deviceDiscTimeMap.putAll(tempMap);
        }
    }

    try {

        try {
            /*
             * split device data in to multiple sub arrays and process each array in separate thread.
             */

            ArrayList<EntityData> devDataArray = new ArrayList<EntityData>();

            Iterator<Integer> i = deviceDataMap.keySet().iterator();

            while (i.hasNext()) {
                final EntityData devData = deviceDataMap.get(i.next());
                devDataArray.add(devData);
                // NOTE(review): curDevIdx is incremented but never read after
                // this loop.
                curDevIdx++;
            }

            // Small imports fall back to sub-arrays of 10 devices.
            if (devDataArray.size() < subArraySize) {
                subArraySize = 10;
            }

            List<List<EntityData>> subArrayList = split(devDataArray, subArraySize);
            devDataArray.clear();

            logger.info("Sub Array size after splitting " + subArrayList.size());

            // One latch count per sub-array; each worker counts down when done.
            CountDownLatch latch = new CountDownLatch(subArrayList.size());

            for (List<EntityData> entityDataList : subArrayList) {
                processDeviceArray(entityDataList, deviceDiscTimeMap, latch);
                entityDataList.clear();

            }

            deviceDataMap.clear();
            deviceDiscTimeMap.clear();
            // Wait for every sub-array worker to finish.
            latch.await();

        } catch (Exception ex) {
            logger.error("Exception while importing device", ex);
        }

    } catch (Exception ex) {
        logger.error("Exception while importing devices from VSEM file", ex);
    }

}