Example usage for java.util HashSet contains

Introduction

This page collects example usages of java.util.HashSet.contains(Object), drawn from open-source projects.

Prototype

public boolean contains(Object o)

Document

Returns true if this set contains the specified element.
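
Before the project examples below, here is a minimal, self-contained sketch of the call itself; the class and variable names are illustrative, not taken from any of the projects:

import java.util.Arrays;
import java.util.HashSet;

public class ContainsDemo {
    public static void main(String[] args) {
        // Membership lookups on a HashSet run in expected constant time.
        HashSet<String> colors = new HashSet<String>(Arrays.asList("red", "green", "blue"));

        System.out.println(colors.contains("red"));    // true
        System.out.println(colors.contains("yellow")); // false

        // contains(Object) is defined in terms of equals()/hashCode(), and
        // HashSet permits a null element, so a null query is legal.
        System.out.println(colors.contains(null));     // false
    }
}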

Usage

From source file:com.linkedin.databus.core.TestDbusEventBufferMult.java

@Test
public void testMultiPPartionStreamFromLatest() throws Exception {
    createBufMult();

    PhysicalPartition[] p = { _pConfigs[0].getPhysicalPartition(), _pConfigs[1].getPhysicalPartition(),
            _pConfigs[2].getPhysicalPartition() };

    //generate a bunch of windows for 3 partitions
    int windowsNum = 10;
    for (int i = 1; i <= windowsNum; ++i) {
        DbusEventBufferAppendable buf = _eventBufferMult.getDbusEventBufferAppendable(p[0]);

        buf.startEvents();
        byte[] schema = "abcdefghijklmnop".getBytes(Charset.defaultCharset());
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 100, (short) 0,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i, null);

        buf = _eventBufferMult.getDbusEventBufferAppendable(p[1]);
        buf.startEvents();
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(2), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i + 1, null);

        buf = _eventBufferMult.getDbusEventBufferAppendable(p[2]);
        buf.startEvents();
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(2), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(3), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i + 2, null);
    }
    String[] pnames = new String[p.length];
    int count = 0;
    for (PhysicalPartition ip : p) {
        pnames[count++] = ip.toSimpleString();
    }

    StatsCollectors<DbusEventsStatisticsCollector> statsColl = createStats(pnames);

    PhysicalPartitionKey[] pkeys = { new PhysicalPartitionKey(p[0]), new PhysicalPartitionKey(p[1]),
            new PhysicalPartitionKey(p[2]) };

    CheckpointMult cpMult = new CheckpointMult();
    for (int i = 0; i < 3; ++i) {
        Checkpoint cp = new Checkpoint();
        cp.setFlexible();
        cp.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
        cpMult.addCheckpoint(p[i], cp);
    }

    DbusEventBufferBatchReadable reader = _eventBufferMult.getDbusEventBufferBatchReadable(cpMult,
            Arrays.asList(pkeys), statsColl);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    WritableByteChannel writeChannel = Channels.newChannel(baos);
    // Set streamFromLatestScn == true
    reader.streamEvents(true, 1000000, writeChannel, Encoding.BINARY, new AllowAllDbusFilter());
    writeChannel.close();
    baos.close();

    //make sure we got the physical partition names right
    List<String> ppartNames = statsColl.getStatsCollectorKeys();
    assertEquals(ppartNames.size(), 3);

    HashSet<String> expectedPPartNames = new HashSet<String>(
            Arrays.asList(p[0].toSimpleString(), p[1].toSimpleString(), p[2].toSimpleString()));
    for (String ppartName : ppartNames) {
        assertTrue(expectedPPartNames.contains(ppartName));
    }

    //verify event counts per partition
    DbusEventsTotalStats[] ppartStats = { statsColl.getStatsCollector(p[0].toSimpleString()).getTotalStats(),
            statsColl.getStatsCollector(p[1].toSimpleString()).getTotalStats(),
            statsColl.getStatsCollector(p[2].toSimpleString()).getTotalStats() };

    // Only the last window is returned in each of the partitions
    assertEquals(ppartStats[0].getNumDataEvents(), 1);
    assertEquals(ppartStats[1].getNumDataEvents(), 2);
    assertEquals(ppartStats[2].getNumDataEvents(), 3);
    assertEquals(ppartStats[0].getNumSysEvents(), 1);
    assertEquals(ppartStats[1].getNumSysEvents(), 1);
    assertEquals(ppartStats[2].getNumSysEvents(), 1);

    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumDataEvents(), (1 + 2 + 3));
    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumSysEvents(), (1 + 1 + 1));

    assertEquals(statsColl.getStatsCollector().getTotalStats().getMaxTimeLag(), Math
            .max(ppartStats[0].getTimeLag(), Math.max(ppartStats[1].getTimeLag(), ppartStats[2].getTimeLag())));
    assertEquals(statsColl.getStatsCollector().getTotalStats().getMinTimeLag(), Math
            .min(ppartStats[0].getTimeLag(), Math.min(ppartStats[1].getTimeLag(), ppartStats[2].getTimeLag())));
}
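
A side note on the membership loop at the end of this test: the size of ppartNames is already pinned to 3, so the per-element contains loop can be expressed in one call with java.util.Set.containsAll, which performs the same contains lookup for each element. A minimal sketch, using the same assertTrue helper the test already uses:

HashSet<String> expectedPPartNames = new HashSet<String>(
        Arrays.asList(p[0].toSimpleString(), p[1].toSimpleString(), p[2].toSimpleString()));
// One contains() probe per element of ppartNames; true only if all hit.
assertTrue(expectedPPartNames.containsAll(ppartNames));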

From source file:dk.netarkivet.harvester.harvesting.ArchiveFilesReportGenerator.java

/**
 * Parses heritrix.out and generates the ARC/WARC files report.
 * @return the generated report file.
 */
protected File generateReport() {

    Map<String, ArchiveFileStatus> reportContents = parseHeritrixOut();

    File reportFile = new File(crawlDir, REPORT_FILE_NAME);

    try {
        boolean created = reportFile.createNewFile();
        if (!created) {
            throw new IOException("Unable to create '" + reportFile.getAbsolutePath() + "'.");
        }
        PrintWriter out = new PrintWriter(reportFile);

        out.println(REPORT_FILE_HEADER);

        HashSet<String> arcFilesFromHeritrixOut = new HashSet<String>();
        for (Map.Entry<String, ArchiveFilesReportGenerator.ArchiveFileStatus> entry : reportContents
                .entrySet()) {
            String arcFileName = entry.getKey();
            arcFilesFromHeritrixOut.add(arcFileName);
            ArchiveFileStatus afs = entry.getValue();
            out.println(arcFileName + " " + afs.toString());
        }

        // Inspect the contents of the local ARC folder

        //TODO check if this value is configurable
        File localArchiveFolder = new File(crawlDir, ARCHIVE_FORMAT + "s");
        if (localArchiveFolder.exists() && localArchiveFolder.isDirectory()) {
            File[] localArchiveFiles = localArchiveFolder.listFiles(new FileFilter() {
                @Override
                public boolean accept(File f) {
                    return f.isFile() && f.getName().contains("." + ARCHIVE_FORMAT);
                }
            });
            for (File f : localArchiveFiles) {
                String arcFileName = f.getName();
                if (!arcFilesFromHeritrixOut.contains(arcFileName)) {
                    ArchiveFileStatus afs = new ArchiveFileStatus();
                    afs.setSize(f.length());
                    out.println(arcFileName + " " + afs.toString());
                }
            }
        }

        out.close();
    } catch (IOException e) {
        throw new IOFailure("Failed to create " + reportFile.getName(), e);
    }

    return reportFile;
}
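
The HashSet here implements a "seen names" check: every file name mentioned in heritrix.out goes into the set, and contains then flags files on disk that the report missed. The pattern is worth seeing in isolation; the following is a standalone sketch with hypothetical names, independent of the NetarchiveSuite classes:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MissingNames {

    /** Returns the candidates that never appeared in the reported set. */
    static List<String> missingFrom(Set<String> reported, List<String> candidates) {
        List<String> missing = new ArrayList<String>();
        for (String name : candidates) {
            if (!reported.contains(name)) { // expected O(1) per lookup
                missing.add(name);
            }
        }
        return missing;
    }

    public static void main(String[] args) {
        Set<String> reported = new HashSet<String>(Arrays.asList("file1.arc", "file2.arc"));
        List<String> onDisk = Arrays.asList("file2.arc", "file3.arc");
        System.out.println(missingFrom(reported, onDisk)); // [file3.arc]
    }
}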

From source file:org.lol.reddit.reddit.CommentListingRequest.java

private void buildComments(final JsonValue value, final RedditCommentListItem parent, final long timestamp,
        final HashSet<String> needsChanging) throws IOException, InterruptedException, IllegalAccessException,
        java.lang.InstantiationException, NoSuchMethodException, InvocationTargetException {

    final RedditThing commentThing = value.asObject(RedditThing.class);

    final RedditCommentListItem item;
    boolean shouldRecurse = false;

    if (commentThing.getKind() == RedditThing.Kind.MORE_COMMENTS
            && mUrl.pathType() == RedditURLParser.PathType.PostCommentListingURL) {

        final RedditMoreComments redditMoreComments = commentThing.asMoreComments();
        final RedditPreparedMoreComments preparedMoreComments = new RedditPreparedMoreComments(
                redditMoreComments, mUrl.asPostCommentListURL());
        item = new RedditCommentListItem(parent, preparedMoreComments);

    } else if (commentThing.getKind() == RedditThing.Kind.COMMENT) {

        final RedditComment comment = commentThing.asComment();
        final RedditPreparedComment preparedComment = new RedditPreparedComment(mContext, comment, timestamp,
                needsChanging.contains(comment.name), mParentPost, mUser, mCommentHeaderItems);

        if (parent != null && parent.isComment()) {
            parent.asComment().addChild(preparedComment);
    }

        item = new RedditCommentListItem(parent, preparedComment);

        if (comment.replies.getType() == JsonValue.Type.OBJECT) {
            shouldRecurse = true;
        }

    } else {
        return;
    }

    notifyListener(Event.EVENT_ITEM_DOWNLOADED, item);

    if (shouldRecurse) {
        final RedditComment comment = commentThing.asComment();
        final JsonBufferedObject replies = comment.replies.asObject();
        final JsonBufferedArray children = replies.getObject("data").getArray("children");

        for (final JsonValue v : children) {
            buildComments(v, item, timestamp, needsChanging);
        }
    }
}

From source file:gov.nih.nci.cabig.caaers.web.study.CreateStudyAjaxFacade.java

/**
 * Add an expected AE to the Study.
 */
public AjaxOutput addExpectedAE(int[] listOfTermIDs) {

    AjaxOutput ajaxOutput = new AjaxOutput();
    StudyCommand command = (StudyCommand) extractCommand();
    if (command.getStudy().getId() != null) {
        Study study = studyRepository.getById(command.getStudy().getId());
        command.setStudy(study);
    }

    boolean isMeddra = command.getStudy().getAeTerminology().getTerm() == Term.MEDDRA;

    List<? extends AbstractExpectedAE<? extends DomainObject>> studyTerms = (isMeddra)
            ? command.getStudy().getExpectedAEMeddraLowLevelTerms()
            : command.getStudy().getExpectedAECtcTerms();
    int firstIndex = studyTerms.size();
    HashSet<Integer> terms = new HashSet<Integer>();
    for (int i = 0; i < studyTerms.size(); i++) {
        terms.add(studyTerms.get(i).getTerm().getId());
    }

    List<Integer> filteredTermIDs = new ArrayList<Integer>();
    // List<String> removedTerms = new ArrayList<String>();

    // the list of terms to be added
    // filter off the terms that are already present
    for (int id : listOfTermIDs) {
        if (!terms.contains(id))
            filteredTermIDs.add(id);
    }

    if (filteredTermIDs.isEmpty())
        return ajaxOutput;

    for (int id : filteredTermIDs) {
        if (isMeddra) {
            // populate MedDRA term
            LowLevelTerm llt = lowLevelTermDao.getById(id);
            ExpectedAEMeddraLowLevelTerm studyllt = new ExpectedAEMeddraLowLevelTerm();
            studyllt.setLowLevelTerm(llt);
            command.getStudy().addExpectedAEMeddraLowLevelTerm(studyllt);
            studyllt.setStudy(command.getStudy());
        } else {
            // properly set CTCterm
            CtcTerm ctc = ctcTermDao.getById(id);
            ExpectedAECtcTerm studyCtc = new ExpectedAECtcTerm();
            studyCtc.setCtcTerm(ctc);
            command.getStudy().addExpectedAECtcTerm(studyCtc);
            studyCtc.setStudy(command.getStudy());
        }
    }

    if (command.getStudy().getId() != null) {
        command.save();
    }

    int lastIndex = studyTerms.size() - 1;
    Map<String, String> params = new LinkedHashMap<String, String>(); // preserve order for testing
    params.put("firstIndex", Integer.toString(firstIndex));
    params.put("lastIndex", Integer.toString(lastIndex));
    params.put("isSingle", Boolean.toString(true));
    ajaxOutput.setHtmlContent(renderAjaxView("expectedAEsSection", command.getStudy().getId(), params));

    return ajaxOutput;
}
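
The filtering loop in this method is another common contains idiom: seed a HashSet with the IDs already present, then keep only the incoming IDs that miss the set. On Java 8 and later the same filter can be written as a stream; this is a sketch only, not the caaers code (requires java.util.Arrays and java.util.stream.Collectors):

// terms holds the IDs already on the study, exactly as built above.
List<Integer> filteredTermIDs = Arrays.stream(listOfTermIDs)
        .filter(id -> !terms.contains(id)) // drop IDs the study already has
        .boxed()
        .collect(Collectors.toList());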

From source file:edu.cornell.mannlib.vitro.webapp.web.templatemodels.edit.EditConfigurationTemplateModel.java

public Map<String, String> getOfferTypesCreateNew() {
    WebappDaoFactory wdf = vreq.getWebappDaoFactory();
    ObjectProperty op = wdf.getObjectPropertyDao().getObjectPropertyByURI(editConfig.getPredicateUri());

    Individual sub = wdf.getIndividualDao().getIndividualByURI(editConfig.getSubjectUri());

    VClass rangeClass = EditConfigurationUtils.getRangeVClass(vreq);

    List<VClass> vclasses = null;
    List<VClass> subjectVClasses = sub.getVClasses();
    if (subjectVClasses == null) {
        vclasses = wdf.getVClassDao().getAllVclasses();
    } else if (rangeClass != null) {
        List<VClass> rangeVClasses = new ArrayList<VClass>();
        vclasses = new ArrayList<VClass>();
        if (!rangeClass.isUnion()) {
            rangeVClasses.add(rangeClass);
        } else {
            rangeVClasses.addAll(rangeClass.getUnionComponents());
        }
        for (VClass rangeVClass : rangeVClasses) {
            if (rangeVClass.getGroupURI() != null) {
                vclasses.add(rangeVClass);
            }
            List<String> subURIs = wdf.getVClassDao().getAllSubClassURIs(rangeVClass.getURI());
            for (String subClassURI : subURIs) {
                VClass subClass = wdf.getVClassDao().getVClassByURI(subClassURI);
                //if the subclass exists and also belongs to a particular class group
                if (subClass != null && subClass.getGroupURI() != null) {
                    vclasses.add(subClass);
                }
            }
        }
    } else {
        //this hash is used to make sure there are no duplicates in the vclasses
        //a more elegant method may look at overriding equals/hashcode to enable a single hashset of VClass objects
        HashSet<String> vclassesURIs = new HashSet<String>();
        vclasses = new ArrayList<VClass>();
        //Get the range vclasses applicable for the property and each vclass for the subject
        for (VClass subjectVClass : subjectVClasses) {
            List<VClass> rangeVclasses = wdf.getVClassDao().getVClassesForProperty(subjectVClass.getURI(),
                    op.getURI());
            //add range vclass to hash
            if (rangeVclasses != null) {
                for (VClass v : rangeVclasses) {
                    //Need to make sure any class added will belong to a class group
                    if (!vclassesURIs.contains(v.getURI()) && v.getGroupURI() != null) {
                        vclassesURIs.add(v.getURI());
                        vclasses.add(v);
                    }
                }
            }
        }
    }
    //if each subject vclass resulted in null being returned for range vclasses, then size of vclasses would be zero
    if (vclasses.size() == 0) {
        List<VClass> allVClasses = wdf.getVClassDao().getAllVclasses();
        //Since these are all vclasses, we should check whether vclasses included are in a class group
        for (VClass v : allVClasses) {
            if (v.getGroupURI() != null) {
                vclasses.add(v);
            }
        }
    }

    HashMap<String, String> types = new HashMap<String, String>();
    for (VClass vclass : vclasses) {
        String name = null;
        if (vclass.getPickListName() != null && vclass.getPickListName().length() > 0) {
            name = vclass.getPickListName();
        } else if (vclass.getName() != null && vclass.getName().length() > 0) {
            name = vclass.getName();
        } else if (vclass.getLocalNameWithPrefix() != null && vclass.getLocalNameWithPrefix().length() > 0) {
            name = vclass.getLocalNameWithPrefix();
        }
        if (name != null && name.length() > 0)
            types.put(vclass.getURI(), name);
    }

    //Unlike input element formatting tag, including sorting logic here
    return getSortedMap(types);
}

From source file:edu.ku.brc.specify.tasks.subpane.VisualQueryPanel.java

/**
 * Moves selected items from one list to the other.
 * @param srcList
 * @param srcHash
 * @param dstList
 * @param dstHash
 */
private void moveItems(final JList srcList, final HashSet<Integer> srcHash, final JList dstList,
        final HashSet<Integer> dstHash) {
    int inx = srcList.getSelectedIndex();
    if (inx > -1) {
        DefaultListModel srcModel = (DefaultListModel) srcList.getModel();
        DefaultListModel dstModel = (DefaultListModel) dstList.getModel();

        int[] indexes = srcList.getSelectedIndices();
        ArrayList<LatLonPoint> llpList = new ArrayList<LatLonPoint>(indexes.length);
        for (int selInx : indexes) {
            LatLonPoint llp = (LatLonPoint) srcModel.get(selInx);
            llpList.add(llp);

            if (!dstHash.contains(llp.getLocId())) {
                dstModel.addElement(llp);
                dstHash.add(llp.getLocId());
            }
        }

        for (LatLonPoint llp : llpList) {
            srcModel.removeElement(llp);
            srcHash.remove(llp.getLocId());
        }
    }
}

From source file:com.almende.eve.ggdemo.DemoAgent.java

/**
 * Gets the lights.
 * 
 * @return the lights
 * @throws JSONRPCException
 *             the jSONRPC exception
 * @throws ClassNotFoundException
 *             the class not found exception
 * @throws InstantiationException
 *             the instantiation exception
 * @throws IllegalAccessException
 *             the illegal access exception
 * @throws InvocationTargetException
 *             the invocation target exception
 * @throws NoSuchMethodException
 *             the no such method exception
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
public ObjectNode getLights() throws JSONRPCException, ClassNotFoundException, InstantiationException,
        IllegalAccessException, InvocationTargetException, NoSuchMethodException, IOException {
    ObjectNode result = JOM.createObjectNode();

    ArrayList<String> agents = getState().get("agents", new TypeUtil<ArrayList<String>>() {
    });
    if (agents != null) {
        boolean labels = agents.size() < 50;
        result.put("init", true);
        ArrayNode nodes = JOM.createArrayNode();
        ArrayNode edges = JOM.createArrayNode();
        int off = 0;
        int on = 0;
        HashSet<String> uniqueEdges = new HashSet<String>();
        for (String agent : agents) {
            ObjectNode node = JOM.createObjectNode();
            LampAgent lamp = (LampAgent) getAgentHost().getAgent(agent);
            if (lamp == null) {
                System.err.println("Warning, agent doesn't exists:" + agent);
                continue;
            }
            String id = lamp.getId().substring(4);
            Boolean isOn = lamp.isOn();
            if (isOn == null) {
                isOn = false;
            }
            if (isOn) {
                on++;
            } else {
                off++;
            }

            node.put("id", id);
            if (labels) {
                node.put("label", lamp.getId());
            }
            node.put("radius", 10);
            node.put("shape", "dot");
            node.put("group", isOn ? "On" : "Off");

            nodes.add(node);
            for (String other : lamp.getNeighbours()) {
                String otherId = other.substring(10);
                if (!uniqueEdges.contains(otherId + ":" + id)) {
                    ObjectNode edge = JOM.createObjectNode();
                    edge.put("from", id);
                    edge.put("to", otherId);
                    edges.add(edge);
                    uniqueEdges.add(id + ":" + otherId);
                }
            }
        }
        result.put("nodes", nodes);
        result.put("edges", edges);
        result.put("on", on);
        result.put("off", off);
    } else {
        result.put("init", false);
        result.put("nodes", JOM.createArrayNode());
        result.put("edges", JOM.createArrayNode());
        result.put("on", false);
        result.put("off", false);
    }
    return result;
}
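
The uniqueEdges set deduplicates each undirected edge by probing contains with the reversed "otherId:id" key before adding "id:otherId". An alternative, shown here as a sketch rather than as part of the Eve library, is to canonicalize the key so each edge has exactly one spelling; the boolean return of add() then replaces the separate contains() probe:

// Order the endpoints so "a:b" and "b:a" produce the same key.
static String edgeKey(String a, String b) {
    return a.compareTo(b) <= 0 ? a + ":" + b : b + ":" + a;
}

// Inside the neighbour loop: add() returns false on a duplicate key.
if (uniqueEdges.add(edgeKey(id, otherId))) {
    ObjectNode edge = JOM.createObjectNode();
    edge.put("from", id);
    edge.put("to", otherId);
    edges.add(edge);
}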

From source file:com.espertech.esper.core.service.StatementLifecycleSvcImpl.java

private void validateScript(ExpressionScriptProvided script) throws ExprValidationException {
    String dialect = script.getOptionalDialect() == null
            ? services.getConfigSnapshot().getEngineDefaults().getScripts().getDefaultDialect()
            : script.getOptionalDialect();
    if (dialect == null) {
        throw new ExprValidationException("Failed to determine script dialect for script '" + script.getName()
                + "', please configure a default dialect or provide a dialect explicitly");
    }
    if (dialect.trim().toLowerCase().equals("mvel")) {
        if (!MVELInvoker.isMVELInClasspath()) {
            throw new ExprValidationException(
                    "MVEL scripting engine not found in classpath, script dialect 'mvel' requires mvel in classpath for script '"
                            + script.getName() + "'");
        }
        MVELHelper.verifyScript(script);
    } else {
        JSR223Helper.verifyCompileScript(script, dialect);
    }

    if (!script.getParameterNames().isEmpty()) {
        HashSet<String> parameters = new HashSet<String>();
        for (String param : script.getParameterNames()) {
            if (parameters.contains(param)) {
                throw new ExprValidationException("Invalid script parameters for script '" + script.getName()
                        + "', parameter '" + param + "' is defined more then once");
            }
            parameters.add(param);
        }
    }
}
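
Because HashSet.add returns false when the element is already present, the duplicate-parameter check above can fold the contains() probe and the subsequent add() into a single call. A minimal equivalent sketch, not the Esper source itself:

HashSet<String> parameters = new HashSet<String>();
for (String param : script.getParameterNames()) {
    if (!parameters.add(param)) { // false: param was already in the set
        throw new ExprValidationException("Invalid script parameters for script '"
                + script.getName() + "', parameter '" + param + "' is defined more than once");
    }
}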

From source file:com.tesora.dve.sql.util.PEDDL.java

@Override
public void destroy(ConnectionResource mr) throws Throwable {
    if (mr == null)
        return;

    TaggedLineInfo tli = new TaggedLineInfo(-1, null, -1, LineTag.DDL);
    // drop all the dbs
    for (DatabaseDDL dbddl : getDatabases())
        mr.execute(tli, dbddl.getDropStatement());
    HashSet<String> pgnames = new HashSet<String>();
    for (StorageGroupDDL sgddl : persGroups)
        pgnames.add(sgddl.getName());
    ResourceResponse rr = mr.fetch("show containers");
    List<ResultRow> results = rr.getResults();
    for (ResultRow row : results) {
        ResultColumn rc = row.getResultColumn(1);
        String contName = (String) rc.getColumnValue();
        mr.execute(tli, "drop container " + contName);
    }
    // now we have to find ranges that use our persistent group
    rr = mr.fetch("show ranges");
    results = rr.getResults();
    for (ResultRow row : results) {
        ResultColumn rangeNameColumn = row.getResultColumn(1);
        ResultColumn storageGroupColumn = row.getResultColumn(2);
        String rangeName = rangeNameColumn.getColumnValue().getClass().isArray()
                ? new String((byte[]) rangeNameColumn.getColumnValue())
                : (String) rangeNameColumn.getColumnValue();
        String groupName = storageGroupColumn.getColumnValue().getClass().isArray()
                ? new String((byte[]) storageGroupColumn.getColumnValue())
                : (String) storageGroupColumn.getColumnValue();
        if (pgnames.contains(groupName.trim())) {
            try {
                mr.execute(tli, "drop range " + rangeName + " persistent group " + groupName.trim());
            } catch (PEException e) {
                if (!throwDropRangeInUseException
                        && (StringUtils.containsIgnoreCase(e.getMessage(), "Unable to drop range")
                                && StringUtils.containsIgnoreCase(e.getMessage(), "because used by table"))) {
                    // eat the exception
                } else {
                    throw e;
                }
            }
        }
    }
    // now we can drop the persistent group, which apparently also drops the persistent sites
    for (StorageGroupDDL sgddl : persGroups) {
        sgddl.destroy(mr);
    }
}

From source file:freenet.client.ArchiveManager.java

private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, InputStream data, String element,
        ArchiveExtractCallback callback, MutableBoolean gotElement, boolean throwAtExit, ClientContext context)
        throws ArchiveFailureException, ArchiveRestartException {
    if (logMINOR)
        Logger.minor(this, "Handling a TAR Archive");
    TarArchiveInputStream tarIS = null;
    try {
        tarIS = new TarArchiveInputStream(data);

        // MINOR: Assumes the first entry in the tarball is a directory.
        ArchiveEntry entry;

        byte[] buf = new byte[32768];
        HashSet<String> names = new HashSet<String>();
        boolean gotMetadata = false;

        outerTAR: while (true) {
            try {
                entry = tarIS.getNextEntry();
            } catch (IllegalArgumentException e) {
                // Annoyingly, it can throw this on some corruptions...
                throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
            }
            if (entry == null)
                break;
            if (entry.isDirectory())
                continue;
            String name = stripLeadingSlashes(entry.getName());
            if (names.contains(name)) {
                Logger.error(this, "Duplicate key " + name + " in archive " + key);
                continue;
            }
            long size = entry.getSize();
            if (name.equals(".metadata"))
                gotMetadata = true;
            if (size > maxArchivedFileSize && !name.equals(element)) {
                addErrorElement(
                        ctx, key, name, "File too big: " + size
                                + " greater than current archived file size limit " + maxArchivedFileSize,
                        true);
            } else {
                // Read the element
                long realLen = 0;
                Bucket output = tempBucketFactory.makeBucket(size);
                OutputStream out = output.getOutputStream();

                try {
                    int readBytes;
                    while ((readBytes = tarIS.read(buf)) > 0) {
                        out.write(buf, 0, readBytes);
                        realLen += readBytes; // accumulate the total bytes extracted so far
                        if (realLen > maxArchivedFileSize) {
                            addErrorElement(ctx, key, name, "File too big: " + realLen
                                    + " greater than current archived file size limit " + maxArchivedFileSize,
                                    true);
                            out.close();
                            out = null;
                            output.free();
                            continue outerTAR;
                        }
                    }

                } finally {
                    if (out != null)
                        out.close();
                }
                if (size <= maxArchivedFileSize) {
                    addStoreElement(ctx, key, name, output, gotElement, element, callback, context);
                    names.add(name);
                    trimStoredData();
                } else {
                    // We are here because they asked for this file.
                    callback.gotBucket(output, context);
                    gotElement.value = true;
                    addErrorElement(
                            ctx, key, name, "File too big: " + size
                                    + " greater than current archived file size limit " + maxArchivedFileSize,
                            true);
                }
            }
        }

        // If no metadata, generate some
        if (!gotMetadata) {
            generateMetadata(ctx, key, names, gotElement, element, callback, context);
            trimStoredData();
        }
        if (throwAtExit)
            throw new ArchiveRestartException("Archive changed on re-fetch");

        if ((!gotElement.value) && element != null)
            callback.notInArchive(context);

    } catch (IOException e) {
        throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
    } finally {
        Closer.close(tarIS);
    }
}