Usage examples for java.util.LinkedList.addLast, collected from open-source projects
public void addLast(E e)
From source file:com.jaspersoft.jasperserver.api.metadata.common.service.impl.hibernate.HibernateRepositoryServiceImpl.java
protected void refreshFolderPaths(RepoFolder folder) { // refreshing recursively using a queue LinkedList<RepoFolder> folders = new LinkedList<RepoFolder>(); folders.addLast(folder); while (!folders.isEmpty()) { RepoFolder aFolder = folders.removeFirst(); aFolder.refreshURI(this); Set resources = aFolder.getChildren(); if (resources != null && !resources.isEmpty()) { for (Iterator it = resources.iterator(); it.hasNext();) { RepoResource child = (RepoResource) it.next(); RepoFolder grandChildrenFolder = child.getChildrenFolder(); if (grandChildrenFolder != null) { folders.addLast(grandChildrenFolder); }//from w ww.java 2s. co m } } } }
From source file:it.cnr.icar.eric.client.admin.function.Cp.java
/**
 * Load the contents of baseDir into the registry, using the directory
 * structure as the basis for the created object hierarchy.
 *
 * @param baseDir Directory in local file system from which to load
 * @param rootRP Existing RegistryPackage to which to add
 * @throws Exception if a directory cannot be read, an entry is neither a
 *         file nor a directory, or the bulk save reports a failure
 */
protected void scanDir(File baseDir, RegistryPackage rootRP) throws Exception {
    // All objects created during the scan; saved in one bulk request at the end.
    ArrayList<RegistryObject> repositoryObjects = new ArrayList<RegistryObject>();
    // Work queue of (directory, RegistryPackage) pairs still to be scanned.
    LinkedList<DirInfo> dirInfoList = new LinkedList<DirInfo>();
    dirInfoList.add(new DirInfo(baseDir, rootRP));
    /*
     * Loop through the list of directories (and corresponding
     * RegistryPackages and pathnames). Child directories of
     * curDir are added to the end of the list, so the list isn't
     * finished until all descendant directories have been
     * processed.
     */
    while (!dirInfoList.isEmpty()) {
        DirInfo curDirInfo = dirInfoList.removeFirst();
        File curDir = curDirInfo.getDir();
        RegistryPackage curRP = curDirInfo.getRegistryPackage();
        // Validate the directory before listing it; each failure uses a distinct message key.
        if (!curDir.exists()) {
            throw new AdminException(format(rb, "nonexistentLocalDir", new Object[] { curDir }));
        }
        if (!curDir.isDirectory()) {
            throw new AdminException(format(rb, "nondirectoryLocalDir", new Object[] { curDir }));
        }
        if (!curDir.canRead()) {
            throw new AdminException(format(rb, "unreadableLocalDir", new Object[] { curDir }));
        }
        File[] childFiles = curDir.listFiles();
        for (int i = 0; i < childFiles.length; i++) {
            String childName = childFiles[i].getName();
            // Apply the configured include/exclude filters to the file name.
            boolean canInclude = checkIncludesExcludes(childName);
            RegistryObject childObject;
            if (!canInclude) {
                if (verbose || debug) {
                    context.printMessage(format(rb, "notIncluding", new Object[] { childFiles[i] }));
                }
                continue;
            }
            if (childFiles[i].isFile()) {
                // Plain file -> ExtrinsicObject created from the file content.
                if (verbose || debug) {
                    context.printMessage(format(rb, "including",
                            new Object[] { "ExtrinsicObject", childFiles[i], childName }));
                }
                childObject = context.getService().createExtrinsicObject(childFiles[i]);
            } else if (childFiles[i].isDirectory()) {
                // Sub-directory -> RegistryPackage; queue it so its children are scanned too.
                if (verbose || debug) {
                    context.printMessage(format(rb, "including",
                            new Object[] { "RegistryPackage", childFiles[i], childName }));
                }
                childObject = context.getService().createRegistryPackage(childName);
                dirInfoList.addLast(new DirInfo(childFiles[i], (RegistryPackage) childObject));
            } else {
                // Neither a regular file nor a directory.
                childObject = null;
                throw new AdminException(format(rb, "notFileOrDir", new Object[] { childFiles[i] }));
            }
            if (curRP != null) {
                curRP.addRegistryObject(childObject);
            }
            repositoryObjects.add(childObject);
        }
    }
    if (!repositoryObjects.isEmpty()) {
        // Include the root package itself in the bulk save, then check for failures.
        if (rootRP != null) {
            repositoryObjects.add(rootRP);
        }
        BulkResponse response = ((LifeCycleManagerImpl) context.getService().getLCM())
                .saveObjects(repositoryObjects, saveObjectsSlots);
        JAXRUtility.checkBulkResponse(response);
    }
}
From source file:org.gcaldaemon.core.ldap.ContactLoader.java
/**
 * Creates the contact loader thread: prepares the vCard working directory,
 * reads LDAP-related configuration (access filters, cache timeout, Google
 * account credentials, vCard encoding/version), then creates and starts
 * the LDAP listener.
 *
 * @param mainGroup thread group the loader thread joins
 * @param configurator source of all configuration properties
 * @throws Exception if a required property is missing or invalid
 */
public ContactLoader(ThreadGroup mainGroup, Configurator configurator) throws Exception {
    super(mainGroup, "Contact loader");
    this.configurator = configurator;
    // Working directory for generated vCard files; created if absent.
    this.vcardDirectory = new File(configurator.getWorkDirectory(), "vcard");
    if (!vcardDirectory.isDirectory()) {
        vcardDirectory.mkdirs();
    }
    // Acceptable hostnames
    FilterMask[] hosts = configurator.getFilterProperty(Configurator.LDAP_ALLOWED_HOSTNAMES);
    // Acceptable TCP/IP addresses
    FilterMask[] addresses = configurator.getFilterProperty(Configurator.LDAP_ALLOWED_ADDRESSES);
    // Get contact list cache timeout; clamp to the 3-minute minimum.
    long timeout = configurator.getConfigProperty(Configurator.LDAP_CACHE_TIMEOUT, 3600000L);
    if (timeout < 180000L) {
        log.warn("The fastest contact list polling period is '3 min'!");
        timeout = 180000L;
    }
    pollingTimeout = timeout;
    // Get username/password pairs. Properties are indexed ("", "2", "3", ...);
    // up to MAX_INDEX_GAP consecutive missing indexes are tolerated before the
    // scan stops.
    LinkedList usernameList = new LinkedList();
    LinkedList passwordList = new LinkedList();
    String parameterPostfix;
    int gapCounter = 0;
    for (int i = 1;; i++) {
        // Create parameter postfix [..n]; the first entry has no numeric suffix.
        if (i == 1) {
            parameterPostfix = "";
        } else {
            parameterPostfix = Integer.toString(i);
        }
        if (configurator.getConfigProperty(Configurator.LDAP_GOOGLE_USERNAME + parameterPostfix, null) == null) {
            if (gapCounter < MAX_INDEX_GAP) {
                gapCounter++;
                continue;
            }
            break;
        }
        gapCounter = 0;
        // Get username
        String username = configurator.getConfigProperty(Configurator.LDAP_GOOGLE_USERNAME + parameterPostfix,
                null);
        // Get password (only looked up when the property is present)
        String password = null;
        if (configurator.getConfigProperty(Configurator.LDAP_GOOGLE_PASSWORD + parameterPostfix, null) != null) {
            password = configurator.getPasswordProperty(Configurator.LDAP_GOOGLE_PASSWORD + parameterPostfix);
        }
        // Verify parameters
        if (username == null) {
            throw new NullPointerException(
                    "Missing username (" + Configurator.LDAP_GOOGLE_USERNAME + parameterPostfix + ")!");
        }
        if (password == null) {
            throw new NullPointerException(
                    "Missing password (" + Configurator.LDAP_GOOGLE_PASSWORD + parameterPostfix + ")!");
        }
        // Add parameters to lists
        usernameList.addLast(username);
        passwordList.addLast(password);
    }
    // Create object arrays from the collected credential lists.
    usernames = new String[usernameList.size()];
    passwords = new String[passwordList.size()];
    usernameList.toArray(usernames);
    passwordList.toArray(passwords);
    if (hosts == null && addresses == null) {
        // Security warning: no access filters configured at all.
        log.warn("Set the '" + Configurator.LDAP_ALLOWED_HOSTNAMES + "' parameter to limit remote access.");
    } else {
        // Debug filters
        if (log.isDebugEnabled()) {
            log.debug("Allowed LDAP hosts: "
                    + configurator.getConfigProperty(Configurator.LDAP_ALLOWED_HOSTNAMES, "*"));
            log.debug("Allowed LDAP addresses: "
                    + configurator.getConfigProperty(Configurator.LDAP_ALLOWED_ADDRESSES, "*"));
        }
    }
    // Get vCard encoding ("quoted" / "native" / anything else -> UTF-8).
    String value = configurator.getConfigProperty(Configurator.LDAP_VCARD_ENCODING, "quoted");
    if (value.equals("quoted")) {
        vcardEncoding = VCARD_QUOTED_ENCODING;
    } else {
        if (value.equals("native")) {
            vcardEncoding = VCARD_NATIVE_ENCODING;
        } else {
            vcardEncoding = VCARD_UTF8_ENCODING;
        }
    }
    value = configurator.getConfigProperty(Configurator.LDAP_VCARD_VERSION, "3.0");
    try {
        // Parse to validate, then normalize the version string (e.g. "3" -> "3.0").
        double num = Double.parseDouble(value);
        vcardVersion = Double.toString(num);
    } catch (Exception formatError) {
        log.fatal("Invalid vCard version: " + value);
        throw formatError;
    }
    // Create and start LDAP listener
    int port = (int) configurator.getConfigProperty(Configurator.LDAP_PORT, 9080);
    ldapListener = new LDAPListener(this, hosts, addresses, port);
    // Start listener
    start();
}
From source file:org.apache.activemq.leveldb.test.ReplicatedLevelDBStoreTest.java
/**
 * Exercises LevelDB master/slave replication by rotating the master role
 * across three node directories five times, verifying after each rotation
 * that the new master still holds every message written so far.
 */
@Test(timeout = 1000 * 60 * 10)
public void testReplication() throws Exception {
    LinkedList<File> directories = new LinkedList<File>();
    directories.add(new File("target/activemq-data/leveldb-node1"));
    directories.add(new File("target/activemq-data/leveldb-node2"));
    directories.add(new File("target/activemq-data/leveldb-node3"));
    resetDirectories(directories);
    // For some reason this had to be 64k to trigger a bug where
    // slave index snapshots were being done incorrectly.
    String playload = createPlayload(64 * 1024);
    ArrayList<String> expected_list = new ArrayList<String>();
    // We will rotate between 3 nodes the task of being the master.
    for (int j = 0; j < 5; j++) {
        MasterLevelDBStore master = createMaster(directories.get(0));
        CountDownFuture masterStart = asyncStart(master);
        SlaveLevelDBStore slave1 = createSlave(master, directories.get(1));
        SlaveLevelDBStore slave2 = createSlave(master, directories.get(2));
        asyncStart(slave2);
        masterStart.await();
        // Register the stores for cleanup only on the first rotation.
        if (j == 0) {
            stores.add(master);
            stores.add(slave1);
            stores.add(slave2);
        }
        MessageStore ms = master.createQueueMessageStore(new ActiveMQQueue("TEST"));
        LOG.info("Checking: " + master.getDirectory());
        assertEquals(expected_list, getMessages(ms));
        LOG.info("Adding messages...");
        final int TOTAL = 500;
        for (int i = 0; i < TOTAL; i++) {
            if (i % ((int) (TOTAL * 0.10)) == 0) {
                LOG.info("" + (100 * i / TOTAL) + "% done");
            }
            // Halfway through, swap which slave is online so slave1 has to
            // catch up from a mid-stream snapshot.
            if (i == 250) {
                slave1.start();
                slave2.stop();
                LOG.info("Checking: " + master.getDirectory());
                assertEquals(expected_list, getMessages(ms));
            }
            String msgid = "m:" + j + ":" + i;
            addMessage(ms, msgid, playload);
            expected_list.add(msgid);
        }
        LOG.info("Checking: " + master.getDirectory());
        assertEquals(expected_list, getMessages(ms));
        LOG.info("Stopping master: " + master.getDirectory());
        master.stop();
        Thread.sleep(3 * 1000);
        LOG.info("Stopping slave: " + slave1.getDirectory());
        slave1.stop();
        // Rotate the dir order so that slave1 becomes the master next.
        directories.addLast(directories.removeFirst());
    }
}
From source file:org.commonjava.maven.ext.io.PomIO.java
/**
 * Walks the POM hierarchy starting at the given top-level pom.xml,
 * following both parent relativePath references and module entries via a
 * breadth-first pending queue, and returns a PomPeek for each reachable
 * POM that has a resolvable key. Afterwards it flags inheritance roots:
 * the top-most parent found, plus any POM whose parent is not part of
 * the peeked set.
 *
 * @param topPom the top-level pom.xml to start from
 * @return list of PomPeek instances for every POM discovered
 * @throws ManipulationException if a POM path cannot be canonicalized
 */
private List<PomPeek> peekAtPomHierarchy(final File topPom) throws ManipulationException {
    final List<PomPeek> peeked = new ArrayList<>();
    try {
        // Queue of POM files still to inspect; seen guards against revisits/cycles.
        final LinkedList<File> pendingPoms = new LinkedList<>();
        pendingPoms.add(topPom.getCanonicalFile());
        // Only POMs located under the top POM's directory are followed.
        final String topDir = topPom.getAbsoluteFile().getParentFile().getCanonicalPath();
        final Set<File> seen = new HashSet<>();
        File topLevelParent = topPom;
        while (!pendingPoms.isEmpty()) {
            final File pom = pendingPoms.removeFirst();
            seen.add(pom);
            logger.debug("PEEK: " + pom);
            final PomPeek peek = new PomPeek(pom);
            final ProjectVersionRef key = peek.getKey();
            if (key != null) {
                peeked.add(peek);
                final File dir = pom.getParentFile();
                final String relPath = peek.getParentRelativePath();
                if (relPath != null) {
                    logger.debug("Found parent relativePath: " + relPath + " in pom: " + pom);
                    File parent = new File(dir, relPath);
                    if (parent.isDirectory()) {
                        // relativePath may point at a directory; assume pom.xml inside it.
                        parent = new File(parent, "pom.xml");
                    }
                    parent = parent.getCanonicalFile();
                    // Follow only existing parents inside topDir that are not
                    // already visited or queued.
                    if (parent.getParentFile().getCanonicalPath().startsWith(topDir) && parent.exists()
                            && !seen.contains(parent) && !pendingPoms.contains(parent)) {
                        topLevelParent = parent;
                        logger.debug("Possible top level parent " + parent);
                        pendingPoms.add(parent);
                    } else {
                        logger.debug("Skipping reference to non-existent parent relativePath: '" + relPath
                                + "' in: " + pom);
                    }
                }
                final Set<String> modules = peek.getModules();
                if (modules != null && !modules.isEmpty()) {
                    for (final String module : modules) {
                        logger.debug("Found module: " + module + " in pom: " + pom);
                        File modPom = new File(dir, module);
                        if (modPom.isDirectory()) {
                            modPom = new File(modPom, "pom.xml");
                        }
                        if (modPom.exists() && !seen.contains(modPom) && !pendingPoms.contains(modPom)) {
                            pendingPoms.addLast(modPom);
                        } else {
                            logger.debug(
                                    "Skipping reference to non-existent module: '" + module + "' in: " + pom);
                        }
                    }
                }
            } else {
                // No key could be determined; the POM is treated as a template file.
                logger.debug("Skipping " + pom + " as its a template file.");
            }
        }
        final HashSet<ProjectVersionRef> projectrefs = new HashSet<>();
        for (final PomPeek p : peeked) {
            projectrefs.add(p.getKey());
            if (p.getPom().equals(topLevelParent)) {
                logger.debug("Setting top level parent to " + p.getPom() + " :: " + p.getKey());
                p.setInheritanceRoot(true);
            }
        }
        for (final PomPeek p : peeked) {
            // POMs whose parent is outside the peeked set are standalone roots too.
            if (p.getParentKey() == null || !seenThisParent(projectrefs, p.getParentKey())) {
                logger.debug("Found a standalone pom " + p.getPom() + " :: " + p.getKey());
                p.setInheritanceRoot(true);
            }
        }
    } catch (final IOException e) {
        throw new ManipulationException("Problem peeking at POMs.", e);
    }
    return peeked;
}
From source file:org.deeplearning4j.nn.multilayer.MultiLayerNetwork.java
/**
 * Equivalent to backprop(), but calculates gradient for truncated BPTT instead.
 * Walks the layers backwards from the output layer, collecting per-variable
 * gradients into a list kept in layer order (layer 0 first), then writes
 * them into the {@code gradient} field in that order.
 */
protected void truncatedBPTTGradient() {
    if (flattenedGradients == null)
        initGradientsView();
    String multiGradientKey;
    gradient = new DefaultGradient();
    Layer currLayer;
    // An output layer is required to source the initial error signal.
    if (!(getOutputLayer() instanceof IOutputLayer)) {
        log.warn(
                "Warning: final layer isn't output layer. You cannot use backprop (truncated BPTT) without an output layer.");
        return;
    }
    IOutputLayer outputLayer = (IOutputLayer) getOutputLayer();
    if (labels == null)
        throw new IllegalStateException("No labels found");
    if (outputLayer.conf().getLayer().getWeightInit() == WeightInit.ZERO) {
        throw new IllegalStateException(
                "Output layer weights cannot be initialized to zero when using backprop.");
    }
    outputLayer.setLabels(labels);
    //calculate and apply the backward gradient for every layer
    int numLayers = getnLayers();
    //Store gradients is a list; used to ensure iteration order in DefaultGradient linked hash map. i.e., layer 0 first instead of output layer
    LinkedList<Pair<String, INDArray>> gradientList = new LinkedList<>();
    // Start from the output layer's gradient and walk backwards through the net.
    Pair<Gradient, INDArray> currPair = outputLayer.backpropGradient(null);
    for (Map.Entry<String, INDArray> entry : currPair.getFirst().gradientForVariable().entrySet()) {
        // Gradient keys are prefixed with the layer index, e.g. "<layer>_<var>".
        multiGradientKey = String.valueOf(numLayers - 1) + "_" + entry.getKey();
        gradientList.addLast(new Pair<>(multiGradientKey, entry.getValue()));
    }
    if (getLayerWiseConfigurations().getInputPreProcess(numLayers - 1) != null)
        currPair = new Pair<>(currPair.getFirst(), this.layerWiseConfigurations
                .getInputPreProcess(numLayers - 1).backprop(currPair.getSecond(), getInputMiniBatchSize()));
    // Calculate gradients for previous layers & drops output layer in count
    for (int j = numLayers - 2; j >= 0; j--) {
        currLayer = getLayer(j);
        if (currLayer instanceof RecurrentLayer) {
            // Recurrent layers use the truncated-BPTT-specific gradient path.
            currPair = ((RecurrentLayer) currLayer).tbpttBackpropGradient(currPair.getSecond(),
                    layerWiseConfigurations.getTbpttBackLength());
        } else {
            currPair = currLayer.backpropGradient(currPair.getSecond());
        }
        // Reverse this layer's entries locally, then prepend them, so the
        // final list stays ordered layer 0 ... output layer.
        LinkedList<Pair<String, INDArray>> tempList = new LinkedList<>();
        for (Map.Entry<String, INDArray> entry : currPair.getFirst().gradientForVariable().entrySet()) {
            multiGradientKey = String.valueOf(j) + "_" + entry.getKey();
            tempList.addFirst(new Pair<>(multiGradientKey, entry.getValue()));
        }
        for (Pair<String, INDArray> pair : tempList)
            gradientList.addFirst(pair);
        //Pass epsilon through input processor before passing to next layer (if applicable)
        if (getLayerWiseConfigurations().getInputPreProcess(j) != null)
            currPair = new Pair<>(currPair.getFirst(), getLayerWiseConfigurations().getInputPreProcess(j)
                    .backprop(currPair.getSecond(), getInputMiniBatchSize()));
    }
    //Add gradients to Gradients, in correct order
    for (Pair<String, INDArray> pair : gradientList)
        gradient.setGradientFor(pair.getFirst(), pair.getSecond());
}
From source file:com.commander4j.db.JDBUserReport.java
/**
 * Returns the list of user reports visible to the current session user.
 * Admin users see every report; other users see a report when it is not
 * private, when they own it, or when they belong to its group.
 * On SQL failure the error message is recorded via setErrorMessage and the
 * (possibly partial) list collected so far is returned.
 */
public LinkedList<JDBListData> getUserReportIds() {
    LinkedList<JDBListData> groupUserReportList = new LinkedList<JDBListData>();
    Statement stmt;
    ResultSet rs;
    setErrorMessage("");
    Icon icon = new ImageIcon();
    int index = 0;
    boolean show = false;
    try {
        stmt = Common.hostList.getHost(getHostID()).getConnection(getSessionID()).createStatement();
        stmt.setFetchSize(250);
        rs = stmt.executeQuery(Common.hostList.getHost(getHostID()).getSqlstatements()
                .getSQL("JDBUserReport.getUserReportIDs"));
        while (rs.next()) {
            // TO DO
            // IF user is private check username and group membership.
            if (adminUser) {
                show = true;
            } else {
                show = false;
                if (rs.getString("PRIVATE").equals("N")) {
                    // Public reports are visible to everyone.
                    show = true;
                } else {
                    // Private report: visible to its owner ...
                    if (rs.getString("USER_ID").equals(Common.userList.getUser(getSessionID()).getUserId())) {
                        show = true;
                    }
                    // ... or to members of the report's group (checked via ugm).
                    ugm.setUserId(Common.userList.getUser(getSessionID()).getUserId());
                    ugm.setGroupId(rs.getString("GROUP_ID"));
                    if (ugm.isValidUserGroupMembership()) {
                        show = true;
                    }
                }
            }
            if (show == true) {
                JDBUserReport ur = new JDBUserReport(getHostID(), getSessionID());
                ur.getPropertiesfromResultSet(rs);
                icon = getUserReportIcon(rs.getString("ENABLED"), rs.getString("DESTINATION"));
                JDBListData mld = new JDBListData(icon, index, true, ur);
                groupUserReportList.addLast(mld);
            }
        }
        rs.close();
        stmt.close();
    } catch (SQLException e) {
        setErrorMessage(e.getMessage());
    }
    return groupUserReportList;
}
From source file:com.redhat.rcm.version.mgr.VersionManager.java
/**
 * Walks the POM hierarchy starting at the given top-level pom.xml,
 * following parent relativePath references and module entries via a
 * breadth-first pending queue. Each POM with a resolvable key is
 * registered with the session and returned as a PomPeek.
 *
 * @param topPom the top-level pom.xml to start from
 * @param session session in which each discovered (key, pom) pair is recorded
 * @return list of PomPeek instances for every POM discovered
 * @throws IOException if a POM path cannot be canonicalized
 */
protected List<PomPeek> peekAtPomHierarchy(final File topPom, final VersionManagerSession session)
        throws IOException {
    // Queue of POM files still to inspect; seen guards against revisits.
    final LinkedList<File> pendingPoms = new LinkedList<File>();
    pendingPoms.add(topPom.getCanonicalFile());
    // Only POMs located under the top POM's directory are followed.
    final String topDir = topPom.getParentFile().getCanonicalPath();
    final Set<File> seen = new HashSet<File>();
    final List<PomPeek> peeked = new ArrayList<PomPeek>();
    while (!pendingPoms.isEmpty()) {
        final File pom = pendingPoms.removeFirst();
        seen.add(pom);
        logger.info("PEEK: " + pom);
        final PomPeek peek = new PomPeek(pom);
        final FullProjectKey key = peek.getKey();
        if (key != null) {
            session.addPeekPom(key, pom);
            peeked.add(peek);
            final File dir = pom.getParentFile();
            final String relPath = peek.getParentRelativePath();
            if (relPath != null) {
                logger.info("Found parent relativePath: " + relPath + " in pom: " + pom);
                File parent = new File(dir, relPath);
                if (parent.isDirectory()) {
                    // relativePath may point at a directory; assume pom.xml inside it.
                    parent = new File(parent, "pom.xml");
                }
                logger.info("Looking for parent POM: " + parent);
                parent = parent.getCanonicalFile();
                // Follow only existing parents inside topDir not already visited/queued.
                if (parent.getParentFile().getCanonicalPath().startsWith(topDir) && parent.exists()
                        && !seen.contains(parent) && !pendingPoms.contains(parent)) {
                    pendingPoms.add(parent);
                } else {
                    logger.info("Skipping reference to non-existent parent relativePath: '" + relPath
                            + "' in: " + pom);
                }
            }
            final Set<String> modules = peek.getModules();
            if (modules != null && !modules.isEmpty()) {
                for (final String module : modules) {
                    logger.info("Found module: " + module + " in pom: " + pom);
                    File modPom = new File(dir, module);
                    if (modPom.isDirectory()) {
                        modPom = new File(modPom, "pom.xml");
                    }
                    logger.info("Looking for module POM: " + modPom);
                    // Same containment/exists/dedup checks as for parents.
                    if (modPom.getParentFile().getCanonicalPath().startsWith(topDir) && modPom.exists()
                            && !seen.contains(modPom) && !pendingPoms.contains(modPom)) {
                        pendingPoms.addLast(modPom);
                    } else {
                        logger.info("Skipping reference to non-existent module: '" + module + "' in: " + pom);
                    }
                }
            }
        } else {
            // No key could be determined; the POM is treated as a template file.
            logger.info("Skipping " + pom + " as its a template file.");
        }
    }
    return peeked;
}
From source file:org.guzz.builder.GuzzConfigFileBuilder.java
public List listDBGroups() { /*//from w w w .j ava 2 s.com <tran> <dbgroup name="default" masterDBConfigName="masterDB" slaveDBConfigName="slaveDB" dialectName="mysql5dialect" /> <dbgroup name="activeLog" masterDBConfigName="masterLogDB" defaultDialect="h2dialect" /> <virtualdbgroup name="log" dialectName="h2dialect" shadow="xxx.VirtualDBGroupView"> <dbgroup name="log.old.1" masterDBConfigName="masterLogDB2" /> <dbgroup name="log.old.2" masterDBConfigName="masterLogDB3" /> <dbgroup name="log.old.3" masterDBConfigName="masterLogDB4" /> </virtualdbgroup> </tran> */ LinkedList dbGroups = new LinkedList(); List rootDBGroups = parseForPhysicsDBGroup(this.rootDoc.selectNodes("tran/dbgroup"), "default"); if (rootDBGroups != null) { dbGroups.addAll(rootDBGroups); } //Load virtual dbGroup List vss = this.rootDoc.selectNodes("tran/virtualdbgroup"); if (vss != null && !vss.isEmpty()) { for (int i = 0; i < vss.size(); i++) { Element e = (Element) vss.get(i); VirtualDBGroup db = new VirtualDBGroup(); String groupName = e.attributeValue("name"); String dialectName = e.attributeValue("dialectName"); String shadow = e.attributeValue("shadow"); if (StringUtil.isEmpty(groupName)) { db.setGroupName("default"); } else { db.setGroupName(groupName); } if (StringUtil.isEmpty(dialectName)) { dialectName = "default"; } Dialect dt = this.gf.getDialect(dialectName); if (dt == null) { throw new InvalidConfigurationException( "dialect:[" + dialectName + "] not found for dbgroup:[" + e.asXML() + "]"); } db.setDialect(dt); //shadow if (StringUtil.isEmpty(shadow)) { throw new InvalidConfigurationException( "missing attribute [shadow] in virtualdbgroup:[" + e.asXML() + "]"); } Object vv = BeanCreator.newBeanInstance(shadow); if (vv instanceof VirtualDBView) { VirtualDBView vdv = (VirtualDBView) vv; vdv.setConfiguredVirtualDBGroup(db); this.gf.registerVirtualDBView(vdv); db.setVirtualDBGroupView(vdv); } else { throw new InvalidConfigurationException("attribute [shadow] must be a subclass of + " + 
VirtualDBView.class.getName() + " for virtualdbgroup:[" + e.asXML() + "]"); } dbGroups.addLast(db); //Load virtualdbgroup's sub dbgroup. List subDBGroups = parseForPhysicsDBGroup(e.selectNodes("dbgroup"), dialectName); if (subDBGroups != null) { dbGroups.addAll(subDBGroups); } } } return dbGroups; }
From source file:com.zimbra.cs.service.mail.ToXML.java
/**
 * Serializes a message part tree into XML elements under {@code root},
 * traversing iteratively with an explicit stack of (parent element,
 * remaining child parts) levels instead of recursion. Each part is handed
 * to addPart(); a part whose addPart() call returns an element and that
 * has children is descended into (PREVISIT) and handed to addPart() again
 * when its level is exhausted (POSTVISIT).
 */
private static void addParts(Element root, MPartInfo mpiRoot, Set<MPartInfo> bodies, String prefix, int maxSize,
        boolean neuter, boolean excludeCalendarParts, String defaultCharset, boolean swallowContentExceptions,
        MsgContent wantContent) throws ServiceException {
    MPartInfo mpi = mpiRoot;
    // Each queue entry pairs a parent XML element with the list of its
    // not-yet-processed child parts; the last entry is the current level.
    LinkedList<Pair<Element, LinkedList<MPartInfo>>> queue = new LinkedList<Pair<Element, LinkedList<MPartInfo>>>();
    Pair<Element, LinkedList<MPartInfo>> level = new Pair<Element, LinkedList<MPartInfo>>(root,
            new LinkedList<MPartInfo>());
    level.getSecond().add(mpi);
    queue.add(level);
    VisitPhase phase = VisitPhase.PREVISIT;
    while (!queue.isEmpty()) {
        level = queue.getLast();
        LinkedList<MPartInfo> parts = level.getSecond();
        if (parts.isEmpty()) {
            // Level exhausted: pop it and revisit its parent in POSTVISIT phase.
            queue.removeLast();
            phase = VisitPhase.POSTVISIT;
            continue;
        }
        mpi = parts.getFirst();
        Element child = addPart(phase, level.getFirst(), root, mpi, bodies, prefix, maxSize, neuter,
                excludeCalendarParts, defaultCharset, swallowContentExceptions, wantContent);
        if (phase == VisitPhase.PREVISIT && child != null && mpi.hasChildren()) {
            // Descend: push a new level containing this part's children.
            queue.addLast(new Pair<Element, LinkedList<MPartInfo>>(child,
                    new LinkedList<MPartInfo>(mpi.getChildren())));
        } else {
            // Done with this part (leaf, skipped by addPart, or just post-visited).
            parts.removeFirst();
            phase = VisitPhase.PREVISIT;
        }
    }
}