List of usage examples for java.util.Vector.elements()
public Enumeration<E> elements()
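Before the full examples below, a minimal self-contained sketch of the basic call pattern (class and variable names are illustrative, not taken from any of the sources): elements() returns an Enumeration over the vector's components in index order, which is consumed with hasMoreElements()/nextElement().

import java.util.Enumeration;
import java.util.Vector;

public class VectorElementsDemo {
    public static void main(String[] args) {
        Vector<String> names = new Vector<String>();
        names.add("alpha");
        names.add("beta");
        names.add("gamma");

        // elements() returns an Enumeration over the current components, in index order
        Enumeration<String> e = names.elements();
        while (e.hasMoreElements()) {
            System.out.println(e.nextElement());
        }
    }
}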
From source file:com.idega.slide.util.WebdavLocalResource.java
@SuppressWarnings("deprecation") private Enumeration<LocalResponse> propfindMethod(NodeRevisionDescriptor descriptor) throws HttpException, IOException { if (descriptor == null) { return null; }// w ww. j ava2 s . c o m if (properties != null) { return properties; } try { Vector<LocalResponse> responses = new Vector<LocalResponse>(); LocalResponse response = new LocalResponse(); response.setHref(getPath()); responses.add(response); @SuppressWarnings("unchecked") List<NodeProperty> nodeProperties = Collections.list(descriptor.enumerateProperties()); List<Property> properties = new ArrayList<Property>(); for (NodeProperty p : nodeProperties) { String localName = p.getPropertyName().getName(); Property property = null; if (localName.equals(RESOURCETYPE)) { Object oValue = p.getValue(); String value = oValue == null ? null : oValue.toString(); Element element = null; if ("<collection/>".equals(value)) { element = getCollectionElement(); } else if (CoreConstants.EMPTY.equals(value)) { element = getEmptyElement(); } else { Document doc = XmlUtil.getDocumentBuilder().newDocument(); String namespace = p.getNamespace(); String tagName = p.getName(); element = doc.createElementNS(namespace, tagName); element.appendChild(doc.createTextNode(value)); } property = new ResourceTypeProperty(response, element); } else if (localName.equals(LOCKDISCOVERY)) { /*DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); DocumentBuilder builder = factory.newDocumentBuilder(); Document doc = builder.newDocument(); Element element = doc.createElement("collection"); property = new LockDiscoveryProperty(response,element);*/ throw new RuntimeException("LockDiscoveryProperty not yet implemented for: " + getPath()); } else if (CREATIONDATE.equals(localName)) { setCreationDate((String) p.getValue()); } else if (GETLASTMODIFIED.equals(localName)) { setGetLastModified((String) p.getValue()); } else { LocalProperty lProperty = new LocalProperty(response); property = lProperty; lProperty.setName(p.getName()); lProperty.setNamespaceURI(p.getNamespace()); lProperty.setLocalName(p.getName()); Object oValue = p.getValue(); String value = oValue == null ? null : oValue.toString(); lProperty.setPropertyAsString(value); } if (property != null) { properties.add(property); } } if (!ListUtil.isEmpty(properties)) { response.setProperties(new Vector<Property>(properties)); } this.properties = responses.elements(); if (this.properties != null) { setProperties(3, 0); // Need to set basic properties } return this.properties; } catch (Exception e) { LOGGER.log(Level.WARNING, "Error getting properties for: " + getPath() + ": " + e.getMessage(), e); if (e instanceof ObjectNotFoundException) { getSlideAPI().deletetDefinitionFile(((ObjectNotFoundException) e).getObjectUri()); HttpException he = new HttpException("Resource on path: " + getPath() + " not found"); he.setReasonCode(WebdavStatus.SC_NOT_FOUND); throw he; } else if (e instanceof RevisionDescriptorNotFoundException) { getSlideAPI().deletetDefinitionFile(((RevisionDescriptorNotFoundException) e).getObjectUri()); } return null; } }
From source file:com.sshtools.common.ui.SshToolsApplicationPanel.java
/**
 * Rebuild all the action components such as toolbar, context menu
 */
public void rebuildActionComponents() {
    // Clear the current state of the component
    log.debug("Rebuild action components");
    toolBar.removeAll();

    Vector enabledActions = new Vector();
    for (Iterator i = actions.iterator(); i.hasNext();) {
        StandardAction a = (StandardAction) i.next();
        String n = (String) a.getValue(Action.NAME);
        Boolean s = (Boolean) actionsVisible.get(n);
        if (s == null) {
            s = Boolean.TRUE;
        }
        if (Boolean.TRUE.equals(s)) {
            log.debug("Action " + n + " is enabled.");
            enabledActions.add(a);
        } else {
            log.debug("Action " + n + " not enabled.");
        }
    }

    // Build the tool bar, grouping the actions
    Vector v = new Vector();
    for (Iterator i = enabledActions.iterator(); i.hasNext();) {
        StandardAction a = (StandardAction) i.next();
        if (Boolean.TRUE.equals((Boolean) a.getValue(StandardAction.ON_TOOLBAR))) {
            v.addElement(a);
        }
    }
    Collections.sort(v, new ToolBarActionComparator());
    Integer grp = null;
    for (Iterator i = v.iterator(); i.hasNext();) {
        StandardAction z = (StandardAction) i.next();
        if ((grp != null) && !grp.equals((Integer) z.getValue(StandardAction.TOOLBAR_GROUP))) {
            toolBar.add(new ToolBarSeparator());
        }
        if (Boolean.TRUE.equals((Boolean) z.getValue(StandardAction.IS_TOGGLE_BUTTON))) {
            ToolToggleButton tBtn = new ToolToggleButton(z);
            toolBar.add(tBtn);
        } else {
            ToolButton btn = new ToolButton(z);
            toolBar.add(btn);
        }
        grp = (Integer) z.getValue(StandardAction.TOOLBAR_GROUP);
    }
    toolBar.revalidate();
    toolBar.repaint();

    // Build the context menu, grouping the actions
    Vector c = new Vector();
    contextMenu.removeAll();
    for (Iterator i = enabledActions.iterator(); i.hasNext();) {
        StandardAction a = (StandardAction) i.next();
        if (Boolean.TRUE.equals((Boolean) a.getValue(StandardAction.ON_CONTEXT_MENU))) {
            c.addElement(a);
        }
    }
    Collections.sort(c, new ContextActionComparator());
    grp = null;
    for (Iterator i = c.iterator(); i.hasNext();) {
        StandardAction z = (StandardAction) i.next();
        if ((grp != null) && !grp.equals((Integer) z.getValue(StandardAction.CONTEXT_MENU_GROUP))) {
            contextMenu.addSeparator();
        }
        contextMenu.add(z);
        grp = (Integer) z.getValue(StandardAction.CONTEXT_MENU_GROUP);
    }
    contextMenu.revalidate();

    // Build the menu bar
    menuBar.removeAll();
    v.removeAllElements();
    for (Enumeration e = enabledActions.elements(); e.hasMoreElements();) {
        StandardAction a = (StandardAction) e.nextElement();
        if (Boolean.TRUE.equals((Boolean) a.getValue(StandardAction.ON_MENUBAR))) {
            v.addElement(a);
        }
    }
    Vector menus = (Vector) actionMenus.clone();
    Collections.sort(menus);
    HashMap map = new HashMap();
    for (Iterator i = v.iterator(); i.hasNext();) {
        StandardAction z = (StandardAction) i.next();
        String menuName = (String) z.getValue(StandardAction.MENU_NAME);
        if (menuName == null) {
            log.error("Action " + z.getName() + " doesnt specify a value for " + StandardAction.MENU_NAME);
        } else {
            String m = (String) z.getValue(StandardAction.MENU_NAME);
            ActionMenu menu = getActionMenu(menus.iterator(), m);
            if (menu == null) {
                log.error("Action menu " + z.getName() + " does not exist");
            } else {
                Vector x = (Vector) map.get(menu.name);
                if (x == null) {
                    x = new Vector();
                    map.put(menu.name, x);
                }
                x.addElement(z);
            }
        }
    }
    for (Iterator i = menus.iterator(); i.hasNext();) {
        ActionMenu m = (ActionMenu) i.next();
        Vector x = (Vector) map.get(m.name);
        if (x != null) {
            Collections.sort(x, new MenuItemActionComparator());
            JMenu menu = new JMenu(m.displayName);
            menu.setMnemonic(m.mnemonic);
            grp = null;
            for (Iterator j = x.iterator(); j.hasNext();) {
                StandardAction a = (StandardAction) j.next();
                Integer g = (Integer) a.getValue(StandardAction.MENU_ITEM_GROUP);
                if ((grp != null) && !g.equals(grp)) {
                    menu.addSeparator();
                }
                grp = g;
                if (a instanceof MenuAction) {
                    JMenu mnu = (JMenu) a.getValue(MenuAction.MENU);
                    menu.add(mnu);
                } else {
                    JMenuItem item = new JMenuItem(a);
                    menu.add(item);
                }
            }
            menuBar.add(menu);
        } else {
            log.error("Can't find menu " + m.name);
        }
    }
    menuBar.validate();
    menuBar.repaint();
}
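The method above walks the same enabledActions Vector with both iterator() and elements(); for read-only traversal the two are interchangeable. One documented difference: iterators from Vector.iterator() are fail-fast, while Enumerations from elements() are not, so structural changes made mid-traversal do not throw ConcurrentModificationException. A small sketch with illustrative names:

import java.util.Enumeration;
import java.util.Vector;

public class ElementsVsIterator {
    public static void main(String[] args) {
        Vector<String> actions = new Vector<String>();
        actions.add("copy");
        actions.add("paste");

        // Enumerations from elements() are not fail-fast: appending during
        // traversal simply means the new element is visited as well.
        Enumeration<String> e = actions.elements();
        while (e.hasMoreElements()) {
            String name = e.nextElement();
            if ("copy".equals(name)) {
                actions.add("cut"); // allowed here; iterator() would fail fast
            }
            System.out.println(name);
        }
        // prints: copy, paste, cut
    }
}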
From source file:Debrief.Tools.FilterOperations.ShowTimeVariablePlot3.java
/** * Collate the data points to plot// ww w .j a v a2 s . c o m * * @param primaryTrack * the primary track * @param myOperation * the calculation we're making * @param theTracks * the selected set of tracks * @param start_time * the start time selected * @param end_time * the end time selected * @param provider * the provider of the time offset used when plotting time-zero data * @return the dataset to plot * @see toteCalculation#isWrappableData * @see toteCalculation#calculate(Watchable primary,Watchable * secondary,HiResDate thisTime) * @see Debrief.Tools.FilterOperations.ShowTimeVariablePlot3.CalculationHolder#isARelativeCalculation * @see WatchableList#getItemsBetween(HiResDate start,HiResDate end) * @see TimeSeriesCollection#addSeries(BasicTimeSeries series) */ public static AbstractSeriesDataset getDataSeries(final WatchableList primaryTrack, final CalculationHolder myOperation, final Vector<WatchableList> theTracks, final HiResDate start_time, final HiResDate end_time, final ColouredDataItem.OffsetProvider provider) { final toteCalculation theCalculation = myOperation._theCalc; AbstractSeriesDataset theSeriesCollection = null; // ok, now collate the data VersatileSeriesAdder theAdder = null; // sort out the adder for what we're doing if (HiResDate.inHiResProcessingMode()) { theSeriesCollection = new TimeSeriesCollection(); theAdder = new VersatileSeriesAdder() { public void add(final Series thisSeries, final HiResDate theTime, final double data, final Color thisColor, final boolean connectToPrevious, final ColouredDataItem.OffsetProvider provider1) { // HI-RES NOT DONE - FixedMillisecond should be converted // some-how to // FixedMicroSecond final TimeSeriesDataItem newItem = new ColouredDataItem( new FixedMillisecond((long) (theTime.getMicros() / 1000d)), data, thisColor, connectToPrevious, provider1); // To change body of implemented methods use File | Settings // | File // Templates. final TimeSeries theSeries = (TimeSeries) thisSeries; theSeries.add(newItem); } public void addSeries(final AbstractSeriesDataset collection, final Series thisSeries, final Color defaultColor) { final TimeSeriesCollection coll = (TimeSeriesCollection) collection; coll.addSeries((TimeSeries) thisSeries); } }; } else { theSeriesCollection = new TimeSeriesCollection(); // right, just working with normal dates theAdder = new VersatileSeriesAdder() { public void add(final Series thisSeries, final HiResDate theTime, final double data, final Color thisColor, final boolean connectToPrevious, final ColouredDataItem.OffsetProvider provider1) { // HI-RES NOT DONE - FixedMillisecond should be converted // some-how to // FixedMicroSecond final ColouredDataItem newItem = new ColouredDataItem( new FixedMillisecond(theTime.getDate().getTime()), data, thisColor, connectToPrevious, provider1); // To change body of implemented methods use File | Settings // | File // Templates. final TimeSeries theSeries = (TimeSeries) thisSeries; theSeries.add(newItem); } public void addSeries(final AbstractSeriesDataset collection, final Series thisSeries, final Color defaultColor) { final TimeSeriesCollection coll = (TimeSeriesCollection) collection; coll.addSeries((TimeSeries) thisSeries); } }; } // calculate the data variables for our tracks final Enumeration<WatchableList> iter = theTracks.elements(); while (iter.hasMoreElements()) { final WatchableList thisSecondaryTrack = (WatchableList) iter.nextElement(); // is this a relative calculation? 
if (myOperation.isARelativeCalculation()) { // yes, but we don't bother with the primary track, see if this // is it if (thisSecondaryTrack == primaryTrack) { // just double check that we have primary data final Collection<Editable> ss = thisSecondaryTrack.getItemsBetween(start_time, end_time); if (ss == null) { Application.logError2(ToolParent.WARNING, "Insufficient points found in primary track." + "\nPlease check coverage of time controller bars", null); return null; } // drop out, and wait for the next cycle continue; } } // //////////////////////////////////////////////////// // step through the track // final Collection<Editable> ss = thisSecondaryTrack.getItemsBetween(start_time, end_time); // indicator for whether we join this data point to the previous one boolean connectToPrevious = false; // have we found any?. Hey, listen here. The "getItemsBetween" // method may return data items, but we may still not be able to do the calc // (such as if we have "NaN" for depth). So we still do a sanity check // at the end of this method to stop us adding empty data series to the collection. if (ss == null) { Application.logError2(ToolParent.WARNING, "Insufficient points found in primary track." + "\nPlease check coverage of time controller bars", null); return null; } else { // remember the default color for this series Color seriesColor; // ok, now collate the data Series thisSeries = null; // sort out the adder for what we're doing if (HiResDate.inHiResProcessingMode()) { thisSeries = new XYSeries(thisSecondaryTrack.getName()); } else { thisSeries = new TimeSeries(thisSecondaryTrack.getName()); } seriesColor = thisSecondaryTrack.getColor(); // split into separate processing here, depending on where we're // looking // at a relative calculation if (myOperation.isARelativeCalculation()) { // yes, it is a relative calculation. // Find out if it's a special case (where we don't have time // data) if (thisSecondaryTrack.getStartDTG() == null) { // do we have any primary data to fall back on (to // decide the times // for // data points) if (primaryTrack.getStartDTG() == null) { // //////////////////////////////////////////////// // CASE 1 - neither track has time data, relative // calc // //////////////////////////////////////////////// // so, we don't have primary or secondary data. 
// produce data // values at the start and end of the track // produce data points at the primary track // locations final Iterator<Editable> it = ss.iterator(); final Watchable theSecondaryPoint = (Watchable) it.next(); // get an iterator for the primary track final Collection<Editable> primaryPoints = primaryTrack.getItemsBetween(start_time, end_time); // do we have any primary data in this period if (primaryPoints != null) { final Iterator<Editable> throughPrimary = primaryPoints.iterator(); final Watchable thisPrimary = (Watchable) throughPrimary.next(); // ok, create the series with it's two points in produceTwoPointDataSeries(theCalculation, thisPrimary, theSecondaryPoint, thisSeries, start_time, end_time, provider, theAdder); } } else { // //////////////////////////////////////////////// // CASE 2 - secondary track has time data, relative // calc // //////////////////////////////////////////////// // so, we do have time data for the secondary track, // but not on // the primary track // therefore we produce data points at the primary // track locations final Watchable[] theSecondaryPoints = thisSecondaryTrack.getNearestTo(start_time); final Watchable theSecondaryPoint = theSecondaryPoints[0]; final Color thisColor = theSecondaryPoint.getColor(); // get an iterator for the primary track final Collection<Editable> primaryPoints = primaryTrack.getItemsBetween(start_time, end_time); if (primaryPoints != null) { final Iterator<Editable> throughPrimary = primaryPoints.iterator(); while (throughPrimary.hasNext()) { final Watchable thisPrimary = (Watchable) throughPrimary.next(); final HiResDate currentTime = thisPrimary.getTime(); // and add the new data point (if we have // to) connectToPrevious = createDataPoint(theCalculation, thisPrimary, theSecondaryPoint, currentTime, connectToPrevious, thisColor, thisSeries, provider, theAdder); } // stepping through the primary track } // whether we have primary points } } else // whether we have DTG data { // //////////////////////////////////////////////// // CASE 3 - both tracks have time data, relative calc // //////////////////////////////////////////////// // yes, we do have DTG data for this track - hooray! // ok, step through the list final Iterator<Editable> it = ss.iterator(); // remember the last point - used to check if we're // passing through // zero degs double lastSecondaryValue = Double.NaN; // we we're // using NaN but // it // was failing the equality // test HiResDate lastTime = null; throughThisTrack: while (it.hasNext()) { final Watchable thisSecondary = (Watchable) it.next(); final Color thisColor = thisSecondary.getColor(); // what's the current time? final HiResDate currentTime = thisSecondary.getTime(); // is this fix visible? if (thisSecondary.getVisible()) { // the point on the primary track we work with Watchable thisPrimary = null; // find the fix on the primary track which is // nearest in // time to this one (if we need to) Watchable[] nearList; // temp switch on interpolation Boolean oldInterp = null; if (primaryTrack instanceof ISecondaryTrack) { final ISecondaryTrack tw = (ISecondaryTrack) primaryTrack; oldInterp = tw.getInterpolatePoints(); tw.setInterpolatePoints(true); } // find it's nearest point on the primary track nearList = primaryTrack.getNearestTo(currentTime); // and restore the interpolate points setting if (oldInterp != null) { final ISecondaryTrack tw = (ISecondaryTrack) primaryTrack; tw.setInterpolatePoints(oldInterp.booleanValue()); } // yes. 
right, we only perform a calc if we have // primary data // for this point if (nearList.length == 0) { // remember that the next point doesn't // connect to it's // previous one // since we want to show the gap represented // by this datum connectToPrevious = false; // drop out, and wait for the next cycle continue throughThisTrack; } else { thisPrimary = nearList[0]; } // //////////////////////////////////////////////// // NOW PUT IN BIT TO WRAP THROUGH ZERO WHERE // APPLICABLE // //////////////////////////////////////////////// // produce the new calculated value final double thisVal = theCalculation.calculate(thisSecondary, thisPrimary, currentTime); // SPECIAL HANDLING - do we need to check if // this data passes // through 360 degs? if (theCalculation.isWrappableData()) { // add extra points, if we need to connectToPrevious = insertWrappingPoints(theCalculation, lastSecondaryValue, thisVal, lastTime, currentTime, thisColor, thisSeries, connectToPrevious, provider, theAdder, myOperation._clipMax); } // //////////////////////////////////////////////// // THANK YOU, WE'RE PLEASED TO RETURN YOU TO // YOUR NORMAL PROGRAM // //////////////////////////////////////////////// // and add the new data point (if we have to) connectToPrevious = createDataPoint(theCalculation, thisPrimary, thisSecondary, currentTime, connectToPrevious, thisColor, thisSeries, provider, theAdder); lastSecondaryValue = thisVal; lastTime = currentTime; } // whether this point is visible } // stepping through this track } // whether we have DTG data } else { // so, this is an absolute calculation - we don't need to // worry about // the primry // track // do we have time data for this secondary track? if (thisSecondaryTrack.getStartDTG() == null) { // //////////////////////////////////////////////// // CASE 4 - no time data, non-relative calculation // //////////////////////////////////////////////// // it's ok. It we don't have time related data for this // point we // just create // data points for it at the start & end of the track // ok, create the series with it's two points in // ok, step through the list final Iterator<Editable> it = ss.iterator(); final Watchable thisSecondary = (Watchable) it.next(); // and produceTwoPointDataSeries(theCalculation, null, thisSecondary, thisSeries, start_time, end_time, provider, theAdder); } else { // //////////////////////////////////////////////// // CASE 5 - with time data, non-relative calculation // //////////////////////////////////////////////// // ok, step through the list final Iterator<Editable> it = ss.iterator(); // remember the last point - used to check if we're // passing through // zero degs double lastSecondaryValue = Double.NaN; // we we're // using NaN but // it // was failing the equality // test HiResDate lastTime = null; while (it.hasNext()) { final Watchable thisSecondary = (Watchable) it.next(); // / get the colour final Color thisColor = thisSecondary.getColor(); // what's the time of this data point? final HiResDate currentTime = thisSecondary.getTime(); // produce the new calculated value final double thisVal = theCalculation.calculate(thisSecondary, null, currentTime); // SPECIAL HANDLING - do we need to check if this // data passes // through 360 degs? if (theCalculation.isWrappableData()) { // add extra points, if we need to connectToPrevious = insertWrappingPoints(theCalculation, lastSecondaryValue, thisVal, lastTime, currentTime, thisColor, thisSeries, connectToPrevious, provider, theAdder, myOperation._clipMax); } // is this fix visible? 
if (thisSecondary.getVisible()) { // the point on the primary track we work with final Watchable thisPrimary = null; // and add the new data point (if we have to) connectToPrevious = createDataPoint(theCalculation, thisPrimary, thisSecondary, currentTime, connectToPrevious, thisColor, thisSeries, provider, theAdder); lastSecondaryValue = thisVal; lastTime = new HiResDate(currentTime); } // whether this point is visible } // stepping through this secondary collection } // whether there was time-related data for this track } // whether this was a relative calculation // if the series if empty, set it to null, rather than create // one of // empty length if (thisSeries instanceof XYSeries) { final XYSeries ser = (XYSeries) thisSeries; if (ser.getItemCount() == 0) thisSeries = null; } else if (thisSeries instanceof TimeSeries) { final TimeSeries ser = (TimeSeries) thisSeries; if (ser.getItemCount() == 0) thisSeries = null; } // did we find anything? if (thisSeries != null) { theAdder.addSeries(theSeriesCollection, thisSeries, seriesColor); } } // if this collection actually had data } // looping through the tracks if (theSeriesCollection.getSeriesCount() == 0) theSeriesCollection = null; return theSeriesCollection; }
From source file:org.sakaiproject.dav.DavServlet.java
/** * PROPFIND Method./*from w w w. j a v a 2 s . co m*/ */ protected void doPropfind(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String path = getRelativePathSAKAI(req); if (path.endsWith("/")) path = path.substring(0, path.length() - 1); if ((path.toUpperCase().startsWith("/WEB-INF")) || (path.toUpperCase().startsWith("/META-INF")) || prohibited(path)) { resp.sendError(SakaidavStatus.SC_FORBIDDEN); return; } // Properties which are to be displayed. Vector<String> properties = null; // Propfind depth int depth = INFINITY; // Propfind type int type = FIND_ALL_PROP; String depthStr = req.getHeader("Depth"); if (depthStr == null) { depth = INFINITY; } else { if (depthStr.equals("0")) { depth = 0; } else if (depthStr.equals("1")) { depth = 1; } else if (depthStr.equals("infinity")) { depth = INFINITY; } } Node propNode = null; DocumentBuilder documentBuilder = getDocumentBuilder(); // be careful how we get content, as we've had hangs in mod_jk // Rather than passing the XML parser a stream on the network // input, we read it into a buffer and pass them a stream // on the buffer. This is an experiment to see if it fixes // the hangs. // Note that getContentLength can return -1. As everyone seems // to use the content-length header, ignore that case for now // It is strongly discouraged by the spec. int contentLength = req.getContentLength(); if (contentLength > MAX_XML_STREAM_LENGTH) { resp.sendError(HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE); return; } else if (contentLength > 0) { byte[] byteContent = new byte[contentLength]; InputStream inputStream = req.getInputStream(); int lenRead = 0; try { while (lenRead < contentLength) { int read = inputStream.read(byteContent, lenRead, contentLength - lenRead); if (read <= 0) break; lenRead += read; } } catch (Exception ignore) { } // if anything goes wrong, we treat it as find all props // Parse the input XML to see what they really want if (lenRead > 0) try { InputStream is = new ByteArrayInputStream(byteContent, 0, lenRead); // System.out.println("have bytes"); Document document = documentBuilder.parse(new InputSource(is)); // Get the root element of the document Element rootElement = document.getDocumentElement(); NodeList childList = rootElement.getChildNodes(); // System.out.println("have nodes " + childList.getLength()); for (int i = 0; i < childList.getLength(); i++) { Node currentNode = childList.item(i); // System.out.println("looking at node " + currentNode.getNodeName()); switch (currentNode.getNodeType()) { case Node.TEXT_NODE: break; case Node.ELEMENT_NODE: if (currentNode.getNodeName().endsWith("prop")) { type = FIND_BY_PROPERTY; propNode = currentNode; } if (currentNode.getNodeName().endsWith("propname")) { type = FIND_PROPERTY_NAMES; } if (currentNode.getNodeName().endsWith("allprop")) { type = FIND_ALL_PROP; } break; } } } catch (SAXParseException se) { resp.sendError(HttpServletResponse.SC_BAD_REQUEST); return; } catch (Exception e) { M_log.warn("Exception parsing DAV request", e); } // again, in case of exception, we'll have the default // FIND_ALL_PROP } // System.out.println("Find type " + type); if (type == FIND_BY_PROPERTY) { properties = new Vector<String>(); NodeList childList = propNode.getChildNodes(); for (int i = 0; i < childList.getLength(); i++) { Node currentNode = childList.item(i); switch (currentNode.getNodeType()) { case Node.TEXT_NODE: break; case Node.ELEMENT_NODE: String nodeName = currentNode.getNodeName(); String propertyName = null; if 
(nodeName.indexOf(':') != -1) { propertyName = nodeName.substring(nodeName.indexOf(':') + 1); } else { propertyName = nodeName; } // href is a live property which is handled differently properties.addElement(propertyName); break; } } } // Retrieve the resources DirContextSAKAI resources = getResourcesSAKAI(); if (resources == null) { resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; } // Point the resource object at a particular path and catch the error if necessary. boolean exists = true; try { resources.lookup(path); } catch (NamingException e) { exists = false; int slash = path.lastIndexOf('/'); if (slash != -1) { String parentPath = path.substring(0, slash); Vector<String> currentLockNullResources = lockNullResources.get(parentPath); if (currentLockNullResources != null) { Enumeration<String> lockNullResourcesList = currentLockNullResources.elements(); while (lockNullResourcesList.hasMoreElements()) { String lockNullPath = lockNullResourcesList.nextElement(); if (lockNullPath.equals(path)) { resp.setStatus(SakaidavStatus.SC_MULTI_STATUS); resp.setContentType("text/xml; charset=UTF-8"); // Create multistatus object XMLWriter generatedXML = new XMLWriter(resp.getWriter()); generatedXML.writeXMLHeader(); generatedXML.writeElement("D", "multistatus" + generateNamespaceDeclarations(), XMLWriter.OPENING); parseLockNullProperties(req, generatedXML, lockNullPath, type, properties); generatedXML.writeElement("D", "multistatus", XMLWriter.CLOSING); generatedXML.sendData(); return; } } } } } if (!exists) { resp.sendError(HttpServletResponse.SC_NOT_FOUND, "/dav" + path); return; } resp.setStatus(SakaidavStatus.SC_MULTI_STATUS); resp.setContentType("text/xml; charset=UTF-8"); // Create multistatus object XMLWriter generatedXML = new XMLWriter(resp.getWriter()); generatedXML.writeXMLHeader(); generatedXML.writeElement("D", "multistatus" + generateNamespaceDeclarations(), XMLWriter.OPENING); if (depth == 0) { parseProperties(req, resources, generatedXML, path, type, properties); } else { // The stack always contains the object of the current level Stack<String> stack = new Stack<String>(); stack.push(path); // Stack of the objects one level below Stack<String> stackBelow = new Stack<String>(); while ((!stack.isEmpty()) && (depth >= 0)) { String currentPath = (String) stack.pop(); try { // if (M_log.isDebugEnabled()) M_log.debug("Lookup currentPath="+currentPath); resources.lookup(currentPath); } catch (NamingException e) { continue; } parseProperties(req, resources, generatedXML, currentPath, type, properties); if ((resources.isCollection) && (depth > 0)) { Iterator<ContentEntity> it = resources.list(currentPath); while (it.hasNext()) { Entity mbr = it.next(); String resourceName = getResourceNameSAKAI(mbr); String newPath = currentPath; if (!(newPath.endsWith("/"))) newPath += "/"; newPath += resourceName; if (!(newPath.toLowerCase().indexOf("/protected") >= 0 && !contentHostingService.allowAddCollection(newPath))) stackBelow.push(newPath); // if (M_log.isDebugEnabled()) M_log.debug("SAKAI found resource " + newPath); } // Displaying the lock-null resources present in that // collection String lockPath = currentPath; if (lockPath.endsWith("/")) lockPath = lockPath.substring(0, lockPath.length() - 1); Vector<String> currentLockNullResources = lockNullResources.get(lockPath); if (currentLockNullResources != null) { Enumeration<String> lockNullResourcesList = currentLockNullResources.elements(); while (lockNullResourcesList.hasMoreElements()) { String lockNullPath = (String) 
lockNullResourcesList.nextElement(); parseLockNullProperties(req, generatedXML, lockNullPath, type, properties); } } } if (stack.isEmpty()) { depth--; stack = stackBelow; stackBelow = new Stack<String>(); } // if (M_log.isDebugEnabled()) M_log.debug("SAKAIDAV.propfind() " + generatedXML.toString()); generatedXML.sendData(); } } generatedXML.writeElement("D", "multistatus", XMLWriter.CLOSING); // if (M_log.isDebugEnabled()) M_log.debug("SAKAIDAV.propfind() at end:" + generatedXML.toString()); generatedXML.sendData(); }
From source file:trendanalisis.main.tools.weka.CoreWekaTFIDF.java
/**
 * Returns an enumeration describing the available options.
 *
 * @return an enumeration of all the available options
 */
@Override
public Enumeration<Option> listOptions() {
    Vector<Option> result = new Vector<Option>();

    result.addElement(new Option("\tOutput word counts rather than boolean word presence.\n", "C", 0, "-C"));
    result.addElement(new Option("\tSpecify list of string attributes to convert to words (as weka Range).\n"
            + "\t(default: select all string attributes)", "R", 1, "-R <index1,index2-index4,...>"));
    result.addElement(new Option("\tInvert matching sense of column indexes.", "V", 0, "-V"));
    result.addElement(new Option("\tSpecify a prefix for the created attribute names.\n" + "\t(default: \"\")",
            "P", 1, "-P <attribute name prefix>"));
    result.addElement(new Option("\tSpecify approximate number of word fields to create.\n"
            + "\tSurplus words will be discarded..\n" + "\t(default: 1000)", "W", 1,
            "-W <number of words to keep>"));
    result.addElement(new Option(
            "\tSpecify the rate (e.g., every 10% of the input dataset) at which to periodically prune the dictionary.\n"
                    + "\t-W prunes after creating a full dictionary. You may not have enough memory for this approach.\n"
                    + "\t(default: no periodic pruning)",
            "prune-rate", 1, "-prune-rate <rate as a percentage of dataset>"));
    result.addElement(new Option("\tTransform the word frequencies into log(1+fij)\n"
            + "\twhere fij is the frequency of word i in jth document(instance).\n", "T", 0, "-T"));
    result.addElement(new Option("\tTransform each word frequency into:\n"
            + "\tfij*log(num of Documents/num of documents containing word i)\n"
            + "\t where fij if frequency of word i in jth document(instance)", "I", 0, "-I"));
    result.addElement(new Option(
            "\tWhether to 0=not normalize/1=normalize all data/2=normalize test data only\n"
                    + "\tto average length of training documents " + "(default 0=don\'t normalize).",
            "N", 1, "-N"));
    result.addElement(new Option("\tConvert all tokens to lowercase before " + "adding to the dictionary.",
            "L", 0, "-L"));
    result.addElement(new Option("\tThe stopwords handler to use (default Null).", "-stopwords-handler", 1,
            "-stopwords-handler"));
    result.addElement(new Option("\tThe stemming algorithm (classname plus parameters) to use.", "stemmer", 1,
            "-stemmer <spec>"));
    result.addElement(new Option("\tThe minimum term frequency (default = 1).", "M", 1, "-M <int>"));
    result.addElement(new Option("\tIf this is set, the maximum number of words and the \n"
            + "\tminimum term frequency is not enforced on a per-class \n"
            + "\tbasis but based on the documents in all the classes \n"
            + "\t(even if a class attribute is set).", "O", 0, "-O"));
    result.addElement(new Option("\tThe tokenizing algorihtm (classname plus parameters) to use.\n"
            + "\t(default: " + WordTokenizer.class.getName() + ")", "tokenizer", 1, "-tokenizer <spec>"));

    return result.elements();
}
From source file:org.apache.webdav.lib.WebdavResource.java
/**
 * Get the activelock owners for this resource.
 *
 * @return An enumeration of owners.
 */
public Enumeration getActiveLockOwners() {
    if (lockDiscovery == null)
        return null;
    Lock[] activeLocks = lockDiscovery.getActiveLocks();
    if (activeLocks == null)
        return null;
    Vector buff = new Vector();
    int count = activeLocks.length;
    for (int i = 0; i < count; i++) {
        buff.addElement(activeLocks[i].getOwner());
    }
    return buff.elements();
}
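For code that is not tied to Vector, the same "collect, then return an Enumeration" shape can be built on any List via Collections.enumeration. A hedged sketch of an equivalent of the method above; the nested Lock interface merely stands in for the webdavlib Lock type and is an assumption, not the library's API.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;

// Illustrative only; Lock is a placeholder for the real lock type.
class LockOwners {
    interface Lock {
        String getOwner();
    }

    static Enumeration<String> ownersOf(List<Lock> activeLocks) {
        List<String> owners = new ArrayList<String>();
        for (Lock lock : activeLocks) {
            owners.add(lock.getOwner());
        }
        // Collections.enumeration wraps any Collection in an Enumeration view
        return Collections.enumeration(owners);
    }
}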
From source file:com.concursive.connect.web.webdav.servlets.WebdavServlet.java
/** * LOCK Method./*www.j a v a 2 s . com*/ * * @param req Description of the Parameter * @param resp Description of the Parameter * @throws javax.servlet.ServletException Description of the Exception * @throws java.io.IOException Description of the Exception */ protected void doLock(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { if (readOnly) { resp.sendError(WebdavStatus.SC_FORBIDDEN); return; } if (isLocked(req)) { resp.sendError(WebdavStatus.SC_LOCKED); return; } WebdavServlet.LockInfo lock = new WebdavServlet.LockInfo(); // Parsing lock request // Parsing depth header String depthStr = req.getHeader("Depth"); if (depthStr == null) { lock.depth = INFINITY; } else { if (depthStr.equals("0")) { lock.depth = 0; } else { lock.depth = INFINITY; } } // Parsing timeout header int lockDuration = DEFAULT_TIMEOUT; String lockDurationStr = req.getHeader("Timeout"); if (lockDurationStr == null) { lockDuration = DEFAULT_TIMEOUT; } else { int commaPos = lockDurationStr.indexOf(","); // If multiple timeouts, just use the first if (commaPos != -1) { lockDurationStr = lockDurationStr.substring(0, commaPos); } if (lockDurationStr.startsWith("Second-")) { lockDuration = (new Integer(lockDurationStr.substring(7))).intValue(); } else { if (lockDurationStr.equalsIgnoreCase("infinity")) { lockDuration = MAX_TIMEOUT; } else { try { lockDuration = (new Integer(lockDurationStr)).intValue(); } catch (NumberFormatException e) { lockDuration = MAX_TIMEOUT; } } } if (lockDuration == 0) { lockDuration = DEFAULT_TIMEOUT; } if (lockDuration > MAX_TIMEOUT) { lockDuration = MAX_TIMEOUT; } } lock.expiresAt = System.currentTimeMillis() + (lockDuration * 1000); int lockRequestType = LOCK_CREATION; Node lockInfoNode = null; DocumentBuilder documentBuilder = getDocumentBuilder(); try { Document document = documentBuilder.parse(new InputSource(req.getInputStream())); // Get the root element of the document Element rootElement = document.getDocumentElement(); lockInfoNode = rootElement; } catch (Exception e) { lockRequestType = LOCK_REFRESH; } if (lockInfoNode != null) { // Reading lock information NodeList childList = lockInfoNode.getChildNodes(); StringWriter strWriter = null; DOMWriter domWriter = null; Node lockScopeNode = null; Node lockTypeNode = null; Node lockOwnerNode = null; for (int i = 0; i < childList.getLength(); i++) { Node currentNode = childList.item(i); switch (currentNode.getNodeType()) { case Node.TEXT_NODE: break; case Node.ELEMENT_NODE: String nodeName = currentNode.getNodeName(); if (nodeName.endsWith("lockscope")) { lockScopeNode = currentNode; } if (nodeName.endsWith("locktype")) { lockTypeNode = currentNode; } if (nodeName.endsWith("owner")) { lockOwnerNode = currentNode; } break; } } if (lockScopeNode != null) { childList = lockScopeNode.getChildNodes(); for (int i = 0; i < childList.getLength(); i++) { Node currentNode = childList.item(i); switch (currentNode.getNodeType()) { case Node.TEXT_NODE: break; case Node.ELEMENT_NODE: String tempScope = currentNode.getNodeName(); if (tempScope.indexOf(':') != -1) { lock.scope = tempScope.substring(tempScope.indexOf(':') + 1); } else { lock.scope = tempScope; } break; } } if (lock.scope == null) { // Bad request resp.setStatus(WebdavStatus.SC_BAD_REQUEST); } } else { // Bad request resp.setStatus(WebdavStatus.SC_BAD_REQUEST); } if (lockTypeNode != null) { childList = lockTypeNode.getChildNodes(); for (int i = 0; i < childList.getLength(); i++) { Node currentNode = childList.item(i); switch (currentNode.getNodeType()) 
{ case Node.TEXT_NODE: break; case Node.ELEMENT_NODE: String tempType = currentNode.getNodeName(); if (tempType.indexOf(':') != -1) { lock.type = tempType.substring(tempType.indexOf(':') + 1); } else { lock.type = tempType; } break; } } if (lock.type == null) { // Bad request resp.setStatus(WebdavStatus.SC_BAD_REQUEST); } } else { // Bad request resp.setStatus(WebdavStatus.SC_BAD_REQUEST); } if (lockOwnerNode != null) { childList = lockOwnerNode.getChildNodes(); for (int i = 0; i < childList.getLength(); i++) { Node currentNode = childList.item(i); switch (currentNode.getNodeType()) { case Node.TEXT_NODE: lock.owner += currentNode.getNodeValue(); break; case Node.ELEMENT_NODE: strWriter = new StringWriter(); domWriter = new DOMWriter(strWriter, true); domWriter.setQualifiedNames(false); domWriter.print(currentNode); lock.owner += strWriter.toString(); break; } } if (lock.owner == null) { // Bad request resp.setStatus(WebdavStatus.SC_BAD_REQUEST); } } else { lock.owner = new String(); } } String path = getRelativePath(req); lock.path = path; // Retrieve the resources DirContext resources = getResources(); if (resources == null) { resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; } boolean exists = true; Object object = null; try { object = resources.lookup(path); } catch (NamingException e) { exists = false; } Enumeration locksList = null; if (lockRequestType == LOCK_CREATION) { // Generating lock id String lockTokenStr = req.getServletPath() + "-" + lock.type + "-" + lock.scope + "-" + req.getUserPrincipal() + "-" + lock.depth + "-" + lock.owner + "-" + lock.tokens + "-" + lock.expiresAt + "-" + System.currentTimeMillis() + "-" + secret; String lockToken = md5Encoder.encode(md5Helper.digest(lockTokenStr.getBytes())); if ((exists) && (object instanceof DirContext) && (lock.depth == INFINITY)) { // Locking a collection (and all its member resources) // Checking if a child resource of this collection is // already locked Vector lockPaths = new Vector(); locksList = collectionLocks.elements(); while (locksList.hasMoreElements()) { WebdavServlet.LockInfo currentLock = (WebdavServlet.LockInfo) locksList.nextElement(); if (currentLock.hasExpired()) { resourceLocks.remove(currentLock.path); continue; } if ((currentLock.path.startsWith(lock.path)) && ((currentLock.isExclusive()) || (lock.isExclusive()))) { // A child collection of this collection is locked lockPaths.addElement(currentLock.path); } } locksList = resourceLocks.elements(); while (locksList.hasMoreElements()) { WebdavServlet.LockInfo currentLock = (WebdavServlet.LockInfo) locksList.nextElement(); if (currentLock.hasExpired()) { resourceLocks.remove(currentLock.path); continue; } if ((currentLock.path.startsWith(lock.path)) && ((currentLock.isExclusive()) || (lock.isExclusive()))) { // A child resource of this collection is locked lockPaths.addElement(currentLock.path); } } if (!lockPaths.isEmpty()) { // One of the child paths was locked // We generate a multistatus error report Enumeration lockPathsList = lockPaths.elements(); resp.setStatus(WebdavStatus.SC_CONFLICT); XMLWriter generatedXML = new XMLWriter(); generatedXML.writeXMLHeader(); generatedXML.writeElement(null, "multistatus" + generateNamespaceDeclarations(), XMLWriter.OPENING); while (lockPathsList.hasMoreElements()) { generatedXML.writeElement(null, "response", XMLWriter.OPENING); generatedXML.writeElement(null, "href", XMLWriter.OPENING); generatedXML.writeText((String) lockPathsList.nextElement()); generatedXML.writeElement(null, "href", 
XMLWriter.CLOSING); generatedXML.writeElement(null, "status", XMLWriter.OPENING); generatedXML.writeText("HTTP/1.1 " + WebdavStatus.SC_LOCKED + " " + WebdavStatus.getStatusText(WebdavStatus.SC_LOCKED)); generatedXML.writeElement(null, "status", XMLWriter.CLOSING); generatedXML.writeElement(null, "response", XMLWriter.CLOSING); } generatedXML.writeElement(null, "multistatus", XMLWriter.CLOSING); Writer writer = resp.getWriter(); writer.write(generatedXML.toString()); writer.close(); return; } boolean addLock = true; // Checking if there is already a shared lock on this path locksList = collectionLocks.elements(); while (locksList.hasMoreElements()) { WebdavServlet.LockInfo currentLock = (WebdavServlet.LockInfo) locksList.nextElement(); if (currentLock.path.equals(lock.path)) { if (currentLock.isExclusive()) { resp.sendError(WebdavStatus.SC_LOCKED); return; } else { if (lock.isExclusive()) { resp.sendError(WebdavStatus.SC_LOCKED); return; } } currentLock.tokens.addElement(lockToken); lock = currentLock; addLock = false; } } if (addLock) { lock.tokens.addElement(lockToken); collectionLocks.addElement(lock); } } else { // Locking a single resource // Retrieving an already existing lock on that resource WebdavServlet.LockInfo presentLock = (WebdavServlet.LockInfo) resourceLocks.get(lock.path); if (presentLock != null) { if ((presentLock.isExclusive()) || (lock.isExclusive())) { // If either lock is exclusive, the lock can't be // granted resp.sendError(WebdavStatus.SC_PRECONDITION_FAILED); return; } else { presentLock.tokens.addElement(lockToken); lock = presentLock; } } else { lock.tokens.addElement(lockToken); resourceLocks.put(lock.path, lock); // Checking if a resource exists at this path exists = true; try { object = resources.lookup(path); } catch (NamingException e) { exists = false; } if (!exists) { // "Creating" a lock-null resource int slash = lock.path.lastIndexOf('/'); String parentPath = lock.path.substring(0, slash); Vector lockNulls = (Vector) lockNullResources.get(parentPath); if (lockNulls == null) { lockNulls = new Vector(); lockNullResources.put(parentPath, lockNulls); } lockNulls.addElement(lock.path); } // Add the Lock-Token header as by RFC 2518 8.10.1 // - only do this for newly created locks resp.addHeader("Lock-Token", "<opaquelocktoken:" + lockToken + ">"); } } } if (lockRequestType == LOCK_REFRESH) { String ifHeader = req.getHeader("If"); if (ifHeader == null) { ifHeader = ""; } // Checking resource locks WebdavServlet.LockInfo toRenew = (WebdavServlet.LockInfo) resourceLocks.get(path); Enumeration tokenList = null; if (lock != null) { // At least one of the tokens of the locks must have been given tokenList = toRenew.tokens.elements(); while (tokenList.hasMoreElements()) { String token = (String) tokenList.nextElement(); if (ifHeader.indexOf(token) != -1) { toRenew.expiresAt = lock.expiresAt; lock = toRenew; } } } // Checking inheritable collection locks Enumeration collectionLocksList = collectionLocks.elements(); while (collectionLocksList.hasMoreElements()) { toRenew = (WebdavServlet.LockInfo) collectionLocksList.nextElement(); if (path.equals(toRenew.path)) { tokenList = toRenew.tokens.elements(); while (tokenList.hasMoreElements()) { String token = (String) tokenList.nextElement(); if (ifHeader.indexOf(token) != -1) { toRenew.expiresAt = lock.expiresAt; lock = toRenew; } } } } } // Set the status, then generate the XML response containing // the lock information XMLWriter generatedXML = new XMLWriter(); generatedXML.writeXMLHeader(); 
generatedXML.writeElement(null, "prop" + generateNamespaceDeclarations(), XMLWriter.OPENING); generatedXML.writeElement(null, "lockdiscovery", XMLWriter.OPENING); lock.toXML(generatedXML); generatedXML.writeElement(null, "lockdiscovery", XMLWriter.CLOSING); generatedXML.writeElement(null, "prop", XMLWriter.CLOSING); resp.setStatus(WebdavStatus.SC_OK); resp.setContentType("text/xml; charset=UTF-8"); Writer writer = resp.getWriter(); writer.write(generatedXML.toString()); writer.close(); }
From source file:org.apache.webdav.lib.WebdavResource.java
/**
 * Execute the PROPFIND method by property name for the given path.
 * Gets the list of named WebDAV properties on the given resource.
 *
 * <p>After this method has been used, the status code in the 207
 * response needs to be set for the WebdavResource method.
 *
 * <p>The values DepthSupport.DEPTH_0, DepthSupport.DEPTH_1 and
 * DepthSupport.DEPTH_INFINITY are possible for the depth.
 *
 * @param path the server relative path of the resource to request
 * @param depth the depth
 * @param properties the named properties
 * @return an enumeration of <code>ResponseEntity</code>
 * @exception HttpException
 * @exception IOException
 */
public Enumeration propfindMethod(String path, int depth, Vector properties) throws HttpException, IOException {
    setClient();
    // Change the depth for prop
    PropFindMethod method = new PropFindMethod(URIUtil.encodePath(path), depth, properties.elements());
    method.setDebug(debug);
    method.setFollowRedirects(this.followRedirects);

    generateTransactionHeader(method);
    generateAdditionalHeaders(method);
    int status = client.executeMethod(method);

    // Set status code for this resource.
    if (thisResource == true) {
        // Set the status code.
        setStatusCode(method.getStatusLine().getStatusCode());
    }

    // Also accept OK sent by buggy servers.
    if (status != HttpStatus.SC_MULTI_STATUS && status != HttpStatus.SC_OK) {
        HttpException ex = new HttpException();
        ex.setReasonCode(status);
        throw ex;
    }
    thisResource = false;

    return method.getResponses();
}
From source file:org.apache.webdav.lib.WebdavResource.java
public Enumeration reportMethod(HttpURL httpURL, Vector properties) throws HttpException, IOException {
    setClient();
    // Default depth=0, type=by_name
    ReportMethod method = new ReportMethod(httpURL.getEscapedPath(), DepthSupport.DEPTH_0,
            properties.elements());
    method.setDebug(debug);
    method.setFollowRedirects(this.followRedirects);

    generateTransactionHeader(method);
    generateAdditionalHeaders(method);
    client.executeMethod(method);

    return method.getResponses();
}
From source file:org.apache.webdav.lib.WebdavResource.java
public Enumeration reportMethod(HttpURL httpURL, Vector properties, int depth) throws HttpException, IOException {
    setClient();
    // Default depth=0, type=by_name
    ReportMethod method = new ReportMethod(httpURL.getEscapedPath(), depth, properties.elements());
    method.setDebug(debug);
    method.setFollowRedirects(this.followRedirects);

    generateTransactionHeader(method);
    generateAdditionalHeaders(method);
    client.executeMethod(method);

    /*first draft, does work anyhow
    Enumeration results = method.getAllResponseURLs();
    return results;*/

    /*
    Enumeration responses = method.getResponses();
    ResponseEntity response = (ResponseEntity) responses.nextElement();
    String href = (String) response.getHref();
    Enumeration results = method.getResponseProperties(href);
    return results;*/

    Vector results = new Vector();
    Enumeration responses = method.getResponses();
    while (responses.hasMoreElements()) {
        ResponseEntity response = (ResponseEntity) responses.nextElement();
        String href = response.getHref();
        String sResult = href;

        // Set status code for this resource.
        if ((thisResource == true) && (response.getStatusCode() > 0))
            setStatusCode(response.getStatusCode());
        thisResource = false;

        Enumeration responseProperties = method.getResponseProperties(href);
        while (responseProperties.hasMoreElements()) {
            Property property = (Property) responseProperties.nextElement();
            sResult += "\n" + property.getName() + ":\t" + DOMUtils.getTextValue(property.getElement());
            // results.addElement(DOMUtils.getTextValue(property.getElement()));
        }
        results.addElement(sResult);
    }
    return results.elements();
}
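On the consuming side, a caller that receives the Enumeration returned by reportMethod (or any Vector.elements() result) can snapshot it into a List with Collections.list, which is often easier to work with than re-enumerating. A minimal sketch; the result strings below are made up to mimic the href-plus-properties format built above.

import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;

public class EnumerationToList {
    public static void main(String[] args) {
        Vector<String> results = new Vector<String>();
        results.add("/files/report.txt\ncreationdate:\t2004-01-01");
        results.add("/files/notes.txt\ncreationdate:\t2004-02-01");

        // Collections.list() drains an Enumeration into an ArrayList snapshot
        Enumeration<String> enumeration = results.elements();
        List<String> asList = Collections.list(enumeration);
        System.out.println(asList.size()); // prints 2
    }
}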