List of usage examples for java.util.LinkedList.addLast
public void addLast(E e)
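LinkedList implements Deque, so addLast(e) appends the element to the tail of the list in constant time; it behaves like add(e) but makes the queue-style intent explicit. A minimal standalone sketch, not taken from any of the projects below:

import java.util.LinkedList;

public class AddLastDemo {
    public static void main(String[] args) {
        LinkedList<String> names = new LinkedList<>();
        names.addLast("alpha");    // tail append: [alpha]
        names.addLast("beta");     // tail append: [alpha, beta]
        names.addFirst("omega");   // head insert: [omega, alpha, beta]
        System.out.println(names); // prints [omega, alpha, beta]
    }
}

Many of the examples below pair addLast with removeFirst to use the LinkedList as a FIFO work queue.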
From source file:org.apache.tapestry.util.AdaptorRegistry.java
/**
 * Searches the registration Map for a match, based on inheritance.
 *
 * <p>Searches class inheritance first, then interfaces (in a rather vague order).
 * Really should match the order from the JVM spec.
 *
 * <p>There's a degenerate case where we may check the same interface more than once:
 * <ul>
 * <li>Two interfaces, I1 and I2
 * <li>Two classes, C1 and C2
 * <li>I2 extends I1
 * <li>C2 extends C1
 * <li>C1 implements I1
 * <li>C2 implements I2
 * <li>The search will be: C2, C1, I2, I1, I1
 * <li>I1 is searched twice, because C1 implements it, and I2 extends it
 * <li>There are other such cases, but none of them cause infinite loops
 * and most are rare (we could guard against it, but its relatively expensive).
 * <li>Multiple checks only occur if we don't find a registration
 * </ul>
 *
 * <p>This method is only called from a synchronized block, so it is
 * implicitly synchronized.
 **/
private Object searchForAdaptor(Class subjectClass) {
    LinkedList queue = null;
    Object result = null;

    if (LOG.isDebugEnabled())
        LOG.debug("Searching for adaptor for class " + Tapestry.getClassName(subjectClass));

    // Step one: work up through the class inheritance.
    Class searchClass = subjectClass;

    // Primitive types have null, not Object, as their parent class.
    while (searchClass != Object.class && searchClass != null) {
        result = registrations.get(searchClass);
        if (result != null)
            return result;

        // Not an exact match. If the search class
        // implements any interfaces, add them to the queue.
        Class[] interfaces = searchClass.getInterfaces();
        int length = interfaces.length;

        if (queue == null && length > 0)
            queue = new LinkedList();

        for (int i = 0; i < length; i++)
            queue.addLast(interfaces[i]);

        // Advance up to the next superclass
        searchClass = getSuperclass(searchClass);
    }

    // Ok, the easy part failed, lets start searching interfaces.
    if (queue != null) {
        while (!queue.isEmpty()) {
            searchClass = (Class) queue.removeFirst();

            result = registrations.get(searchClass);
            if (result != null)
                return result;

            // Interfaces can extend other interfaces; add them to the queue.
            Class[] interfaces = searchClass.getInterfaces();
            int length = interfaces.length;

            for (int i = 0; i < length; i++)
                queue.addLast(interfaces[i]);
        }
    }

    // Not a match on interface; our last gasp is to check
    // for a registration for java.lang.Object
    result = registrations.get(Object.class);
    if (result != null)
        return result;

    // No match? That's rare ... and an error.
    throw new IllegalArgumentException(
            Tapestry.format("AdaptorRegistry.adaptor-not-found", Tapestry.getClassName(subjectClass)));
}
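The Tapestry example above uses the LinkedList as a plain FIFO queue: addLast enqueues newly discovered interfaces at the tail, removeFirst dequeues from the head, and that ordering is what makes the search breadth-first. A minimal, self-contained sketch of the same addLast/removeFirst queue pattern follows; the Node type and findByName method are hypothetical and exist only to illustrate the pattern, they are not part of Tapestry.

import java.util.LinkedList;

public class BfsQueueSketch {

    /** Hypothetical tree node, used only for illustration. */
    static class Node {
        final String name;
        final Node[] children;

        Node(String name, Node... children) {
            this.name = name;
            this.children = children;
        }
    }

    /** Breadth-first search for a node with the given name, using addLast/removeFirst as a FIFO queue. */
    static Node findByName(Node root, String target) {
        LinkedList<Node> queue = new LinkedList<>();
        queue.addLast(root); // enqueue at the tail
        while (!queue.isEmpty()) {
            Node current = queue.removeFirst(); // dequeue from the head, so nodes are visited level by level
            if (current.name.equals(target)) {
                return current;
            }
            for (Node child : current.children) {
                queue.addLast(child); // children go to the back of the queue
            }
        }
        return null; // not found
    }

    public static void main(String[] args) {
        Node root = new Node("root", new Node("a", new Node("c")), new Node("b"));
        System.out.println(findByName(root, "c").name); // prints c
    }
}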
From source file:edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
private void computeEquivalentPropertyGroups() throws IOException {
    //load all properties and assign an id to them
    //dbpedia properties are loaded first
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")
                            && (s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !propertyIds.containsKey(s)) {
                        propertyIds.put(s, propertyIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        if ((s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(s)) {
                            propertyIds.put(s, propertyIds.size() + 1);
                        }
                        if ((v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(v)) {
                            propertyIds.put(v, propertyIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    //now non-dbpedia properties are loaded
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")
                            && !(s.equals("http://www.w3.org/2000/01/rdf-schema#label")
                                    || s.equals("http://www.w3.org/2002/07/owl#sameAs")
                                    || s.startsWith("http://www.dbpedia.org")
                                    || s.startsWith("http://dbpedia.org"))
                            && !propertyIds.containsKey(s)) {
                        propertyIds.put(s, propertyIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        if (!(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(s)) {
                            propertyIds.put(s, propertyIds.size() + 1);
                        }
                        if (!(v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(v)) {
                            propertyIds.put(v, propertyIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    //create the equivalentPropertyEdges sets
    equivalentPropertyEdges = new HashSet[propertyIds.size() + 1];
    propertyById = new String[propertyIds.size() + 1];
    for (Map.Entry<String, Integer> e : propertyIds.entrySet()) {
        propertyById[e.getValue()] = e.getKey();
    }
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        String s = m.group(2);
                        int idS = propertyIds.get(s);
                        String v = m.group(7);
                        int idV = propertyIds.get(v);
                        if (equivalentPropertyEdges[idS] == null) {
                            equivalentPropertyEdges[idS] = new HashSet<>();
                        }
                        equivalentPropertyEdges[idS].add(idV);
                        if (equivalentPropertyEdges[idV] == null) {
                            equivalentPropertyEdges[idV] = new HashSet<>();
                        }
                        equivalentPropertyEdges[idV].add(idS);
                    }
                    /* else if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                        String s = m.group(2);
                        String v = m.group(7);
                        if (v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")) {
                            properties.add(s);
                        } else if (v.equals("http://www.w3.org/2000/01/rdf-schema#Class")) {
                            classes.add(s);
                        }
                    } */
                }
                l = in.readLine();
            }
        }
    }
    equivalentProperty = new int[propertyIds.size() + 1];
    int i = 1;
    while (i < equivalentProperty.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst();
            if (equivalentProperty[j] != 0) {
                if (equivalentProperty[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                equivalentProperty[j] = i;
                if (equivalentPropertyEdges[j] != null) {
                    for (int k : equivalentPropertyEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        while (i < equivalentProperty.length && equivalentProperty[i] != 0) {
            i++;
        }
    }
}
From source file:edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
private void computeEquivalentClassGroups() throws IOException {
    //load all classes and assign an id to them
    //dbpedia classes are loaded first
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/2000/01/rdf-schema#Class")
                            && (s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !classIds.containsKey(s)) {
                        classIds.put(s, classIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        if ((s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(s)) {
                            classIds.put(s, classIds.size() + 1);
                        }
                        if ((v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(v)) {
                            classIds.put(v, classIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    //now non-dbpedia classes are loaded
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/2000/01/rdf-schema#Class")
                            && !(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !classIds.containsKey(s)) {
                        classIds.put(s, classIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        if (!(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(s)) {
                            classIds.put(s, classIds.size() + 1);
                        }
                        if (!(v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(v)) {
                            classIds.put(v, classIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    //create the equivalentClassEdges sets
    equivalentClassEdges = new HashSet[classIds.size() + 1];
    classById = new String[classIds.size() + 1];
    for (Map.Entry<String, Integer> e : classIds.entrySet()) {
        classById[e.getValue()] = e.getKey();
    }
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        String s = m.group(2);
                        int idS = classIds.get(s);
                        String v = m.group(7);
                        int idV = classIds.get(v);
                        if (equivalentClassEdges[idS] == null) {
                            equivalentClassEdges[idS] = new HashSet<>();
                        }
                        equivalentClassEdges[idS].add(idV);
                        if (equivalentClassEdges[idV] == null) {
                            equivalentClassEdges[idV] = new HashSet<>();
                        }
                        equivalentClassEdges[idV].add(idS);
                    }
                    /* else if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                        String s = m.group(2);
                        String v = m.group(7);
                        if (v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")) {
                            properties.add(s);
                        } else if (v.equals("http://www.w3.org/2000/01/rdf-schema#Class")) {
                            classes.add(s);
                        }
                    } */
                }
                l = in.readLine();
            }
        }
    }
    //manually add an equivalence:
    //http://www4.wiwiss.fu-berlin.de/sider/resource/sider/drugs equivalentClass
    {
        String s = "http://www4.wiwiss.fu-berlin.de/sider/resource/sider/drugs";
        int idS = classIds.get(s);
        String v = "http://dbpedia.org/ontology/Drug";
        int idV = classIds.get(v);
        if (equivalentClassEdges[idS] == null) {
            equivalentClassEdges[idS] = new HashSet<>();
        }
        equivalentClassEdges[idS].add(idV);
        if (equivalentClassEdges[idV] == null) {
            equivalentClassEdges[idV] = new HashSet<>();
        }
        equivalentClassEdges[idV].add(idS);
    }
    equivalentClass = new int[classIds.size() + 1];
    int i = 1;
    while (i < equivalentClass.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst();
            if (equivalentClass[j] != 0) {
                if (equivalentClass[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                equivalentClass[j] = i;
                if (equivalentClassEdges[j] != null) {
                    for (int k : equivalentClassEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        while (i < equivalentClass.length && equivalentClass[i] != 0) {
            i++;
        }
    }
}
From source file:org.jnap.core.mvc.async.AsyncRequestInterceptor.java
@Override
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler,
        ModelAndView modelAndView) throws Exception {
    if (isAsync(request, handler)) {
        Method handlerMethod = (Method) request
                .getAttribute(RestfulHandlerAdapter.CURRENT_HANDLER_METHOD_ATTRIBUTE);
        if (handlerMethod != null) {
            LinkedList<AsyncResponseHandler> handlers = new LinkedList<AsyncResponseHandler>();
            AsyncResponseHandler responseHandler = null;

            if (AsyncResponseModel.class.isInstance(modelAndView)) {
                // TODO AsyncState.SUSPEND_RESPONSE
            }

            if (handlerMethod.isAnnotationPresent(Broadcast.class)) {
                Broadcast annotation = handlerMethod.getAnnotation(Broadcast.class);
                int delay = annotation.delay();
                Class[] suspendTimeout = annotation.value();
                AsyncState state = annotation.resumeOnBroadcast() ? AsyncState.RESUME_ON_BROADCAST
                        : AsyncState.BROADCAST;
                responseHandler = new AsyncResponseHandler(state, delay, 0, SCOPE.APPLICATION, true,
                        suspendTimeout, null);
                handlers.addLast(responseHandler);
                if (handlerMethod.isAnnotationPresent(Cluster.class)) {
                    // TODO add @Cluster support
                }
            }

            if (handlerMethod.isAnnotationPresent(Suspend.class)) {
                Suspend annotation = handlerMethod.getAnnotation(Suspend.class);
                long suspendTimeout = annotation.period();
                suspendTimeout = TimeUnitConverter.convert(suspendTimeout, annotation.timeUnit());
                Suspend.SCOPE scope = annotation.scope();
                boolean outputComments = annotation.outputComments();
                boolean trackable = false;
                // TODO add Trackable support
                // if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
                //     trackable = true;
                // }
                AsyncState state = annotation.resumeOnBroadcast() ? AsyncState.SUSPEND_RESUME
                        : AsyncState.SUSPEND;
                if (trackable) {
                    state = AsyncState.SUSPEND_TRACKABLE;
                }
                responseHandler = new AsyncResponseHandler(state, suspendTimeout, 0, scope, outputComments);
                responseHandler.setListeners(createListeners(annotation.listeners()));
                handlers.addFirst(responseHandler);
            }

            if (handlerMethod.isAnnotationPresent(Subscribe.class)) {
                boolean trackable = false;
                // TODO add Trackable support
                // if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
                //     trackable = true;
                // }
                Subscribe annotation = handlerMethod.getAnnotation(Subscribe.class);
                AsyncState state = trackable ? AsyncState.SUBSCRIBE_TRACKABLE : AsyncState.SUBSCRIBE;
                String topic = annotation.value(); // TODO add SpEL support
                responseHandler = new AsyncResponseHandler(state, 30000, -1, Suspend.SCOPE.APPLICATION, false,
                        null, topic);
                responseHandler.setListeners(createListeners(annotation.listeners()));
                handlers.addFirst(responseHandler);
            }

            if (handlerMethod.isAnnotationPresent(Publish.class)) {
                String topic = handlerMethod.getAnnotation(Publish.class).value(); // TODO add SpEL support
                responseHandler = new AsyncResponseHandler(AsyncState.PUBLISH, 30000, -1,
                        Suspend.SCOPE.APPLICATION, false, null, topic);
                handlers.addFirst(responseHandler);
            }

            if (handlerMethod.isAnnotationPresent(Resume.class)) {
                handlers.addFirst(new AsyncResponseHandler(AsyncState.RESUME,
                        handlerMethod.getAnnotation(Resume.class).value()));
            }

            if (handlerMethod.isAnnotationPresent(Schedule.class)) {
                Schedule annotation = handlerMethod.getAnnotation(Schedule.class);
                AsyncState state = annotation.resumeOnBroadcast() ? AsyncState.SCHEDULE_RESUME
                        : AsyncState.SCHEDULE;
                handlers.addFirst(new AsyncResponseHandler(state, annotation.period(), annotation.waitFor()));
            }

            for (AsyncResponseHandler asyncHandler : handlers) {
                asyncHandler.handle(request, response, modelAndView);
            }
        } else {
            logger.warn("Atmosphere annotation support disabled on this request.");
        }
    }
}
From source file:org.gcaldaemon.core.GmailEntry.java
public final GmailMessage[] receive(String title) throws Exception {
    // Open 'INBOX' folder
    Folder inbox = mailbox.getFolder("INBOX");
    inbox.open(Folder.READ_WRITE);
    Message[] messages = inbox.getMessages();
    if (messages == null || messages.length == 0) {
        return new GmailMessage[0];
    }

    // Loop on messages
    LinkedList list = new LinkedList();
    for (int i = 0; i < messages.length; i++) {
        Message msg = messages[i];
        if (!msg.isSet(Flag.SEEN)) {
            String subject = msg.getSubject();
            if (title == null || title.length() == 0 || title.equals(subject)) {
                GmailMessage gm = new GmailMessage();
                Address[] from = msg.getFrom();
                msg.setFlag(Flag.SEEN, true);
                if (from == null || from.length == 0) {
                    continue;
                }
                gm.subject = subject;
                gm.from = from[0].toString();
                gm.memo = String.valueOf(msg.getContent());
                list.addLast(gm);
            }
        }
    }
    inbox.close(true);

    // Return the array of the messages
    GmailMessage[] array = new GmailMessage[list.size()];
    list.toArray(array);
    return array;
}
From source file:com.android.utils.AccessibilityNodeInfoUtils.java
/**
 * Returns the result of applying a filter using breadth-first traversal.
 *
 * @param node The root node to traverse from.
 * @param filter The filter to satisfy.
 * @return The first node reached via BFS traversal that satisfies the filter.
 */
public static AccessibilityNodeInfoCompat searchFromBfs(AccessibilityNodeInfoCompat node, NodeFilter filter) {
    if (node == null) {
        return null;
    }

    final LinkedList<AccessibilityNodeInfoCompat> queue = new LinkedList<>();
    Set<AccessibilityNodeInfoCompat> visitedNodes = new HashSet<>();

    queue.add(AccessibilityNodeInfoCompat.obtain(node));

    try {
        while (!queue.isEmpty()) {
            final AccessibilityNodeInfoCompat item = queue.removeFirst();
            visitedNodes.add(item);

            if (filter.accept(item)) {
                return item;
            }

            final int childCount = item.getChildCount();

            for (int i = 0; i < childCount; i++) {
                final AccessibilityNodeInfoCompat child = item.getChild(i);
                if (child != null && !visitedNodes.contains(child)) {
                    queue.addLast(child);
                }
            }

            item.recycle();
        }
    } finally {
        while (!queue.isEmpty()) {
            queue.removeFirst().recycle();
        }
    }

    return null;
}
From source file:com.commander4j.db.JDBPrinters.java
public LinkedList<JDBListData> getPrinterIDs() {
    LinkedList<JDBListData> intList = new LinkedList<JDBListData>();
    PreparedStatement stmt;
    ResultSet rs;
    setErrorMessage("");
    Icon icon = new ImageIcon();
    int index = 0;

    try {
        stmt = Common.hostList.getHost(getHostID()).getConnection(getSessionID()).prepareStatement(
                Common.hostList.getHost(getHostID()).getSqlstatements().getSQL("JDBPrinters.getPrinterIDs"));
        stmt.setFetchSize(250);
        stmt.setString(1, "Y");
        rs = stmt.executeQuery();

        while (rs.next()) {
            getPropertiesfromResultSet(rs);
            icon = getPrinterIcon();
            JDBListData mld = new JDBListData(icon, index, true, rs.getString("printer_id"));
            intList.addLast(mld);
        }

        rs.close();
        stmt.close();
    } catch (SQLException e) {
        setErrorMessage(e.getMessage());
    }

    return intList;
}
From source file:com.jaspersoft.studio.property.section.style.inerithance.StylesListSection.java
/**
 * Build the hierarchy of styles of an element
 *
 * @param element
 *          Element from which the styles list will be generated
 * @return A list of MStyle, where the first is the style assigned to the element, the second is the style
 *         assigned to the first item of the list and so on
 */
private LinkedList<MStyle> buildStylesGerarchy(APropertyNode element) {
    LinkedList<MStyle> result = new LinkedList<MStyle>();
    Object style = getElementStyle(element);
    StyleContainer styleContainer = styleMaps.get(style);
    while (styleContainer != null) {
        MStyle styleModel = styleContainer.getStyle();
        if (!result.contains(styleModel))
            result.addLast(styleModel);
        else {
            //The style has itself set as parent style, break the cycle.
            //This shouldn't happen, but maybe the jrxml was modified manually;
            //it's better to put this check to avoid a java heap exception
            break;
        }
        style = getElementStyle(styleModel);
        styleContainer = styleMaps.get(style);
    }
    return result;
}
From source file:de.interactive_instruments.ShapeChange.Model.EA.EADocument.java
public void executeCommonInitializationProcedure() throws ShapeChangeAbortException {

    // determine if specific packages should not be loaded
    this.excludedPackageNames = options.getExcludedPackages();

    /** Cache classes and packages */
    // First set up initial evaluation tasks of packages consisting
    // of the models in the repository
    class EvalTask {
        PackageInfoEA fatherPI;
        org.sparx.Package eaPackage;

        EvalTask(PackageInfoEA fpi, org.sparx.Package p) {
            fatherPI = fpi;
            eaPackage = p;
        }
    }

    StatusBoard.getStatusBoard().statusChanged(STATUS_EADOCUMENT_READMODEL);

    LinkedList<EvalTask> evalp = new LinkedList<EvalTask>();
    Collection<org.sparx.Package> model = repository.GetModels();
    for (org.sparx.Package p : model) {
        // Check if this model and all its contents shall be excluded
        String name = p.GetName();
        if (excludedPackageNames != null && excludedPackageNames.contains(name)) {
            // stop processing this model and continue with the next
            continue;
        }
        evalp.addLast(new EvalTask(null, p));
    }

    // Now remove tasks from the list, adding further tasks as we proceed
    // until we have no more tasks to evaluate
    while (evalp.size() > 0) {
        // Remove next evaluation task
        EvalTask et = evalp.removeFirst();
        org.sparx.Package pack = et.eaPackage;
        PackageInfoEA fpi = et.fatherPI;

        // Check if this package and all its contents shall be excluded from
        // the model
        String name = pack.GetName();
        if (excludedPackageNames != null && excludedPackageNames.contains(name)) {
            // stop processing this package and continue with the next
            continue;
        }

        // Add to package cache. The PackageInfo Ctor does the necessary
        // parent/child linkage of packages
        Element packelmt = pack.GetElement();
        PackageInfoEA pi = new PackageInfoEA(this, fpi, pack, packelmt);
        fPackageById.put(pi.id(), pi);
        if (packelmt != null)
            this.fPackageByElmtId.put(new Integer(packelmt.GetElementID()).toString(), pi);

        // Now pick all classes and add these to their caches.
        for (org.sparx.Element elmt : pack.GetElements()) {
            String type = elmt.GetType();
            if (!type.equals("DataType") && !type.equals("Class") && !type.equals("Interface")
                    && !type.equals("Enumeration"))
                continue;
            ClassInfoEA ci = new ClassInfoEA(this, pi, elmt);
            fClassById.put(ci.id(), ci);
            // TODO What's happening to identical class names? How is this
            // supposed to be handled? Open issue. While classifier names
            // have to be unique per app schema only, it is a legacy from
            // Rational Rose that it is expected that classifier names are
            // unique in the whole model. The correct solution would be to
            // add namespace qualifiers.
            fClassByName.put(ci.name(), ci);
        }
        // Add next level packages for further evaluation
        for (org.sparx.Package pnxt : pack.GetPackages()) {
            evalp.addLast(new EvalTask(pi, pnxt));
        }
    }

    StatusBoard.getStatusBoard().statusChanged(STATUS_EADOCUMENT_ESTABLISHCLASSES);

    /**
     * Now that all classes are collected, in a second go establish class
     * derivation hierarchy and all other associations between classes.
     */
    for (ClassInfoEA ci : fClassById.values()) {
        // Generalization - class derivation hierarchy
        ci.establishClassDerivationHierarchy();
        // Other associations where the class is source or target
        ci.establishAssociations();
    }

    String checkingConstraints = options.parameter("checkingConstraints");
    if (checkingConstraints == null || !checkingConstraints.toLowerCase().trim().equals("disabled")) {
        StatusBoard.getStatusBoard().statusChanged(STATUS_EADOCUMENT_READCONSTARINTS);

        // TODO The following may be removed when constraints have been tested.
        /** In a third go collect all constraints */
        for (ClassInfoEA ci : fClassById.values()) {
            ci.constraints();
            SortedMap<StructuredNumber, PropertyInfo> props = ci.properties();
            for (PropertyInfo pi : props.values())
                pi.constraints();
        }
    }

    /**
     * Loop over all schemas (i.e. packages with a target namespace) and
     * store the schema location, so that it can be added in import
     * statements
     */
    SortedSet<PackageInfo> schemas = schemas("");
    for (Iterator<PackageInfo> i = schemas.iterator(); i.hasNext();) {
        PackageInfo pi = i.next();
        options.addSchemaLocation(pi.targetNamespace(), pi.xsdDocument());
    }

    // ==============================
    // load diagrams if so requested
    String loadDiagrams = options.parameter("loadDiagrams");

    if (loadDiagrams != null && loadDiagrams.equalsIgnoreCase("true")) {

        java.io.File tmpDir = options.imageTmpDir();

        if (tmpDir.exists()) {
            // probably content from previous run, delete the content of the directory
            try {
                FileUtils.deleteDirectory(tmpDir);
            } catch (IOException e) {
                result.addWarning(null, 34, tmpDir.getAbsolutePath());
            }

            if (!tmpDir.exists()) {
                try {
                    FileUtils.forceMkdir(tmpDir);
                } catch (IOException e) {
                    result.addWarning(null, 32, tmpDir.getAbsolutePath());
                }
            }
        }

        AtomicInteger imgIdCounter = new AtomicInteger(0);

        SortedSet<? extends PackageInfo> selectedSchema = this.selectedSchemas();

        for (PackageInfo pi : selectedSchema) {

            if (pi == null) {
                continue;
            }

            // Only process schemas in a namespace and name that matches a
            // user-selected pattern
            if (options.skipSchema(null, pi))
                continue;

            saveDiagrams(imgIdCounter, "img", tmpDir, escapeFileName(tmpDir.getName()), pi);
        }
    }
}
From source file:org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext.java
public static JavaPairRDD<Long, FrameBlock> toFrameJavaPairRDD(JavaSparkContext sc, FrameBlock src)
        throws DMLRuntimeException {
    long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
    LinkedList<Tuple2<Long, FrameBlock>> list = new LinkedList<Tuple2<Long, FrameBlock>>();

    //create and write subblocks of matrix
    int blksize = ConfigurationManager.getBlocksize();
    for (int blockRow = 0; blockRow < (int) Math.ceil(src.getNumRows() / (double) blksize); blockRow++) {
        int maxRow = (blockRow * blksize + blksize < src.getNumRows()) ? blksize
                : src.getNumRows() - blockRow * blksize;
        int roffset = blockRow * blksize;

        FrameBlock block = new FrameBlock(src.getSchema());

        //copy sub frame to block, incl meta data on first
        src.sliceOperations(roffset, roffset + maxRow - 1, 0, src.getNumColumns() - 1, block);
        if (roffset == 0)
            block.setColumnMetadata(src.getColumnMetadata());

        //append block to sequence file
        list.addLast(new Tuple2<Long, FrameBlock>((long) roffset + 1, block));
    }

    JavaPairRDD<Long, FrameBlock> result = sc.parallelizePairs(list);

    if (DMLScript.STATISTICS) {
        Statistics.accSparkParallelizeTime(System.nanoTime() - t0);
        Statistics.incSparkParallelizeCount(1);
    }

    return result;
}