List of usage examples for the java.util.LinkedList.isEmpty() method.
boolean isEmpty();
From source file:org.apache.hadoop.hbase.util.RegionSplitter.java
static LinkedList<Pair<byte[], byte[]>> splitScan(LinkedList<Pair<byte[], byte[]>> regionList, HTable table, SplitAlgorithm splitAlgo) throws IOException, InterruptedException { LinkedList<Pair<byte[], byte[]>> finished = Lists.newLinkedList(); LinkedList<Pair<byte[], byte[]>> logicalSplitting = Lists.newLinkedList(); LinkedList<Pair<byte[], byte[]>> physicalSplitting = Lists.newLinkedList(); // get table info Path rootDir = FSUtils.getRootDir(table.getConfiguration()); Path tableDir = FSUtils.getTableDir(rootDir, table.getName()); FileSystem fs = tableDir.getFileSystem(table.getConfiguration()); HTableDescriptor htd = table.getTableDescriptor(); // clear the cache to forcibly refresh region information table.clearRegionCache();//w w w . j ava2 s . c o m // for every region that hasn't been verified as a finished split for (Pair<byte[], byte[]> region : regionList) { byte[] start = region.getFirst(); byte[] split = region.getSecond(); // see if the new split daughter region has come online try { HRegionInfo dri = table.getRegionLocation(split).getRegionInfo(); if (dri.isOffline() || !Bytes.equals(dri.getStartKey(), split)) { logicalSplitting.add(region); continue; } } catch (NoServerForRegionException nsfre) { // NSFRE will occur if the old hbase:meta entry has no server assigned LOG.info(nsfre); logicalSplitting.add(region); continue; } try { // when a daughter region is opened, a compaction is triggered // wait until compaction completes for both daughter regions LinkedList<HRegionInfo> check = Lists.newLinkedList(); check.add(table.getRegionLocation(start).getRegionInfo()); check.add(table.getRegionLocation(split).getRegionInfo()); for (HRegionInfo hri : check.toArray(new HRegionInfo[] {})) { byte[] sk = hri.getStartKey(); if (sk.length == 0) sk = splitAlgo.firstRow(); String startKey = splitAlgo.rowToStr(sk); HRegionFileSystem regionFs = HRegionFileSystem .openRegionFromFileSystem(table.getConfiguration(), fs, tableDir, hri, true); // check every Column Family 
for that region boolean refFound = false; for (HColumnDescriptor c : htd.getFamilies()) { if ((refFound = regionFs.hasReferences(htd.getTableName().getNameAsString()))) { break; } } // compaction is completed when all reference files are gone if (!refFound) { check.remove(hri); } } if (check.isEmpty()) { finished.add(region); } else { physicalSplitting.add(region); } } catch (NoServerForRegionException nsfre) { LOG.debug("No Server Exception thrown for: " + splitAlgo.rowToStr(start)); physicalSplitting.add(region); table.clearRegionCache(); } } LOG.debug("Split Scan: " + finished.size() + " finished / " + logicalSplitting.size() + " split wait / " + physicalSplitting.size() + " reference wait"); return finished; }
From source file:Graph.java
/** * Perform a breadth first search of this graph, starting at v. The vist may * be cut short if visitor throws an exception during a vist callback. * /*from ww w. j a v a 2 s. co m*/ * @param <E> * * @param v - * the search starting point * @param visitor - * the vistor whose vist method is called prior to visting a vertex. * @throws E * if vistor.visit throws an exception */ public <E extends Exception> void breadthFirstSearch(Vertex<T> v, VisitorEX<T, E> visitor) throws E { LinkedList<Vertex<T>> q = new LinkedList<Vertex<T>>(); q.add(v); if (visitor != null) visitor.visit(this, v); v.visit(); while (q.isEmpty() == false) { v = q.removeFirst(); for (int i = 0; i < v.getOutgoingEdgeCount(); i++) { Edge<T> e = v.getOutgoingEdge(i); Vertex<T> to = e.getTo(); if (!to.visited()) { q.add(to); if (visitor != null) visitor.visit(this, to); to.visit(); } } } }
From source file:edu.umd.cs.guitar.ripper.Ripper.java
/**
 * Rip a single GUI component: extract its properties, optionally expand it to reveal
 * new windows, record window open/close side effects, and recursively rip its children.
 *
 * As of now this method does not propagate exceptions. It needs to be modified to
 * propagate exceptions. All callers need to be modified to handle exceptions.
 *
 * @param component the component to rip
 * @param window    the window containing the component
 * @return the extracted component model, or whatever was computed so far if ripping failed
 */
public ComponentType ripComponent(GComponent component, GWindow window) {
    GUITARLog.log.info("");
    GUITARLog.log.info("----------------------------------");
    GUITARLog.log.info("Ripping component: ");
    GUITARLog.log.info("Signature: ");
    printComponentInfo(component, window);
    // 1. Rip special/customized components: the first filter that claims the
    // component handles it entirely, short-circuiting the regular path below.
    for (GComponentFilter cm : lComponentFilter) {
        if (cm.isProcess(component, window)) {
            GUITARLog.log.info("Filter " + cm.getClass().getSimpleName() + " is applied");
            return cm.ripComponent(component, window);
        }
    }
    // 2. Rip regular components
    ComponentType retComp = null;
    try {
        retComp = component.extractProperties();
        ComponentTypeWrapper compA = new ComponentTypeWrapper(retComp);
        if (useImage) {
            String sUUID = null;
            try {
                sUUID = captureImage(component);
            } catch (AWTException e) {
                // Ignore AWTException. sUUID stays null and no UUID property is added.
            } catch (IOException e) {
                throw e;
            }
            if (sUUID != null) {
                compA.addProperty(GUITARConstants.UUID_TAG_NAME, sUUID);
            }
        }
        // NOTE(review): guiType is computed but never read below — presumably a
        // leftover; confirm before removing.
        GUIType guiType = null;
        if (window != null) {
            guiType = window.extractGUIProperties();
        }
        retComp = compA.getDComponentType();
        // 2.1 Try to perform action on the component to reveal more
        // windows/components; clear window opened cache before performing actions.
        monitor.resetWindowCache();
        if (monitor.isExpandable(component, window)) {
            monitor.expandGUI(component);
        } else {
            GUITARLog.log.info("Component is Unexpandable");
        }
        // Trigger terminal widget: record components whose expansion closed the
        // current window.
        LinkedList<GWindow> lClosedWindows = monitor.getClosedWindowCache();
        String sTitle = window.getTitle();
        if (lClosedWindows.size() > 0) {
            GUITARLog.log.debug("!!!!! Window closed");
            for (GWindow closedWin : lClosedWindows) {
                String sClosedWinTitle = closedWin.getTitle();
                // Only consider widget closing the current window
                if (sTitle.equals(sClosedWinTitle)) {
                    GUITARLog.log.debug("\t" + sClosedWinTitle);
                    List<FullComponentType> lCloseComp = lCloseWindowComp.getFullComponent();
                    FullComponentType cCloseComp = factory.createFullComponentType();
                    cCloseComp.setWindow(closedWin.extractWindow().getWindow());
                    cCloseComp.setComponent(retComp);
                    lCloseComp.add(cCloseComp);
                    lCloseWindowComp.setFullComponent(lCloseComp);
                } // if
            } // for
        } // if
        if (monitor.isNewWindowOpened()) {
            // Record this component as an opener of new windows.
            List<FullComponentType> lOpenComp = lOpenWindowComps.getFullComponent();
            FullComponentType cOpenComp = factory.createFullComponentType();
            cOpenComp.setWindow(window.extractWindow().getWindow());
            cOpenComp.setComponent(retComp);
            lOpenComp.add(cOpenComp);
            lOpenWindowComps.setFullComponent(lOpenComp);
            LinkedList<GWindow> lNewWindows = monitor.getOpenedWindowCache();
            monitor.resetWindowCache();
            GUITARLog.log.info(lNewWindows.size() + " new window(s) opened!!!");
            for (GWindow newWins : lNewWindows) {
                GUITARLog.log.info("*\t Title:*" + newWins.getTitle() + "*");
            }
            // Process the opened windows in a FIFO order (oldest first, taken from
            // the tail of the cache).
            while (!lNewWindows.isEmpty()) {
                GWindow gNewWin = lNewWindows.getLast();
                lNewWindows.removeLast();
                GComponent gWinComp = gNewWin.getContainer();
                if (gWinComp != null) {
                    // Add invokelist property for the component so the model records
                    // which window this component opens.
                    String sWindowTitle = gNewWin.getTitle();
                    compA = new ComponentTypeWrapper(retComp);
                    compA.addValueByName(GUITARConstants.INVOKELIST_TAG_NAME, sWindowTitle);
                    GUITARLog.log.debug(sWindowTitle + " recorded");
                    retComp = compA.getDComponentType();
                    // Check ignore window
                    if (!monitor.isIgnoredWindow(gNewWin)) {
                        if (!monitor.isRippedWindow(gNewWin)) {
                            gNewWin.setRoot(false);
                            monitor.addRippedList(gNewWin);
                            GUIType dWindow = ripWindow(gNewWin);
                            if (dWindow != null)
                                dGUIStructure.getGUI().add(dWindow);
                        } else {
                            GUITARLog.log.info("Window is ripped!!!");
                        }
                    } else {
                        GUITARLog.log.info("Window is ignored!!!");
                    }
                }
                monitor.closeWindow(gNewWin);
            }
        }
        // TODO: check if the component is still available after ripping
        // its child window
        List<GComponent> gChildrenList = component.getChildren();
        int nChildren = gChildrenList.size();
        int i = 0;
        // Debug: dump the child list (title - class) before recursing.
        String lChildren = "[";
        for (int j = 0; j < nChildren; j++) {
            lChildren += gChildrenList.get(j).getTitle() + " - " + gChildrenList.get(j).getClassVal() + "; ";
        }
        lChildren += "]";
        GUITARLog.log.debug("*" + component.getTitle() + "* in window *" + window.getTitle() + "* has "
                + nChildren + " children: " + lChildren);
        // End debug
        // Recurse into children; nChildren is re-read because ripping a child can
        // cause more children to appear in the list.
        while (i < nChildren) {
            GComponent gChild = gChildrenList.get(i++);
            ComponentType guiChild = ripComponent(gChild, window);
            if (guiChild != null) {
                ((ContainerType) retComp).getContents().getWidgetOrContainer().add(guiChild);
            }
            if (nChildren < gChildrenList.size()) {
                nChildren = gChildrenList.size();
            }
        }
    } catch (Exception e) {
        if (e.getClass().getName().contains("StaleElementReferenceException")) {
            /*
             * This can happen when performing an action causes a page navigation in
             * the current window, for example, when submitting a form.
             */
            GUITARLog.log.warn("Element went away: " + e.getMessage());
        } else {
            // TODO: Must throw exception
            GUITARLog.log.error("ripComponent exception", e);
        }
        /*
         * We'll return the component we calculated anyway so it gets added to the
         * GUI map. I'm not entirely sure this is the right thing to do, but it gets
         * us further anyway.
         */
        return retComp;
    }
    return retComp;
}
From source file:edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
/**
 * Assign ids to all entities found in the downloaded files (DBpedia entities first, then
 * the other sources), build the symmetric owl:sameAs adjacency sets, and label each
 * connected component of the sameAs graph with a canonical representative id in
 * {@code sameAs[]}.
 *
 * @throws IOException if any of the downloaded files cannot be read
 */
private void computeSameAsGroups() throws IOException {
    // Triple-ish line matcher: group 2 = subject URI, group 4 = predicate URI,
    // group 7 = object URI or literal.
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    // Pass 1: DBpedia entities are loaded first so they get the lowest ids.
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l;
            while ((l = in.readLine()) != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    if ((s.startsWith("http://www.dbpedia.org/resource")
                            || s.startsWith("http://dbpedia.org/resource")) && !entityIds.containsKey(s)
                            && !classIds.containsKey(s) && !propertyIds.containsKey(s)) {
                        entityIds.put(s, entityIds.size() + 1);
                    }
                    String v = m.group(7);
                    // BUGFIX: the class/property exclusion tests checked s instead of v.
                    if ((v.startsWith("http://www.dbpedia.org/resource")
                            || v.startsWith("http://dbpedia.org/resource")) && !entityIds.containsKey(v)
                            && !classIds.containsKey(v) && !propertyIds.containsKey(v)) {
                        entityIds.put(v, entityIds.size() + 1);
                    }
                }
            }
        }
    }
    // Pass 2: non-DBpedia entities (fu-berlin, bio2rdf mirror at 129.128.185.122, ...).
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l;
            while ((l = in.readLine()) != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    if (s.startsWith("http://www4.wiwiss.fu-berlin.de") && !entityIds.containsKey(s)
                            && !classIds.containsKey(s) && !propertyIds.containsKey(s)) {
                        entityIds.put(s, entityIds.size() + 1);
                    }
                    String v = m.group(7);
                    // BUGFIX: the class/property exclusion tests checked s instead of v.
                    if ((v.startsWith("http://www4.wiwiss.fu-berlin.de")
                            //|| v.startsWith("http://data.linkedct.org")
                            || v.startsWith("http://129.128.185.122")) && !entityIds.containsKey(v)
                            && !classIds.containsKey(v) && !propertyIds.containsKey(v)) {
                        entityIds.put(v, entityIds.size() + 1);
                    }
                }
                // Removed a leftover debug hook: if (entityIds.size() == 12413) println().
            }
        }
    }
    // Create the sameAsEdges sets and the id -> URI lookup table.
    sameAsEdges = new HashSet[entityIds.size() + 1];
    entityById = new String[entityIds.size() + 1];
    for (Map.Entry<String, Integer> e : entityIds.entrySet()) {
        entityById[e.getValue()] = e.getKey();
    }
    // Pass 3: collect owl:sameAs edges, stored symmetrically.
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l;
            while ((l = in.readLine()) != null) {
                // BUGFIX: the original had an extra in.readLine() at the end of this
                // loop body, which silently skipped every other line of each file.
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#sameAs")) {
                        String s = m.group(2);
                        int idS = entityIds.get(s);
                        String v = m.group(7);
                        Integer idV = entityIds.get(v);
                        if (idV == null) {
                            // object URI was never registered as an entity; ignore edge
                            continue;
                        }
                        if (sameAsEdges[idS] == null) {
                            sameAsEdges[idS] = new HashSet<>();
                        }
                        sameAsEdges[idS].add(idV);
                        if (sameAsEdges[idV] == null) {
                            sameAsEdges[idV] = new HashSet<>();
                        }
                        sameAsEdges[idV].add(idS);
                    }
                }
            }
        }
    }
    // Label connected components with a BFS from each yet-unlabeled id.
    sameAs = new int[entityIds.size() + 1];
    int i = 1;
    while (i < sameAs.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst();
            if (sameAs[j] != 0) {
                // A node reachable from i must not already belong to another component.
                if (sameAs[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                sameAs[j] = i;
                if (sameAsEdges[j] != null) {
                    for (int k : sameAsEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        // Skip over ids already absorbed into an earlier component.
        while (i < sameAs.length && sameAs[i] != 0) {
            i++;
        }
    }
}
From source file:edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
/**
 * Assign ids to all properties found in the downloaded files (DBpedia properties first),
 * build the symmetric owl:equivalentProperty adjacency sets, and label each connected
 * component with a canonical representative id in {@code equivalentProperty[]}.
 *
 * @throws IOException if any of the downloaded files cannot be read
 */
private void computeEquivalentPropertyGroups() throws IOException {
    // Triple-ish line matcher: group 2 = subject, group 4 = predicate, group 7 = object.
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    // Pass 1: DBpedia properties are loaded first so they get the lowest ids.
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")
                            && (s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !propertyIds.containsKey(s)) {
                        propertyIds.put(s, propertyIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        // Both ends of an equivalence edge become properties.
                        if ((s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(s)) {
                            propertyIds.put(s, propertyIds.size() + 1);
                        }
                        if ((v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(v)) {
                            propertyIds.put(v, propertyIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    // Pass 2: non-DBpedia properties (rdfs:label and owl:sameAs are excluded by name).
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")
                            && !(s.equals("http://www.w3.org/2000/01/rdf-schema#label")
                                    || s.equals("http://www.w3.org/2002/07/owl#sameAs")
                                    || s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !propertyIds.containsKey(s)) {
                        propertyIds.put(s, propertyIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        if (!(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(s)) {
                            propertyIds.put(s, propertyIds.size() + 1);
                        }
                        if (!(v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(v)) {
                            propertyIds.put(v, propertyIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    // Create the equivalentPropertyEdges sets and the id -> URI lookup table.
    equivalentPropertyEdges = new HashSet[propertyIds.size() + 1];
    propertyById = new String[propertyIds.size() + 1];
    for (Map.Entry<String, Integer> e : propertyIds.entrySet()) {
        propertyById[e.getValue()] = e.getKey();
    }
    // Pass 3: collect owl:equivalentProperty edges, stored symmetrically.
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        String s = m.group(2);
                        // NOTE(review): unboxing get(s)/get(v) assumes both URIs were
                        // registered by passes 1-2; an unregistered URI would NPE here
                        // — TODO confirm the passes cover all equivalentProperty triples.
                        int idS = propertyIds.get(s);
                        String v = m.group(7);
                        int idV = propertyIds.get(v);
                        if (equivalentPropertyEdges[idS] == null) {
                            equivalentPropertyEdges[idS] = new HashSet<>();
                        }
                        equivalentPropertyEdges[idS].add(idV);
                        if (equivalentPropertyEdges[idV] == null) {
                            equivalentPropertyEdges[idV] = new HashSet<>();
                        }
                        equivalentPropertyEdges[idV].add(idS);
                    }
                    /* else if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                        String s = m.group(2); String v = m.group(7);
                        if (v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")) {
                            properties.add(s);
                        } else if (v.equals("http://www.w3.org/2000/01/rdf-schema#Class")) {
                            classes.add(s);
                        }
                    }*/
                }
                l = in.readLine();
            }
        }
    }
    // Label connected components with a BFS from each yet-unlabeled id.
    equivalentProperty = new int[propertyIds.size() + 1];
    int i = 1;
    while (i < equivalentProperty.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst();
            if (equivalentProperty[j] != 0) {
                // A node reachable from i must not already belong to another component.
                if (equivalentProperty[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                equivalentProperty[j] = i;
                if (equivalentPropertyEdges[j] != null) {
                    for (int k : equivalentPropertyEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        // Skip over ids already absorbed into an earlier component.
        while (i < equivalentProperty.length && equivalentProperty[i] != 0) {
            i++;
        }
    }
}
From source file:com.jayway.jsonpath.JsonModel.java
private <T> T getTargetObject(JsonPath jsonPath, Class<T> clazz) { notNull(jsonPath, "jsonPath can not be null"); if (!jsonPath.isPathDefinite()) { throw new IndefinitePathException(jsonPath.getPath()); }// w w w .j av a 2s . c o m JsonProvider jsonProvider = JsonProviderFactory.createProvider(); Object modelRef = jsonObject; if (jsonPath.getTokenizer().size() == 1) { PathToken onlyToken = jsonPath.getTokenizer().iterator().next(); if ("$".equals(onlyToken.getFragment())) { return clazz.cast(modelRef); } } else { LinkedList<PathToken> tokens = jsonPath.getTokenizer().getPathTokens(); PathToken currentToken; do { currentToken = tokens.poll(); modelRef = currentToken.apply(modelRef, jsonProvider); } while (!tokens.isEmpty()); if (modelRef.getClass().isAssignableFrom(clazz)) { throw new InvalidModelException( jsonPath + " does nor refer to a Map but " + currentToken.getClass().getName()); } return clazz.cast(modelRef); } throw new InvalidModelException(); }
From source file:edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
/**
 * Assign ids to all classes found in the downloaded files (DBpedia classes first), build
 * the symmetric owl:equivalentClass adjacency sets (plus one hand-added equivalence), and
 * label each connected component with a canonical representative id in
 * {@code equivalentClass[]}.
 *
 * @throws IOException if any of the downloaded files cannot be read
 */
private void computeEquivalentClassGroups() throws IOException {
    // Triple-ish line matcher: group 2 = subject, group 4 = predicate, group 7 = object.
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    // Pass 1: DBpedia classes are loaded first so they get the lowest ids.
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/2000/01/rdf-schema#Class")
                            && (s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !classIds.containsKey(s)) {
                        classIds.put(s, classIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        // Both ends of an equivalence edge become classes.
                        if ((s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(s)) {
                            classIds.put(s, classIds.size() + 1);
                        }
                        if ((v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(v)) {
                            classIds.put(v, classIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    // Pass 2: non-DBpedia classes.
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/2000/01/rdf-schema#Class")
                            && !(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !classIds.containsKey(s)) {
                        classIds.put(s, classIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        if (!(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(s)) {
                            classIds.put(s, classIds.size() + 1);
                        }
                        if (!(v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(v)) {
                            classIds.put(v, classIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    // Create the equivalentClassEdges sets and the id -> URI lookup table.
    equivalentClassEdges = new HashSet[classIds.size() + 1];
    classById = new String[classIds.size() + 1];
    for (Map.Entry<String, Integer> e : classIds.entrySet()) {
        classById[e.getValue()] = e.getKey();
    }
    // Pass 3: collect owl:equivalentClass edges, stored symmetrically.
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        String s = m.group(2);
                        // NOTE(review): unboxing get(s)/get(v) assumes both URIs were
                        // registered by passes 1-2; an unregistered URI would NPE here
                        // — TODO confirm the passes cover all equivalentClass triples.
                        int idS = classIds.get(s);
                        String v = m.group(7);
                        int idV = classIds.get(v);
                        if (equivalentClassEdges[idS] == null) {
                            equivalentClassEdges[idS] = new HashSet<>();
                        }
                        equivalentClassEdges[idS].add(idV);
                        if (equivalentClassEdges[idV] == null) {
                            equivalentClassEdges[idV] = new HashSet<>();
                        }
                        equivalentClassEdges[idV].add(idS);
                    }
                    /* else if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                        String s = m.group(2); String v = m.group(7);
                        if (v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")) {
                            properties.add(s);
                        } else if (v.equals("http://www.w3.org/2000/01/rdf-schema#Class")) {
                            classes.add(s);
                        }
                    }*/
                }
                l = in.readLine();
            }
        }
    }
    // Manually add an equivalence: sider's drugs class is dbpedia's Drug.
    {
        String s = "http://www4.wiwiss.fu-berlin.de/sider/resource/sider/drugs";
        int idS = classIds.get(s);
        String v = "http://dbpedia.org/ontology/Drug";
        int idV = classIds.get(v);
        if (equivalentClassEdges[idS] == null) {
            equivalentClassEdges[idS] = new HashSet<>();
        }
        equivalentClassEdges[idS].add(idV);
        if (equivalentClassEdges[idV] == null) {
            equivalentClassEdges[idV] = new HashSet<>();
        }
        equivalentClassEdges[idV].add(idS);
    }
    // Label connected components with a BFS from each yet-unlabeled id.
    equivalentClass = new int[classIds.size() + 1];
    int i = 1;
    while (i < equivalentClass.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst();
            if (equivalentClass[j] != 0) {
                // A node reachable from i must not already belong to another component.
                if (equivalentClass[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                equivalentClass[j] = i;
                if (equivalentClassEdges[j] != null) {
                    for (int k : equivalentClassEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        // Skip over ids already absorbed into an earlier component.
        while (i < equivalentClass.length && equivalentClass[i] != 0) {
            i++;
        }
    }
}
From source file:net.sourceforge.seqware.pipeline.plugins.MetadataTest.java
@Test public void testListAllTables() { systemErr.println("Test List all Tables\n"); launchPlugin("--list-tables"); String output = getOut();/*from w w w. j a v a 2s . co m*/ // fix up test to support basic workflow/run creation tools, see git commit 4862eaba7f3d7c7495155dc913ead745b544f358 String[] tables = new String[] { "TableName", "study", "experiment", "sample", "ius", "lane", "sequencer_run", "workflow", "workflow_run" }; LinkedList<String> stuff = new LinkedList(Arrays.asList(output.split("\n"))); for (String table : tables) { int index = stuff.indexOf(table); if (index >= 0) { stuff.remove(index); } else { Assert.fail("Missing a table:" + table); } } while (!stuff.isEmpty()) { String s = stuff.poll(); Assert.fail("There are extra tables listed: " + s); } }
From source file:com.mirth.connect.server.controllers.MuleEngineController.java
/**
 * Build and attach the outbound message router for a channel: one Mule endpoint per
 * enabled destination connector, each with its JavaScript transformer (chained to any
 * transport-default transformers), plus JDBC transaction configuration when applicable.
 *
 * Registration errors are captured and rethrown only after the router has been attached
 * to the descriptor (see the comment at the bottom for why).
 *
 * @param descriptor the Mule descriptor to receive the outbound router
 * @param channel    the channel whose destinations are being configured
 * @throws Exception the first error encountered while registering endpoints/connectors
 */
private void configureOutboundRouter(UMODescriptor descriptor, Channel channel) throws Exception {
    logger.debug(
            "configuring outbound router for channel: " + channel.getId() + " (" + channel.getName() + ")");
    FilteringMulticastingRouter fmr = new FilteringMulticastingRouter();
    boolean enableTransactions = false;
    Exception exceptionRegisteringOutboundRouter = null;
    // If there was an exception registering a connector, break the loop.
    for (ListIterator<Connector> iterator = channel.getDestinationConnectors().listIterator(); iterator
            .hasNext() && (exceptionRegisteringOutboundRouter == null);) {
        Connector connector = iterator.next();
        if (connector.isEnabled()) {
            MuleEndpoint endpoint = new MuleEndpoint();
            // Don't throw an exception if a malformed URI was passed
            // in for one of the destinations.
            try {
                endpoint.setEndpointURI(new MuleEndpointURI(getEndpointUri(connector), channel.getId()));
            } catch (Exception e) {
                exceptionRegisteringOutboundRouter = e;
            }
            // if there are multiple endpoints, make them all synchronous to
            // ensure correct ordering of fired events
            // NOTE(review): the guard tests size() > 0, so this applies whenever any
            // destination exists, not only for multiple — confirm intent.
            if (channel.getDestinationConnectors().size() > 0) {
                endpoint.setSynchronous(true);
                // TODO: routerElement.setAttribute("synchronous", "true");
            }
            String connectorReference = getConnectorReferenceForOutboundRouter(channel, iterator.nextIndex());
            // add the destination connector
            String connectorName = getConnectorNameForRouter(connectorReference);
            try {
                endpoint.setConnector(registerConnector(connector, connectorName, channel.getId()));
            } catch (Exception e) {
                exceptionRegisteringOutboundRouter = e;
            }
            // 1. append the JavaScriptTransformer that does the mappings
            UMOTransformer javascriptTransformer = createTransformer(channel, connector,
                    connectorReference + "_transformer");
            try {
                muleManager.registerTransformer(javascriptTransformer);
            } catch (Exception e) {
                exceptionRegisteringOutboundRouter = e;
            }
            // 2. finally, append any transformers needed by the transport
            // (ie. StringToByteArray)
            ConnectorMetaData transport = transports.get(connector.getTransportName());
            LinkedList<UMOTransformer> defaultTransformerList = null;
            if (transport.getTransformers() != null) {
                defaultTransformerList = chainTransformers(transport.getTransformers());
                if (!defaultTransformerList.isEmpty()) {
                    javascriptTransformer.setTransformer(defaultTransformerList.getFirst());
                }
            }
            // enable transactions for the outbound router only if it
            // has a JDBC connector
            if (transport.getProtocol().equalsIgnoreCase("jdbc")) {
                enableTransactions = true;
            }
            endpoint.setTransformer(javascriptTransformer);
            fmr.addEndpoint(endpoint);
        }
    }
    // check for enabled transactions: both a JDBC destination and the channel's
    // "transactional" property are required.
    boolean transactional = ((channel.getProperties().get("transactional") != null)
            && channel.getProperties().get("transactional").toString().equalsIgnoreCase("true"));
    if (enableTransactions && transactional) {
        MuleTransactionConfig mtc = new MuleTransactionConfig();
        mtc.setActionAsString("BEGIN_OR_JOIN");
        mtc.setFactory(new JdbcTransactionFactory());
        fmr.setTransactionConfig(mtc);
    }
    OutboundMessageRouter outboundRouter = new OutboundMessageRouter();
    outboundRouter.addRouter(fmr);
    descriptor.setOutboundRouter(outboundRouter);
    /*
     * Throw an exception after the FilteringMulticastingRouter is created
     * and added to the outbound router, even though the connector
     * registration is aborted. This is so casting to a
     * FilteringMulticastingRouter doesn't fail when unregistering the
     * failed channel and stopping its dispatchers.
     */
    if (exceptionRegisteringOutboundRouter != null) {
        throw exceptionRegisteringOutboundRouter;
    }
}
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtil.java
/**
 * Reduce the number of edits by eliminating operationally trivial equalities:
 * a short EQUAL run sandwiched between insert/delete activity is cheaper to emit as a
 * paired DELETE+INSERT of the same text, allowing neighboring edits to merge.
 *
 * Modifies {@code diffs} in place and finishes with a merge pass when anything changed.
 *
 * @param diffs LinkedList of Diff objects.
 */
public void diff_cleanupEfficiency(LinkedList<Diff<T>> diffs) {
    if (diffs.isEmpty()) {
        return;
    }
    boolean changes = false;
    Stack<Diff> equalities = new Stack<Diff>(); // Stack of equalities.
    List<T> lastequality = null; // Always equal to equalities.lastElement().text
    ListIterator<Diff<T>> pointer = diffs.listIterator();
    // Is there an insertion operation before the last equality.
    boolean pre_ins = false;
    // Is there a deletion operation before the last equality.
    boolean pre_del = false;
    // Is there an insertion operation after the last equality.
    boolean post_ins = false;
    // Is there a deletion operation after the last equality.
    boolean post_del = false;
    Diff<T> thisDiff = pointer.next();
    Diff<T> safeDiff = thisDiff; // The last Diff that is known to be unsplitable.
    while (thisDiff != null) {
        if (thisDiff.operation == Operation.EQUAL) {
            // Equality found.
            if (thisDiff.text.size() < Diff_EditCost && (post_ins || post_del)) {
                // Candidate found: short equality with edit activity after it.
                equalities.push(thisDiff);
                pre_ins = post_ins;
                pre_del = post_del;
                lastequality = thisDiff.text;
            } else {
                // Not a candidate, and can never become one.
                equalities.clear();
                lastequality = null;
                safeDiff = thisDiff;
            }
            post_ins = post_del = false;
        } else { // An insertion or deletion.
            if (thisDiff.operation == Operation.DELETE) {
                post_del = true;
            } else {
                post_ins = true;
            }
            /*
             * Five types to be split:
             * <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
             * <ins>A</ins>X<ins>C</ins><del>D</del>
             * <ins>A</ins><del>B</del>X<ins>C</ins>
             * <ins>A</del>X<ins>C</ins><del>D</del>
             * <ins>A</ins><del>B</del>X<del>C</del>
             */
            if (lastequality != null && ((pre_ins && pre_del && post_ins && post_del)
                    || ((lastequality.size() < Diff_EditCost / 2) && ((pre_ins ? 1 : 0) + (pre_del ? 1 : 0)
                            + (post_ins ? 1 : 0) + (post_del ? 1 : 0)) == 3))) {
                //System.out.println("Splitting: '" + lastequality + "'");
                // Walk back to offending equality.
                while (thisDiff != equalities.lastElement()) {
                    thisDiff = pointer.previous();
                }
                pointer.next();
                // Replace equality with a delete.
                pointer.set(new Diff(Operation.DELETE, lastequality));
                // Insert a corresponding an insert.
                pointer.add(thisDiff = new Diff(Operation.INSERT, lastequality));
                equalities.pop(); // Throw away the equality we just deleted.
                lastequality = null;
                if (pre_ins && pre_del) {
                    // No changes made which could affect previous entry, keep going.
                    post_ins = post_del = true;
                    equalities.clear();
                    safeDiff = thisDiff;
                } else {
                    if (!equalities.empty()) {
                        // Throw away the previous equality (it needs to be reevaluated).
                        equalities.pop();
                    }
                    if (equalities.empty()) {
                        // There are no previous questionable equalities,
                        // walk back to the last known safe diff.
                        thisDiff = safeDiff;
                    } else {
                        // There is an equality we can fall back to.
                        thisDiff = equalities.lastElement();
                    }
                    // Rewind the iterator until it sits just after thisDiff.
                    while (thisDiff != pointer.previous()) {
                        // Intentionally empty loop.
                    }
                    post_ins = post_del = false;
                }
                changes = true;
            }
        }
        thisDiff = pointer.hasNext() ? pointer.next() : null;
    }
    if (changes) {
        diff_cleanupMerge(diffs);
    }
}