Usage examples for java.util.Vector elementAt

elementAt(int) returns the component at the given index and throws ArrayIndexOutOfBoundsException when the index is out of range; it is the legacy, synchronized counterpart of List.get(int). The method signature and a set of real-world usage examples follow.
public synchronized E elementAt(int index)
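A minimal, self-contained sketch of the basic call pattern; the class and variable names below are illustrative and not taken from the examples that follow.

import java.util.Vector;

public class ElementAtDemo {
    public static void main(String[] args) {
        Vector<String> names = new Vector<String>();
        names.add("alpha");
        names.add("beta");

        // elementAt(i) is equivalent to get(i); it predates the List interface
        for (int i = 0; i < names.size(); i++) {
            System.out.println(i + ": " + names.elementAt(i));
        }

        // An out-of-range index fails fast
        try {
            names.elementAt(names.size());
        } catch (ArrayIndexOutOfBoundsException e) {
            System.out.println("index out of range: " + e.getMessage());
        }
    }
}

As the examples below show, most callers pair elementAt(i) with a size()-bounded loop or an otherwise guarded index, exactly as in this sketch.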
From source file:gov.nih.nci.evs.browser.utils.MetaTreeUtils.java
public TreeItem addChildrenExt(LexBIGService lbs, MetaBrowserService mbs, TreeItem ti, String sab, String code,
        Set<String> codesToExclude, Map<String, TreeItem> code2Tree, String target_code) throws LBException {
    List<String> par_chd_assoc_list = new ArrayList();
    par_chd_assoc_list.add("CHD");
    Map<String, List<BySourceTabResults>> map = null;
    try {
        // mbs = (MetaBrowserService) lbs.getGenericExtension("metabrowser-extension");
        map = mbs.getBySourceTabDisplay(ti._code, sab, par_chd_assoc_list, Direction.SOURCEOF);
    } catch (Exception ex) {
        ex.printStackTrace();
        return ti;
    }
    Vector w = new Vector();
    /*
     * KLO, 020210
     * HashMap cui2SynonymsMap = createCUI2SynonymsHahMap(map);
     * ti.expandable = false;
     * Set keyset = cui2SynonymsMap.keySet();
     * Iterator iterator = keyset.iterator();
     * while (iterator.hasNext()) {
     *     String child_cui = (String) iterator.next();
     *     //_logger.debug("\tchild_cui: " + child_cui);
     *     TreeItem sub = null;
     *     if (code2Tree.containsKey(child_cui)) {
     *         sub = (TreeItem) code2Tree.get(child_cui);
     *     } else {
     *         Vector v = (Vector) cui2SynonymsMap.get(child_cui);
     *         //BySourceTabResults result = findHighestRankedAtom(v, sab);
     *         BySourceTabResults result = findHighestRankedAtom(v, sab);
     *         if (result == null) {
     *             result = (BySourceTabResults) v.elementAt(0);
     *         }
     *         //BySourceTabResults result = (BySourceTabResults) v.elementAt(0);
     *         sub = new TreeItem(child_cui, result.getTerm());
     *         sub.expandable = hasSubconcepts(lbs, mbs, child_cui, "NCI", "CHD", true);
     *     }
     *     ti.addChild("CHD", sub);
     *     ti.expandable = true;
     * }
     */
    HashMap cui2SynonymsMap = createCUI2SynonymsHahMap(map);
    ti._expandable = false;
    Iterator iterator = cui2SynonymsMap.entrySet().iterator();
    while (iterator.hasNext()) {
        Entry thisEntry = (Entry) iterator.next();
        String child_cui = (String) thisEntry.getKey();
        /*
         * Set keyset = cui2SynonymsMap.keySet();
         * Iterator iterator = keyset.iterator();
         * while (iterator.hasNext()) {
         *     String child_cui = (String) iterator.next();
         */
        TreeItem sub = null;
        if (code2Tree.containsKey(child_cui)) {
            sub = (TreeItem) code2Tree.get(child_cui);
        } else {
            // Vector v = (Vector) cui2SynonymsMap.get(child_cui);
            Vector v = (Vector) thisEntry.getValue();
            BySourceTabResults result = findHighestRankedAtom(v, sab);
            // BySourceTabResults result = findHighestRankedAtom(v, sab);
            if (result == null) {
                result = (BySourceTabResults) v.elementAt(0);
            }
            // BySourceTabResults result = (BySourceTabResults) v.elementAt(0);
            sub = new TreeItem(child_cui, result.getTerm());
            sub._expandable = hasSubconcepts(lbs, mbs, child_cui, NCI_SOURCE, "CHD", true);
        }
        w.add(sub);
    }
    w = SortUtils.quickSort(w);
    /*
     * for (int i=0; i<w.size(); i++) {
     *     TreeItem sub = (TreeItem) w.elementAt(i);
     *     ti.expandable = true;
     *     ti.addChild("CHD", sub);
     * }
     */
    // Truncate subconcept list to enhance search_tree performance
    int target_idx = -1;
    for (int i = 0; i < w.size(); i++) {
        TreeItem sub = (TreeItem) w.elementAt(i);
        if (sub._code.compareTo(target_code) == 0) {
            target_idx = i;
            break;
        }
    }
    for (int i = 0; i <= target_idx; i++) {
        TreeItem sub = (TreeItem) w.elementAt(i);
        ti._expandable = true;
        ti.addChild("CHD", sub);
    }
    if (target_idx == w.size() - 1) {
        return ti;
    }
    for (int i = target_idx + 1; i < w.size(); i++) {
        TreeItem sub = (TreeItem) w.elementAt(i);
        if (sub._expandable) {
            sub._text = "..."; // + sub.text;
            sub._code = sub._code + "|" + ti._code;
            ti._expandable = true;
            ti.addChild("CHD", sub);
            break;
        } else {
            ti._expandable = true;
            ti.addChild("CHD", sub);
        }
    }
    // ti.addChild("CHD", sub);
    // }
    return ti;
}
From source file:gov.nih.nci.evs.browser.utils.MetaTreeUtils.java
protected void buildPathsToUpperNodesExt(LexBIGService lbs, MetaBrowserService mbs, TreeItem ti, String sab,
        Map<String, TreeItem> code2Tree, Set<TreeItem> roots, Set<String> visited_links, int maxLevel,
        int currLevel) throws LBException {
    HashSet new_root_codes = new HashSet();
    if (maxLevel != -1 && currLevel > maxLevel) {
        return;
    }
    // Only need to process a code once ...
    if (code2Tree.containsKey(ti._code))
        return;
    // Cache for future reference.
    code2Tree.put(ti._code, ti);
    // UMLS relations can be defined with forward direction
    // being parent to child or child to parent on a source
    // by source basis. Iterate twice to ensure completeness;
    // once navigating child to parent relations forward
    // and once navigating parent to child relations
    // backward. Both have the net effect of navigating
    // from the bottom of the hierarchy to the top.
    boolean isRoot = true;
    // find parents:
    List<String> par_chd_assoc_list = new ArrayList();
    par_chd_assoc_list.add("CHD");
    Map<String, List<BySourceTabResults>> map = null;
    // LexBIGService lbs = RemoteServerUtil.createLexBIGService();
    // MetaBrowserService mbs = null;
    try {
        mbs = (MetaBrowserService) lbs.getGenericExtension("metabrowser-extension");
        map = mbs.getBySourceTabDisplay(ti._code, sab, par_chd_assoc_list, Direction.TARGETOF);
    } catch (Exception ex) {
        ex.printStackTrace();
        return;
    }
    HashMap cui2SynonymsMap = createCUI2SynonymsHahMap(map);
    Iterator iterator = cui2SynonymsMap.entrySet().iterator();
    while (iterator.hasNext()) {
        Entry thisEntry = (Entry) iterator.next();
        String parent_cui = (String) thisEntry.getKey();
        /*
         * Set keyset = cui2SynonymsMap.keySet();
         * Iterator iterator = keyset.iterator();
         * while (iterator.hasNext()) {
         *     String parent_cui = (String) iterator.next();
         */
        // KLO, 020210
        if (parent_cui.compareTo(_nciThesaurusCui) != 0) {
            // Vector v = (Vector) cui2SynonymsMap.get(parent_cui);
            Vector v = (Vector) thisEntry.getValue();
            BySourceTabResults result = findHighestRankedAtom(v, sab);
            // BySourceTabResults result = findHighestRankedAtom(v, sab);
            if (result == null) {
                result = (BySourceTabResults) v.elementAt(0);
            }
            // BySourceTabResults result = (BySourceTabResults) v.elementAt(0);
            String link = ti._code + "|" + parent_cui;
            if (!visited_links.contains(link)) {
                visited_links.add(link);
                TreeItem tiParent = code2Tree.get(parent_cui);
                if (tiParent == null) {
                    tiParent = new TreeItem(parent_cui, result.getTerm());
                }
                // Add immediate children of the parent code with an
                // indication of sub-nodes (+). Codes already
                // processed as part of the path are ignored since
                // they are handled through recursion.
                // tiParent = addChildrenExt(lbs, mbs, tiParent, sab,
                //     parent_cui, code2Tree.keySet(), code2Tree);
                tiParent = addChildrenExt(lbs, mbs, tiParent, sab, parent_cui, code2Tree.keySet(), code2Tree,
                        ti._code);
                tiParent.addChild("CHD", ti);
                // Try to go higher through recursion.
                buildPathsToUpperNodesExt(lbs, mbs, tiParent, sab, code2Tree, roots, visited_links, maxLevel,
                        currLevel + 1);
                code2Tree.put(parent_cui, tiParent);
            }
        } else {
            roots.add(ti);
            new_root_codes.add(ti._code);
        }
    }
    code2Tree.put(ti._code, ti);
    isRoot = false;
    if (maxLevel != -1 && currLevel == maxLevel) {
        isRoot = true;
    }
    if (isRoot) {
        // //KLO, 020210
        if (!new_root_codes.contains(ti._code)) {
            roots.add(ti);
        }
    }
}
From source file:gov.nih.nci.evs.browser.utils.MetaTreeUtils.java
public HashMap getSubconcepts(String code, String sab, String association, boolean associationsNavigatedFwd) {
    LexBIGService lbs = RemoteServerUtil.createLexBIGService();
    MetaBrowserService mbs = null;
    TreeItem ti = null;
    HashMap hmap = new HashMap();
    long ms = System.currentTimeMillis();
    try {
        // LexBIGServiceConvenienceMethods lbscm = (LexBIGServiceConvenienceMethods) lbSvc
        //     .getGenericExtension("LexBIGServiceConvenienceMethods");
        // lbscm.setLexBIGService(lbSvc);
        String name = getCodeDescription("NCI Metathesaurus", null, code);
        ti = new TreeItem(code, name);
        ti._expandable = false;
        List<String> par_chd_assoc_list = new ArrayList();
        par_chd_assoc_list.add(association);
        Map<String, List<BySourceTabResults>> map = null;
        try {
            mbs = (MetaBrowserService) lbs.getGenericExtension("metabrowser-extension");
            if (associationsNavigatedFwd) {
                map = mbs.getBySourceTabDisplay(code, sab, par_chd_assoc_list, Direction.SOURCEOF);
            } else {
                map = mbs.getBySourceTabDisplay(code, sab, par_chd_assoc_list, Direction.TARGETOF);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
            return hmap;
        }
        HashMap cui2SynonymsMap = createCUI2SynonymsHahMap(map);
        Iterator iterator = cui2SynonymsMap.entrySet().iterator();
        while (iterator.hasNext()) {
            Entry thisEntry = (Entry) iterator.next();
            String child_cui = (String) thisEntry.getKey();
            Vector v = (Vector) thisEntry.getValue();
            TreeItem sub = null;
            // temporary
            BySourceTabResults result = findHighestRankedAtom(v, sab);
            // BySourceTabResults result = findHighestRankedAtom(v, sab);
            if (result == null) {
                result = (BySourceTabResults) v.elementAt(0);
            }
            sub = new TreeItem(child_cui, result.getTerm());
            sub._expandable = hasSubconcepts(lbs, mbs, child_cui, "NCI", association, associationsNavigatedFwd);
            ti.addChild(association, sub);
            ti._expandable = true;
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        return hmap;
    }
    hmap.put(code, ti);
    _logger.debug(
            "Run time (milliseconds) getSubconcepts: " + (System.currentTimeMillis() - ms) + " to resolve ");
    return hmap;
}
From source file:com.globalsight.everest.webapp.pagehandler.edit.inctxrv.EditorPageHandler.java
/**
 * Initializes editor state from a job, i.e. when the editor is opened by an
 * Admin or PM.
 */
private void initializeFromJob(EditorState p_state, HttpServletRequest p_request, String p_jobId,
        String p_srcPageId, String p_trgPageId, Locale p_uiLocale, User p_user) throws EnvoyServletException {
    p_state.setUserIsPm(true);
    HttpSession session = p_request.getSession();
    SessionManager sessionMgr = (SessionManager) session.getAttribute(WebAppConstants.SESSION_MANAGER);
    PermissionSet perms = (PermissionSet) p_request.getSession().getAttribute(WebAppConstants.PERMISSIONS);
    // Reset all options because the state may be inherited from a
    // previous page.
    EditorHelper.initEditorOptions(p_state, p_request.getSession());
    // Initializes pages, target locales, excluded items, and termbases
    EditorHelper.initializeFromJob(p_state, p_jobId, p_srcPageId, p_uiLocale, p_user.getUserId(), perms);
    if (p_trgPageId != null && p_trgPageId.length() > 0) {
        // If the PM requests a specific target page...
        setCurrentPage(p_request.getSession(), p_state, p_srcPageId, p_trgPageId);
        EditorState.PagePair currentPage = p_state.getCurrentPage();
        p_state.setTargetViewLocale(currentPage.getTargetPageLocale(new Long(p_trgPageId)));
    } else {
        // No target page/locale requested, find a suitable one.
        setCurrentPage(p_request.getSession(), p_state, p_srcPageId);
        // If no locale is set or the set locale doesn't exist in the
        // list of target locales in the job (fix for def_5545),
        // determine the default locale to display in target window.
        GlobalSightLocale viewLocale = p_state.getTargetViewLocale();
        Vector trgLocales = p_state.getJobTargetLocales();
        GlobalSightLocale local = (GlobalSightLocale) sessionMgr.getAttribute("targetLocale");
        if (viewLocale == null || !trgLocales.contains(viewLocale)) {
            if (trgLocales.contains(local)) {
                Iterator it = trgLocales.iterator();
                while (it.hasNext()) {
                    GlobalSightLocale trgLocale = (GlobalSightLocale) it.next();
                    if (local.getLocale().equals(trgLocale.getLocale())) {
                        p_state.setTargetViewLocale((GlobalSightLocale) trgLocale);
                    }
                }
            } else {
                p_state.setTargetViewLocale((GlobalSightLocale) trgLocales.elementAt(0));
            }
        }
    }
    // When coming from job page, target page is read only.
    // Fri Feb 20 20:18:44 2004 CvdL: Patch for HP: PMs can edit
    // all target pages any time at their own risk.
    if (s_pmCanEditTargetPages && EditorHelper.pmCanEditCurrentPage(p_state)) {
        p_state.setReadOnly(false);
        p_state.setAllowEditAll(true);
        p_state.setEditAllState(EDIT_ALL);
    } else {
        p_state.setReadOnly(true);
    }
    // Mon Jan 31 18:56:04 2005 CvdL: PM can edit snippets too (12665)
    p_state.setAllowEditSnippets(s_pmCanEditSnippets);
    // Indicate that main editor is in 'viewer' mode -- see
    // dispatchJsp for switching to review mode.
    // Comments are turned ON by default in popup editor from Job detail
    // page
    p_state.setReviewMode();
}
From source file:com.google.gsa.Kerberos.java
/**
 * Sets the kerberos and non-kerberos authentication cookies
 */
private void settingSessionCookies(Vector<Cookie> krbCookies, Vector<Cookie> nonKrbCookies, Cookie gsaAuthCookie,
        UserSession userSession) {
    int numKrb = 0;
    int numNonKrb = 0;
    int authCookie = 1;
    Cookie[] totalCookies;
    // check number of cookies
    if (!krbCookies.isEmpty()) {
        numKrb = krbCookies.size();
        logger.debug("numKrb: " + numKrb);
    }
    if (!nonKrbCookies.isEmpty()) {
        numNonKrb = nonKrbCookies.size();
        logger.debug("numNonKrb: " + numNonKrb);
    }
    // setting Cookies
    int numCookies = numKrb + numNonKrb + authCookie;
    logger.debug("numCookies: " + numCookies);
    totalCookies = new Cookie[numCookies];
    // setting authCookie
    logger.debug("Inserting authCoookie at totalCookie");
    totalCookies[0] = gsaAuthCookie;
    int index = 1;
    // getting Krb cookies
    if (numKrb > 0) {
        int krbIndex = 0;
        for (int i = index; i < (numKrb + 1); i++) {
            logger.debug("Inserting totalCookie [i=" + (i) + "]");
            logger.debug("with cookie: " + krbCookies.elementAt(krbIndex));
            totalCookies[i] = krbCookies.elementAt(krbIndex);
            krbIndex++;
            index++;
        }
    }
    // getting nonKrb cookies
    if (numNonKrb > 0) {
        int nonKrbIndex = 0;
        for (int j = index; j < numCookies; j++) {
            logger.debug("Inserting totalCookie [j=" + (j) + "]: ");
            logger.debug("with cookie: " + nonKrbCookies.elementAt(nonKrbIndex));
            totalCookies[j] = nonKrbCookies.elementAt(nonKrbIndex);
            nonKrbIndex++;
        }
    }
    userSession.setCookies(totalCookies);
}
From source file:org.apache.axis.wsdl.toJava.JavaGeneratorFactory.java
/**
 * Messages, PortTypes, Bindings, and Services can share the same name. If they do in this
 * Definition, force their names to be suffixed with _PortType and _Service, respectively.
 *
 * @param symbolTable
 */
protected void resolveNameClashes(SymbolTable symbolTable) {
    // Keep a list of anonymous types so we don't try to resolve them twice.
    HashSet anonTypes = new HashSet();
    List collisionCandidates = new ArrayList(); // List of vector of SymbolTable entry
    List localParts = new ArrayList(); // all localparts in all symboltable entries
    for (Iterator i = symbolTable.getHashMap().keySet().iterator(); i.hasNext();) {
        QName qName = (QName) i.next();
        String localPart = qName.getLocalPart();
        if (!localParts.contains(localPart))
            localParts.add(localPart);
    }
    Map pkg2NamespacesMap = emitter.getNamespaces().getPkg2NamespacesMap();
    for (Iterator i = pkg2NamespacesMap.values().iterator(); i.hasNext();) {
        Vector namespaces = (Vector) i.next(); // namepaces mapped to same package
        // Combine entry vectors, which have the same entry name, into a new entry vector.
        for (int j = 0; j < localParts.size(); j++) {
            Vector v = new Vector();
            for (int k = 0; k < namespaces.size(); k++) {
                QName qName = new QName((String) namespaces.get(k), (String) localParts.get(j));
                if (symbolTable.getHashMap().get(qName) != null) {
                    v.addAll((Vector) symbolTable.getHashMap().get(qName));
                }
            }
            if (v.size() > 0) {
                collisionCandidates.add(v);
            }
        }
    }
    Iterator it = collisionCandidates.iterator();
    while (it.hasNext()) {
        Vector v = new Vector((Vector) it.next()); // New vector we can temporarily add to it
        // Remove MessageEntries since they are not mapped
        int index = 0;
        while (index < v.size()) {
            if (v.elementAt(index) instanceof MessageEntry) {
                // Need to resolve a Exception message.
                MessageEntry msgEntry = (MessageEntry) v.elementAt(index);
                if (msgEntry.getDynamicVar(EXCEPTION_CLASS_NAME) == null) {
                    v.removeElementAt(index);
                } else {
                    index++;
                }
            } else {
                index++;
            }
        }
        if (v.size() > 1) {
            boolean resolve = true;
            // Common Special Case:
            // If a Type and Element have the same QName, and the Element
            // references the Type, then they are the same class so
            // don't bother mangling.
            if (v.size() == 2 && ((v.elementAt(0) instanceof Element && v.elementAt(1) instanceof Type)
                    || (v.elementAt(1) instanceof Element && v.elementAt(0) instanceof Type))) {
                Element e;
                if (v.elementAt(0) instanceof Element) {
                    e = (Element) v.elementAt(0);
                } else {
                    e = (Element) v.elementAt(1);
                }
                BooleanHolder forElement = new BooleanHolder();
                QName eType = Utils.getTypeQName(e.getNode(), forElement, false);
                if ((eType != null) && !forElement.value) {
                    resolve = false;
                }
            }
            // Other Special Case:
            // If the names are already different, no mangling is needed.
            if (resolve) {
                resolve = false; // Assume false
                String name = null;
                for (int i = 0; (i < v.size()) && !resolve; ++i) {
                    SymTabEntry entry = (SymTabEntry) v.elementAt(i);
                    if ((entry instanceof MessageEntry) || (entry instanceof BindingEntry)) {
                        // Need to resolve a exception class name
                        String exceptionClassName = (String) entry.getDynamicVar(EXCEPTION_CLASS_NAME);
                        if (exceptionClassName != null) {
                            if (name == null) {
                                name = exceptionClassName;
                            } else if (name.equals(exceptionClassName)) {
                                resolve = true;
                            }
                        }
                    } else if (name == null) {
                        name = entry.getName();
                    } else if (name.equals(entry.getName())) {
                        resolve = true; // Need to do resolution
                    }
                }
            }
            // Full Mangle if resolution is necessary.
            if (resolve) {
                boolean firstType = true;
                for (int i = 0; i < v.size(); ++i) {
                    SymTabEntry entry = (SymTabEntry) v.elementAt(i);
                    if (entry instanceof Element) {
                        entry.setName(mangleName(entry.getName(), ELEMENT_SUFFIX));
                        // If this global element was defined using
                        // an anonymous type, then need to change the
                        // java name of the anonymous type to match.
                        QName anonQName = new QName(entry.getQName().getNamespaceURI(),
                                SymbolTable.ANON_TOKEN + entry.getQName().getLocalPart());
                        TypeEntry anonType = symbolTable.getType(anonQName);
                        if (anonType != null) {
                            anonType.setName(entry.getName());
                            anonTypes.add(anonType);
                        }
                    } else if (entry instanceof TypeEntry) {
                        // Search all other types for java names that match this one.
                        // The sameJavaClass method returns true if the java names are
                        // the same (ignores [] ).
                        if (firstType) {
                            firstType = false;
                            Iterator types = symbolTable.getTypeIndex().values().iterator();
                            while (types.hasNext()) {
                                TypeEntry type = (TypeEntry) types.next();
                                if ((type != entry) && (type.getBaseType() == null)
                                        && sameJavaClass(entry.getName(), type.getName())) {
                                    v.add(type);
                                }
                            }
                        }
                        // If this is an anonymous type, it's name was resolved in
                        // the previous if block. Don't reresolve it.
                        if (!anonTypes.contains(entry)) {
                            // In case that other entry in name collision among
                            // PortTypeEntry, ServiceEntry and BindingEntry
                            boolean needResolve = false;
                            // check collision of TypeEntry with PortTypeEntry, ServiceEtnry and/or BindingEntry
                            for (int j = 0; j < v.size(); j++) {
                                SymTabEntry e = (SymTabEntry) v.elementAt(j);
                                if ((e instanceof PortTypeEntry || e instanceof ServiceEntry
                                        || e instanceof BindingEntry)) {
                                    needResolve = true;
                                    break;
                                }
                            }
                            if (!needResolve) {
                                continue;
                            }
                            // Appended Suffix for avoiding name collisions (JAX-RPC 1.1)
                            Boolean isComplexTypeFault = (Boolean) entry.getDynamicVar(COMPLEX_TYPE_FAULT);
                            if ((isComplexTypeFault != null) && isComplexTypeFault.booleanValue()) {
                                entry.setName(mangleName(entry.getName(), EXCEPTION_SUFFIX));
                            } else {
                                entry.setName(mangleName(entry.getName(), TYPE_SUFFIX));
                            }
                            // should update the class name of ElementEntry which references this type entry
                            Map elementIndex = symbolTable.getElementIndex();
                            List elements = new ArrayList(elementIndex.values());
                            for (int j = 0; j < elementIndex.size(); j++) {
                                TypeEntry te = (TypeEntry) elements.get(j);
                                TypeEntry ref = te.getRefType();
                                if (ref != null && entry.getQName().equals(ref.getQName())) {
                                    te.setName(entry.getName());
                                }
                            }
                            // Need to resolve a complex-type exception message.
                            if ((isComplexTypeFault != null) && isComplexTypeFault.booleanValue()) {
                                // SHOULD update the exception class name of a referencing message entry.
                                List messageEntries = symbolTable.getMessageEntries();
                                for (int j = 0; j < messageEntries.size(); j++) {
                                    MessageEntry messageEntry = (MessageEntry) messageEntries.get(j);
                                    Boolean isComplexTypeFaultMsg = (Boolean) messageEntry
                                            .getDynamicVar(COMPLEX_TYPE_FAULT);
                                    if ((isComplexTypeFaultMsg != null) && (isComplexTypeFaultMsg.booleanValue())) {
                                        QName exceptionDataType = (QName) messageEntry
                                                .getDynamicVar(EXCEPTION_DATA_TYPE);
                                        if (((TypeEntry) entry).getQName().equals(exceptionDataType)) {
                                            String className = (String) messageEntry
                                                    .getDynamicVar(EXCEPTION_CLASS_NAME);
                                            messageEntry.setDynamicVar(EXCEPTION_CLASS_NAME,
                                                    className + EXCEPTION_SUFFIX);
                                        }
                                    }
                                }
                            }
                        }
                    } else if (entry instanceof PortTypeEntry) {
                        entry.setName(mangleName(entry.getName(), PORT_TYPE_SUFFIX)); // "_Port" --> "_PortType" for JAX-RPC 1.1
                    } else if (entry instanceof ServiceEntry) {
                        entry.setName(mangleName(entry.getName(), SERVICE_SUFFIX));
                    } else if (entry instanceof MessageEntry) {
                        Boolean complexTypeFault = (Boolean) entry.getDynamicVar(COMPLEX_TYPE_FAULT);
                        if ((complexTypeFault == null) || !complexTypeFault.booleanValue()) {
                            String exceptionClassName = (String) entry.getDynamicVar(EXCEPTION_CLASS_NAME);
                            entry.setDynamicVar(EXCEPTION_CLASS_NAME, exceptionClassName + EXCEPTION_SUFFIX);
                        }
                    }
                    // else if (entry instanceof MessageEntry) {
                    //     we don't care about messages
                    // }
                    else if (entry instanceof BindingEntry) {
                        BindingEntry bEntry = (BindingEntry) entry;
                        // If there is no literal use, then we never see a
                        // class named directly from the binding name. They
                        // all have suffixes: Stub, Skeleton, Impl.
                        // If there IS literal use, then the SDI will be
                        // named after the binding name, so there is the
                        // possibility of a name clash.
                        if (bEntry.hasLiteral()) {
                            entry.setName(mangleName(entry.getName(), BINDING_SUFFIX));
                        }
                    }
                }
            }
        }
    }
}
From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java
/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements using
 * {@link TextSerializable#fromText(Text)} and build the tree. Also writes the
 * created tree to the disk directly.
 *
 * @param element_bytes
 *            - serialization of all elements separated by new lines
 * @param offset
 *            - offset of the first byte to use in elements_bytes
 * @param len
 *            - number of bytes to use in elements_bytes
 * @param degree
 *            - Degree of the R-tree to build in terms of number of children per node
 * @param dataOut
 *            - output stream to write the result to.
 * @param fast_sort
 *            - setting this to <code>true</code> allows the method to run
 *            faster by materializing the offset of each element in the list
 *            which speeds up the comparison. However, this requires an
 *            additional 16 bytes per element. So, for each 1M elements, the
 *            method will require an additional 16 M bytes (approximately).
 */
public static void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final Shape stockObject, final boolean fast_sort) {
    try {
        int elementCount = 0;
        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);
            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out the findBestDegree returns the best degree when the whole
        // tree is loaded to memory when processed. However, as current algorithms
        // process the tree while it's on disk, a higher degree should be selected
        // such that a node fits one file block (assumed to be 4K).
        // final int degree = findBestDegree(bytesAvailable, elementCount);
        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line with end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
            }
            i_start = i_end;
        }

        /** A struct to store information about a split */
        class SplitStruct extends Rectangle {
            /** Start and end index for this split */
            int index1, index2;
            /** Direction of this split */
            byte direction;
            /** Index of first element on disk */
            int offsetOfFirstElement;

            static final byte DIRECTION_X = 0;
            static final byte DIRECTION_Y = 1;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use materialized xs[] and ys[] to do the comparisons
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys. Always deserialize objects to compare
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();
                final IndexedSortable[] sortables = new IndexedSortable[2];
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition into maxEntries partitions (equally) and
                // create a SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit = new SplitStruct(i1, i2, (byte) (1 - direction));
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = null;
        try {
            fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
                // Null output stream
                @Override
                public void write(int b) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b, int off, int len) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b) throws IOException {
                    // Do nothing
                }
            }, null, TreeHeaderSize + nodes.size() * NodeSize);

            for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
                nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
                if (i != nodes.elementAt(i_leaf).index1)
                    throw new RuntimeException();
                double x1, y1, x2, y2;

                // Initialize MBR to first object
                int eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                Rectangle mbr = stockObject.getMBR();
                x1 = mbr.x1;
                y1 = mbr.y1;
                x2 = mbr.x2;
                y2 = mbr.y2;
                i++;

                while (i < nodes.elementAt(i_leaf).index2) {
                    eol = skipToEOL(element_bytes, offsets[i]);
                    fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                    line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                    stockObject.fromText(line);
                    mbr = stockObject.getMBR();
                    if (mbr.x1 < x1)
                        x1 = mbr.x1;
                    if (mbr.y1 < y1)
                        y1 = mbr.y1;
                    if (mbr.x2 > x2)
                        x2 = mbr.x2;
                    if (mbr.y2 > y2)
                        y2 = mbr.y2;
                    i++;
                }
                nodes.elementAt(i_leaf).set(x1, y1, x2, y2);
            }
        } finally {
            if (fakeOut != null)
                fakeOut.close();
        }

        // Calculate MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Rectangle mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(x1, y1, x2, y2);
        }

        // Start writing the tree
        // write tree header (including size)
        // Total tree size. (== Total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
        for (int element_i = 0; element_i < elementCount; element_i++) {
            int eol = skipToEOL(element_bytes, offsets[element_i]);
            dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:gov.nih.nci.evs.browser.utils.SourceTreeUtils.java
public void getRootConceptsBySource(String outputfile) {
    PrintWriter pw = openPrintWriter(outputfile);
    if (pw == null)
        return;
    long ms = System.currentTimeMillis();
    _logger.debug(outputfile + " opened.");
    _logger.debug("Writing root concepts by source - please wait.");
    Vector src_with_roots = new Vector();
    Vector src_without_roots = new Vector();
    String scheme = "NCI Metathesaurus";
    String version = null;
    CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
    String relation = null;
    // String association = "CHD";
    boolean searchInactive = true;
    // Vector sources = getSupportedSources(scheme, version);
    Vector sources = getSupportedSources(scheme, csvt);
    if (sources != null) {
        pw.println("Number of NCIm Supported Sources: " + sources.size());
    } else {
        pw.println("getSupportedSources returns null??? ");
        return;
    }
    sources = SortUtils.quickSort(sources);
    System.out.println("sources.size(): " + sources.size());
    int max = sources.size();
    int lcv = 0;
    for (int k = 0; k < max; k++) {
        lcv++;
        String source = (String) sources.elementAt(k);
        System.out.println("(" + lcv + ") " + source);
        int k1 = k + 1;
        pw.println("\n(" + k1 + ")" + source);
        _logger.debug("(" + k1 + ")" + source);
        TreeItem ti = getSourceTree(source);
        if (ti != null && ti._expandable) {
            printTree(pw, ti, 0);
            String json = getSourceRootsJSON(ti._code, source);
            pw.println(json);
            src_with_roots.add(source);
        } else {
            src_without_roots.add(source);
        }
    }
    pw.println("Sources with roots: ");
    StringBuffer buf = new StringBuffer();
    buf.append("|");
    for (int i = 0; i < src_with_roots.size(); i++) {
        String src = (String) src_with_roots.elementAt(i);
        int j = i + 1;
        pw.println("(" + j + "): " + src);
        // t = t + src + "|";
        buf.append(src + "|");
    }
    String t = buf.toString();
    pw.println(t);
    pw.println("\nSources without roots: ");
    // t = "|";
    buf = new StringBuffer();
    for (int i = 0; i < src_without_roots.size(); i++) {
        String src = (String) src_without_roots.elementAt(i);
        int j = i + 1;
        pw.println("(" + j + "): " + src);
        // t = t + src + "|";
        buf.append(src + "|");
    }
    t = buf.toString();
    pw.println(t);
    closeWriter(pw);
    _logger.debug("Output file " + outputfile + " generated.");
    _logger.debug("Run time (ms): " + (System.currentTimeMillis() - ms));
}
From source file:org.ecoinformatics.seek.datasource.eml.eml2.Eml200Parser.java
private TextDelimitedDataFormat handleComplexDelimitedDataFormatNode(Node node) throws Exception {
    TextDelimitedDataFormat format = null;
    if (node == null) {
        return format;
    }
    NodeList children = node.getChildNodes();
    int length = children.getLength();
    Vector quoteList = new Vector();
    for (int i = 0; i < length; i++) {
        Node kid = children.item(i);
        String elementName = kid.getNodeName();
        if (elementName != null && elementName.equals("fieldDelimiter")) {
            String fieldDelimiter = kid.getFirstChild().getNodeValue();
            if (isDebugging) {
                log.debug("The filed delimiter for complex format in eml is " + fieldDelimiter);
            }
            format = new TextDelimitedDataFormat(fieldDelimiter);
        } else if (elementName != null && elementName.equals("lineNumber") && format != null) {
            String lineNumberStr = kid.getFirstChild().getNodeValue();
            int lineNumber = (new Integer(lineNumberStr)).intValue();
            if (isDebugging) {
                log.debug("The line number is " + lineNumber);
            }
            format.setLineNumber(lineNumber);
        } else if (elementName != null && elementName.equals("collapseDelimiter") && format != null) {
            String collapse = kid.getFirstChild().getNodeValue();
            if (isDebugging) {
                log.debug("The collapse delimiter " + collapse);
            }
            format.setCollapseDelimiter(collapse);
        } else if (elementName != null && elementName.equals("quoteCharacter") && format != null) {
            String quote = kid.getFirstChild().getNodeValue();
            quoteList.add(quote);
        }
    }
    // set up quoteList
    if (format != null) {
        int size = quoteList.size();
        String[] quoteArray = new String[size];
        for (int i = 0; i < size; i++) {
            quoteArray[i] = (String) quoteList.elementAt(i);
        }
        format.setQuoteCharater(quoteArray);
    }
    return format;
}
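Editorial aside: the copy loop above pulls each quote character out with elementAt; the same element-by-element copy can also be expressed with Vector.toArray. A minimal, self-contained sketch under that assumption (the class name and values are illustrative, not taken from Eml200Parser):

import java.util.Vector;

public class QuoteListDemo {
    public static void main(String[] args) {
        Vector<String> quoteList = new Vector<String>();
        quoteList.add("\"");
        quoteList.add("'");
        // toArray performs the same copy the loop above does by hand with elementAt
        String[] quoteArray = quoteList.toArray(new String[quoteList.size()]);
        System.out.println(quoteArray.length + " quote characters");
    }
}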
From source file:org.apache.jasper.compiler.JspUtil.java
/**
 * Checks if all mandatory attributes are present and if all attributes
 * present have valid names. Checks attributes specified as XML-style
 * attributes as well as attributes specified using the jsp:attribute
 * standard action.
 */
public static void checkAttributes(String typeOfTag, Node n, ValidAttribute[] validAttributes,
        ErrorDispatcher err) throws JasperException {
    Attributes attrs = n.getAttributes();
    Mark start = n.getStart();
    boolean valid = true;

    // AttributesImpl.removeAttribute is broken, so we do this...
    int tempLength = (attrs == null) ? 0 : attrs.getLength();
    Vector temp = new Vector(tempLength, 1);
    for (int i = 0; i < tempLength; i++) {
        String qName = attrs.getQName(i);
        if ((!qName.equals("xmlns")) && (!qName.startsWith("xmlns:")))
            temp.addElement(qName);
    }

    // Add names of attributes specified using jsp:attribute
    Node.Nodes tagBody = n.getBody();
    if (tagBody != null) {
        int numSubElements = tagBody.size();
        for (int i = 0; i < numSubElements; i++) {
            Node node = tagBody.getNode(i);
            if (node instanceof Node.NamedAttribute) {
                String attrName = node.getAttributeValue("name");
                temp.addElement(attrName);
                // Check if this value appear in the attribute of the node
                if (n.getAttributeValue(attrName) != null) {
                    err.jspError(n, "jsp.error.duplicate.name.jspattribute", attrName);
                }
            } else {
                // Nothing can come before jsp:attribute, and only
                // jsp:body can come after it.
                break;
            }
        }
    }

    /*
     * First check to see if all the mandatory attributes are present.
     * If so only then proceed to see if the other attributes are valid
     * for the particular tag.
     */
    String missingAttribute = null;
    for (int i = 0; i < validAttributes.length; i++) {
        int attrPos;
        if (validAttributes[i].mandatory) {
            attrPos = temp.indexOf(validAttributes[i].name);
            if (attrPos != -1) {
                temp.remove(attrPos);
                valid = true;
            } else {
                valid = false;
                missingAttribute = validAttributes[i].name;
                break;
            }
        }
    }

    // If mandatory attribute is missing then the exception is thrown
    if (!valid)
        err.jspError(start, "jsp.error.mandatory.attribute", typeOfTag, missingAttribute);

    // Check to see if there are any more attributes for the specified tag.
    int attrLeftLength = temp.size();
    if (attrLeftLength == 0)
        return;

    // Now check to see if the rest of the attributes are valid too.
    String attribute = null;
    for (int j = 0; j < attrLeftLength; j++) {
        valid = false;
        attribute = (String) temp.elementAt(j);
        for (int i = 0; i < validAttributes.length; i++) {
            if (attribute.equals(validAttributes[i].name)) {
                valid = true;
                break;
            }
        }
        if (!valid)
            err.jspError(start, "jsp.error.invalid.attribute", typeOfTag, attribute);
    }
    // XXX *could* move EL-syntax validation here... (sb)
}