Usage examples for java.util.LinkedList.removeFirst(), collected from open-source projects.
public E removeFirst()
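removeFirst() removes and returns the head (first element) of the list, and throws NoSuchElementException when the list is empty; the related pollFirst() returns null instead of throwing. A minimal stand-alone sketch of that contract (class and variable names here are illustrative only, not taken from the projects below):

import java.util.LinkedList;
import java.util.NoSuchElementException;

public class RemoveFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> tasks = new LinkedList<>();
        tasks.add("first");
        tasks.add("second");

        // removeFirst() removes and returns the head of the list
        String head = tasks.removeFirst();
        System.out.println(head);      // first
        System.out.println(tasks);     // [second]

        tasks.removeFirst();           // list is now empty
        try {
            tasks.removeFirst();       // throws on an empty list
        } catch (NoSuchElementException e) {
            System.out.println("empty list: " + e);
        }
    }
}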
From source file: gate.creole.tokeniser.SimpleTokeniser.java
/**
 * Converts the finite state machine to a deterministic one.
 *
 * @param s
 */
private AbstractSet<FSMState> lambdaClosure(Set<FSMState> s) {
    //the stack/queue used by the algorithm
    LinkedList<FSMState> list = new LinkedList<FSMState>(s);
    //the set to be returned
    AbstractSet<FSMState> lambdaClosure = new HashSet<FSMState>(s);
    FSMState top;
    FSMState currentState;
    Set<FSMState> nextStates;
    Iterator<FSMState> statesIter;

    while (!list.isEmpty()) {
        top = list.removeFirst();
        nextStates = top.nextSet(null);
        if (null != nextStates) {
            statesIter = nextStates.iterator();
            while (statesIter.hasNext()) {
                currentState = statesIter.next();
                if (!lambdaClosure.contains(currentState)) {
                    lambdaClosure.add(currentState);
                    list.addFirst(currentState);
                } //if(!lambdaClosure.contains(currentState))
            } //while(statesIter.hasNext())
        } //if(null != nextStates)
    }
    return lambdaClosure;
}
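The example above uses the LinkedList as a worklist: removeFirst() pops the next state and addFirst() pushes newly discovered states, so the closure is computed without recursion. A stripped-down sketch of that worklist pattern over a generic successor function (the closure() helper and the successors parameter are placeholders for illustration, not part of the GATE code):

import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.function.Function;

public class Closure {
    // Collects every node reachable from the seed set via the successor function.
    static <T> Set<T> closure(Set<T> seeds, Function<T, Set<T>> successors) {
        LinkedList<T> worklist = new LinkedList<>(seeds);
        Set<T> closed = new HashSet<>(seeds);
        while (!worklist.isEmpty()) {
            T next = worklist.removeFirst();      // pop the next item to expand
            for (T succ : successors.apply(next)) {
                if (closed.add(succ)) {           // add() returns true only for unseen nodes
                    worklist.addFirst(succ);      // push unseen nodes back onto the worklist
                }
            }
        }
        return closed;
    }

    public static void main(String[] args) {
        // Toy successor relation: 1 -> {2, 3}, 2 -> {3}, everything else -> {}
        Function<Integer, Set<Integer>> succ = n -> n == 1 ? Set.of(2, 3) : n == 2 ? Set.of(3) : Set.of();
        System.out.println(closure(Set.of(1), succ));   // [1, 2, 3] in some order
    }
}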
From source file: ORG.oclc.os.SRW.SRWDatabase.java
public static SRWDatabase getDB(String dbname, Properties properties, String servletContext,
        HttpServletRequest request) {
    log.debug("enter SRWDatabase.getDB");
    if (badDbs.get(dbname) != null) // we've seen this one before
        return null;
    LinkedList<SRWDatabase> queue = dbs.get(dbname);
    SRWDatabase db = null;
    try {
        if (queue == null)
            log.info("No databases created yet for database " + dbname);
        else {
            log.debug("about to synchronize #1 on queue");
            synchronized (queue) {
                if (queue.isEmpty())
                    log.info("No databases available for database " + dbname);
                else {
                    db = queue.removeFirst();
                    if (db == null)
                        log.debug("popped a null database off the queue for database " + dbname);
                }
            }
            log.debug("done synchronize #1 on queue");
        }
        if (db == null) {
            log.info("creating a database for " + dbname);
            try {
                while (db == null) {
                    createDB(dbname, properties, servletContext, request);
                    queue = dbs.get(dbname);
                    log.debug("about to synchronize #2 on queue");
                    synchronized (queue) {
                        if (!queue.isEmpty()) // crap, someone got to it before us
                            db = queue.removeFirst();
                    }
                }
                log.debug("done synchronize #2 on queue");
            } catch (Exception e) { // database not available
                badDbs.put(dbname, dbname);
                log.error(e, e);
                return null;
            }
        }
    } catch (Exception e) {
        log.error(e, e);
        log.error("shoot!");
    }
    if (log.isDebugEnabled())
        log.debug("getDB: db=" + db);
    log.debug("exit SRWDatabase.getDB");
    return db;
}
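Here removeFirst() acts as a "check out" operation on a per-database pool of SRWDatabase instances, guarded by synchronizing on the queue itself. A minimal sketch of that pool idiom under the same locking scheme (the generic SimplePool class is illustrative; in new code a java.util.concurrent.BlockingQueue is usually the better fit):

import java.util.LinkedList;

public class SimplePool<T> {
    private final LinkedList<T> idle = new LinkedList<>();

    // Returns a pooled instance, or null if none is currently available.
    public T checkOut() {
        synchronized (idle) {
            return idle.isEmpty() ? null : idle.removeFirst();
        }
    }

    // Puts an instance back into the pool.
    public void checkIn(T resource) {
        synchronized (idle) {
            idle.addLast(resource);
        }
    }
}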
From source file: appeng.items.tools.powered.ToolColorApplicator.java
private ItemStack findNextColor(final ItemStack is, final ItemStack anchor, final int scrollOffset) {
    ItemStack newColor = null;

    final IMEInventory<IAEItemStack> inv = AEApi.instance().registries().cell().getCellInventory(is, null,
            StorageChannel.ITEMS);

    if (inv != null) {
        final IItemList<IAEItemStack> itemList = inv
                .getAvailableItems(AEApi.instance().storage().createItemList());

        if (anchor == null) {
            final IAEItemStack firstItem = itemList.getFirstItem();
            if (firstItem != null) {
                newColor = firstItem.getItemStack();
            }
        } else {
            final LinkedList<IAEItemStack> list = new LinkedList<IAEItemStack>();

            for (final IAEItemStack i : itemList) {
                list.add(i);
            }

            Collections.sort(list, new Comparator<IAEItemStack>() {
                @Override
                public int compare(final IAEItemStack a, final IAEItemStack b) {
                    return ItemSorters.compareInt(a.getItemDamage(), b.getItemDamage());
                }
            });

            if (list.size() <= 0) {
                return null;
            }

            IAEItemStack where = list.getFirst();
            int cycles = 1 + list.size();

            while (cycles > 0 && !where.equals(anchor)) {
                list.addLast(list.removeFirst());
                cycles--;
                where = list.getFirst();
            }

            if (scrollOffset > 0) {
                list.addLast(list.removeFirst());
            }

            if (scrollOffset < 0) {
                list.addFirst(list.removeLast());
            }

            return list.get(0).getItemStack();
        }
    }

    if (newColor != null) {
        this.setColor(is, newColor);
    }

    return newColor;
}
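The combination list.addLast(list.removeFirst()) rotates the list left by one position, which is how the code above cycles through stored colours until the anchor item reaches the front. The rotation idiom in isolation (a sketch, with plain strings standing in for IAEItemStack):

import java.util.Arrays;
import java.util.LinkedList;

public class Rotate {
    public static void main(String[] args) {
        LinkedList<String> colors = new LinkedList<>(Arrays.asList("red", "green", "blue"));

        // Rotate left: the head moves to the tail.
        colors.addLast(colors.removeFirst());
        System.out.println(colors);   // [green, blue, red]

        // Rotate right: the tail moves back to the head.
        colors.addFirst(colors.removeLast());
        System.out.println(colors);   // [red, green, blue]
    }
}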
From source file: org.vaadin.addons.sitekit.module.content.ContentModule.java
@Override
public void injectDynamicContent(final SiteDescriptor dynamicSiteDescriptor) {
    final Company company = Site.getCurrent().getSiteContext().getObject(Company.class);
    final EntityManager entityManager = Site.getCurrent().getSiteContext().getObject(EntityManager.class);
    final User user = ((SecurityProviderSessionImpl) Site.getCurrent().getSecurityProvider())
            .getUserFromSession();
    final List<Group> groups;
    if (user == null) {
        groups = new ArrayList<Group>();
        groups.add(UserDao.getGroup(entityManager, company, "anonymous"));
    } else {
        groups = UserDao.getUserGroups(entityManager, company, user);
    }
    final List<Content> contents = ContentDao.getContens(entityManager, company);

    final LinkedList<Content> queue = new LinkedList<Content>();
    final Map<String, List<Content>> dependencies = new HashMap<String, List<Content>>();
    for (final Content content : contents) {
        final String dependency;
        if (!StringUtils.isEmpty(content.getAfterPage())) {
            dependency = content.getAfterPage();
        } else if (!StringUtils.isEmpty(content.getParentPage())) {
            dependency = content.getParentPage();
        } else {
            dependency = null;
        }
        if (dependency != null) {
            if (!dependencies.containsKey(dependency)) {
                dependencies.put(dependency, new ArrayList<Content>());
            }
            dependencies.get(dependency).add(content);
        } else {
            queue.add(content);
        }
    }

    final List<Content> ordered = new ArrayList<Content>();
    while (queue.size() > 0) {
        final Content content = queue.removeFirst();
        ordered.add(content);
        if (dependencies.containsKey(content.getPage())) {
            queue.addAll(dependencies.get(content.getPage()));
        }
    }

    final NavigationVersion navigationVersion = dynamicSiteDescriptor.getNavigation().getProductionVersion();

    for (final Content content : ordered) {
        boolean viewPrivilege = PrivilegeCache.hasPrivilege(company, user, "view", content.getContentId());
        if (!viewPrivilege) {
            for (final Group group : groups) {
                if (PrivilegeCache.hasPrivilege(company, group, "view", content.getContentId())) {
                    viewPrivilege = true;
                    break;
                }
            }
        }
        if (!viewPrivilege) {
            continue;
        }

        boolean editPrivilege = UserDao.hasUserPrivilege(entityManager, user, "edit", content.getContentId());
        if (!editPrivilege) {
            for (final Group group : groups) {
                if (UserDao.hasGroupPrivilege(entityManager, group, "edit", content.getContentId())) {
                    editPrivilege = true;
                    break;
                }
            }
        }

        final String page = content.getPage();
        if (page == null) {
            continue;
        }
        final String parentPage = content.getParentPage();
        final String afterPage = content.getAfterPage();
        final String title = content.getTitle();
        final MarkupType markupType = content.getMarkupType();
        final String markup = content.getMarkup();

        if (StringUtils.isEmpty(parentPage)) {
            if (StringUtils.isEmpty(afterPage)) {
                navigationVersion.addRootPage(0, page);
                navigationVersion.setDefaultPageName(page);
            } else {
                navigationVersion.addRootPage(afterPage, page);
            }
        } else {
            if (StringUtils.isEmpty(afterPage)) {
                navigationVersion.addChildPage(parentPage, page);
            } else {
                navigationVersion.addChildPage(parentPage, afterPage, page);
            }
        }

        // Describe content view.
        final ViewDescriptor viewDescriptor = new ViewDescriptor(page, title, DefaultView.class);
        viewDescriptor.getProductionVersion().setDynamic(true);
        if (editPrivilege) {
            viewDescriptor.setViewletClass("content", RenderFlow.class, content);
        } else {
            viewDescriptor.setViewletClass("content", RenderViewlet.class, markup);
        }
        dynamicSiteDescriptor.getViewDescriptors().add(viewDescriptor);
    }
}
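The queue in this example drives a simple dependency expansion: pages with no parent/after dependency are enqueued first, and each page dequeued with removeFirst() releases the pages that were waiting on it. A reduced sketch of that ordering loop (String page names and a precomputed dependants map are assumed for illustration):

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class DependencyOrder {
    // Orders items so that every item appears after the item it depends on.
    static List<String> order(List<String> roots, Map<String, List<String>> dependants) {
        LinkedList<String> queue = new LinkedList<>(roots);
        List<String> ordered = new ArrayList<>();
        while (!queue.isEmpty()) {
            String next = queue.removeFirst();
            ordered.add(next);
            // Enqueue everything that was waiting for this item.
            queue.addAll(dependants.getOrDefault(next, List.of()));
        }
        return ordered;
    }

    public static void main(String[] args) {
        Map<String, List<String>> dependants =
                Map.of("home", List.of("about", "blog"), "blog", List.of("post-1"));
        System.out.println(order(List.of("home"), dependants));   // [home, about, blog, post-1]
    }
}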
From source file: org.bubblecloud.ilves.module.content.ContentModule.java
@Override
public void injectDynamicContent(final SiteDescriptor dynamicSiteDescriptor) {
    final Company company = Site.getCurrent().getSiteContext().getObject(Company.class);
    final EntityManager entityManager = Site.getCurrent().getSiteContext().getObject(EntityManager.class);
    final User user = ((SecurityProviderSessionImpl) Site.getCurrent().getSecurityProvider())
            .getUserFromSession();
    final List<Group> groups;
    if (user == null) {
        groups = new ArrayList<Group>();
        groups.add(UserDao.getGroup(entityManager, company, "anonymous"));
    } else {
        groups = UserDao.getUserGroups(entityManager, company, user);
    }
    final List<Content> contents = ContentDao.getContens(entityManager, company);

    final LinkedList<Content> queue = new LinkedList<Content>();
    final Map<String, List<Content>> dependencies = new HashMap<String, List<Content>>();
    for (final Content content : contents) {
        final String dependency;
        if (!StringUtils.isEmpty(content.getAfterPage())) {
            dependency = content.getAfterPage();
        } else if (!StringUtils.isEmpty(content.getParentPage())) {
            dependency = content.getParentPage();
        } else {
            dependency = null;
        }
        if (dependency != null) {
            if (!dependencies.containsKey(dependency)) {
                dependencies.put(dependency, new ArrayList<Content>());
            }
            dependencies.get(dependency).add(content);
        } else {
            queue.add(content);
        }
    }

    final List<Content> ordered = new ArrayList<Content>();
    while (queue.size() > 0) {
        final Content content = queue.removeFirst();
        ordered.add(content);
        if (dependencies.containsKey(content.getPage())) {
            queue.addAll(dependencies.get(content.getPage()));
        }
    }

    final NavigationVersion navigationVersion = dynamicSiteDescriptor.getNavigation().getProductionVersion();

    for (final Content content : ordered) {
        boolean viewPrivilege = PrivilegeCache.hasPrivilege(entityManager, company, user, "view",
                content.getContentId());
        if (!viewPrivilege) {
            for (final Group group : groups) {
                if (PrivilegeCache.hasPrivilege(entityManager, company, group, "view",
                        content.getContentId())) {
                    viewPrivilege = true;
                    break;
                }
            }
        }
        if (!viewPrivilege) {
            continue;
        }

        boolean editPrivilege = UserDao.hasUserPrivilege(entityManager, user, "edit", content.getContentId());
        if (!editPrivilege) {
            for (final Group group : groups) {
                if (UserDao.hasGroupPrivilege(entityManager, group, "edit", content.getContentId())) {
                    editPrivilege = true;
                    break;
                }
            }
        }

        final String page = content.getPage();
        if (page == null) {
            continue;
        }
        final String parentPage = content.getParentPage();
        final String afterPage = content.getAfterPage();
        final String title = content.getTitle();
        final MarkupType markupType = content.getMarkupType();
        final String markup = content.getMarkup();

        if (StringUtils.isEmpty(parentPage)) {
            if (StringUtils.isEmpty(afterPage)) {
                navigationVersion.addRootPage(0, page);
                navigationVersion.setDefaultPageName(page);
            } else {
                navigationVersion.addRootPage(afterPage, page);
            }
        } else {
            if (StringUtils.isEmpty(afterPage)) {
                navigationVersion.addChildPage(parentPage, page);
            } else {
                navigationVersion.addChildPage(parentPage, afterPage, page);
            }
        }

        // Describe content view.
        final ViewDescriptor viewDescriptor = new ViewDescriptor(page, title, DefaultValoView.class);
        viewDescriptor.getProductionVersion().setDynamic(true);
        if (editPrivilege) {
            viewDescriptor.setViewletClass("content", RenderFlow.class, content);
        } else {
            viewDescriptor.setViewletClass("content", RenderViewlet.class, markup);
        }
        dynamicSiteDescriptor.getViewDescriptors().add(viewDescriptor);
    }
}
From source file: org.deeplearning4j.nn.conf.ComputationGraphConfiguration.java
/**
 * Add preprocessors automatically, given the specified types of inputs for the network. Inputs are specified using the
 * {@link InputType} class, in the same order in which the inputs were defined in the original configuration.<br>
 * For example, in a network with two inputs: a convolutional input (28x28x1 images) and feed forward inputs, use
 * {@code .addPreProcessors(InputType.convolutional(1,28,28),InputType.feedForward())}.<br>
 * For the CNN->Dense and CNN->RNN transitions, the nIns on the Dense/RNN layers will also be added automatically.
 * <b>NOTE</b>: This method will be called automatically when using the
 * {@link org.deeplearning4j.nn.conf.ComputationGraphConfiguration.GraphBuilder#setInputTypes(InputType...)} functionality.
 * See that method for details.
 */
public void addPreProcessors(InputType... inputTypes) {
    if (inputTypes == null || inputTypes.length != networkInputs.size()) {
        throw new IllegalArgumentException(
                "Invalid number of InputTypes: cannot add preprocessors if number of InputType "
                        + "objects differs from number of network inputs");
    }

    //Now: need to do essentially a forward pass through the network, to work out what type of preprocessors to add
    //To do this: need to know what the output types are for each GraphVertex.

    //First step: build network in reverse order (i.e., define map of a -> list(b) instead of list(a) -> b)
    Map<String, List<String>> verticesOutputTo = new HashMap<>(); //Key: vertex. Values: vertices that this node is an input for
    for (Map.Entry<String, GraphVertex> entry : vertices.entrySet()) {
        String vertexName = entry.getKey();
        List<String> vertexInputNames;
        vertexInputNames = vertexInputs.get(vertexName);

        if (vertexInputNames == null)
            continue;

        //Build reverse network structure:
        for (String s : vertexInputNames) {
            List<String> list = verticesOutputTo.get(s);
            if (list == null) {
                list = new ArrayList<>();
                verticesOutputTo.put(s, list);
            }
            list.add(vertexName); //Edge: s -> vertexName
        }
    }

    //Now: do topological sort
    LinkedList<String> noIncomingEdges = new LinkedList<>(networkInputs); //Set of all nodes with no incoming edges
    List<String> topologicalOrdering = new ArrayList<>();

    Map<String, Set<String>> inputEdges = new HashMap<>();
    for (Map.Entry<String, List<String>> entry : vertexInputs.entrySet()) {
        inputEdges.put(entry.getKey(), new HashSet<>(entry.getValue()));
    }

    while (noIncomingEdges.size() > 0) {
        String next = noIncomingEdges.removeFirst();
        topologicalOrdering.add(next);

        //Remove edges next -> vertexOuputsTo[...] from graph;
        List<String> nextEdges = verticesOutputTo.get(next);

        if (nextEdges != null && nextEdges.size() > 0) {
            for (String s : nextEdges) {
                Set<String> set = inputEdges.get(s);
                set.remove(next);
                if (set.size() == 0) {
                    noIncomingEdges.add(s); //No remaining edges for vertex i -> add to list for processing
                }
            }
        }
    }

    //If any edges remain in the graph: graph has cycles:
    for (Map.Entry<String, Set<String>> entry : inputEdges.entrySet()) {
        Set<String> set = entry.getValue();
        if (set == null)
            continue;
        if (set.size() > 0)
            throw new IllegalStateException(
                    "Invalid configuration: cycle detected in graph. Cannot calculate topological ordering with graph cycle ("
                            + "cycle includes vertex \"" + entry.getKey() + "\")");
    }

    //Now, given the topological sort: do equivalent of forward pass
    Map<String, InputType> vertexOutputs = new HashMap<>();
    for (String s : topologicalOrdering) {
        int inputIdx = networkInputs.indexOf(s);
        if (inputIdx != -1) {
            vertexOutputs.put(s, inputTypes[inputIdx]);
            continue;
        }
        GraphVertex gv = vertices.get(s);
        List<InputType> inputTypeList = new ArrayList<>();
        if (gv instanceof LayerVertex) {
            //Add preprocessor, if necessary:
            String in = vertexInputs.get(s).get(0);
            InputType layerInput = vertexOutputs.get(in);

            LayerVertex lv = (LayerVertex) gv;
            if (lv.getPreProcessor() != null)
                continue; //skip: preprocessor is manually defined
            Layer l = lv.getLayerConf().getLayer();
            if (l instanceof ConvolutionLayer || l instanceof SubsamplingLayer) {
                //To add x-to-CNN preprocessor: need to know image depth/width/height
                //But this can't be inferred from the FF/RNN activations directly (could be anything)
                switch (layerInput.getType()) {
                case FF:
                    //FF -> CNN
                    log.warn("Automatic addition of FF -> CNN preprocessors: not yet implemented (layer: " + s + ")");
                    break;
                case RNN:
                    //RNN -> CNN
                    log.warn("Automatic addition of RNN -> CNN preprocessors: not yet implemented (layer: " + s + ")");
                    break;
                case CNN:
                    //CNN -> CNN: no preprocessor required
                    //UNLESS: network input -> CNN layer. Input is in 2d format, not 4d format...
                    if (networkInputs.contains(vertexInputs.get(s).get(0))) {
                        InputType.InputTypeConvolutional conv = (InputType.InputTypeConvolutional) layerInput;
                        lv.setPreProcessor(new FeedForwardToCnnPreProcessor(conv.getHeight(), conv.getWidth(),
                                conv.getDepth()));
                    }
                    break;
                }
            } else if (l instanceof BaseRecurrentLayer || l instanceof RnnOutputLayer) {
                switch (layerInput.getType()) {
                case FF:
                    //FF -> RNN
                    lv.setPreProcessor(new FeedForwardToRnnPreProcessor());
                    //Also set nIn if possible:
                    setNInIfNecessary(lv, layerInput);
                    break;
                case RNN:
                    //RNN -> RNN: no preprocessor required. But set nIn if possible/required
                    setNInIfNecessary(lv, layerInput);
                    break;
                case CNN:
                    //CNN -> RNN
                    InputType.InputTypeConvolutional conv = (InputType.InputTypeConvolutional) layerInput;
                    lv.setPreProcessor(
                            new CnnToRnnPreProcessor(conv.getHeight(), conv.getWidth(), conv.getDepth()));
                    int nIn = conv.getHeight() * conv.getWidth() * conv.getDepth();
                    ((FeedForwardLayer) lv.getLayerConf().getLayer()).setNIn(nIn);
                    break;
                }
            } else {
                //Feed forward layer
                switch (layerInput.getType()) {
                case FF:
                    //FF -> FF: no preprocessor required. But set nIn if possible/required
                    setNInIfNecessary(lv, layerInput);
                    break;
                case RNN:
                    //RNN -> FF
                    lv.setPreProcessor(new RnnToFeedForwardPreProcessor());
                    //Set nIn if possible/required
                    setNInIfNecessary(lv, layerInput);
                    break;
                case CNN:
                    //CNN -> FF
                    InputType.InputTypeConvolutional conv = (InputType.InputTypeConvolutional) layerInput;
                    lv.setPreProcessor(new CnnToFeedForwardPreProcessor(conv.getHeight(), conv.getWidth(),
                            conv.getDepth()));
                    int nIn = conv.getHeight() * conv.getWidth() * conv.getDepth();
                    ((FeedForwardLayer) lv.getLayerConf().getLayer()).setNIn(nIn);
                    break;
                }
            }
            inputTypeList.add(layerInput);
        } else {
            List<String> inputs = vertexInputs.get(s);
            if (inputs != null) {
                for (String inputVertexName : inputs) {
                    inputTypeList.add(vertexOutputs.get(inputVertexName));
                }
            }
        }
        InputType outputFromVertex = gv
                .getOutputType(inputTypeList.toArray(new InputType[inputTypeList.size()]));
        vertexOutputs.put(s, outputFromVertex);
    }
}
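The topological sort in this example is Kahn's algorithm: vertices with no incoming edges seed a LinkedList, removeFirst() dequeues the next vertex, and each processed vertex may unlock its successors. A compact, self-contained version of the same algorithm over generic String vertices (not the DL4J graph types):

import java.util.*;

public class TopoSort {
    // edges maps each vertex to the vertices it points at; vertices without outgoing edges may be absent.
    static List<String> topologicalOrder(Set<String> vertices, Map<String, List<String>> edges) {
        Map<String, Integer> inDegree = new HashMap<>();
        for (String v : vertices) inDegree.put(v, 0);
        for (List<String> targets : edges.values())
            for (String t : targets) inDegree.merge(t, 1, Integer::sum);

        LinkedList<String> noIncoming = new LinkedList<>();
        for (String v : vertices) if (inDegree.get(v) == 0) noIncoming.add(v);

        List<String> order = new ArrayList<>();
        while (!noIncoming.isEmpty()) {
            String next = noIncoming.removeFirst();
            order.add(next);
            // Removing next's outgoing edges may leave some successors with no incoming edges.
            for (String t : edges.getOrDefault(next, List.of()))
                if (inDegree.merge(t, -1, Integer::sum) == 0) noIncoming.add(t);
        }
        if (order.size() != vertices.size())
            throw new IllegalStateException("cycle detected: no topological ordering exists");
        return order;
    }

    public static void main(String[] args) {
        Map<String, List<String>> edges = Map.of("in", List.of("dense"), "dense", List.of("out"));
        System.out.println(topologicalOrder(Set.of("in", "dense", "out"), edges));   // [in, dense, out]
    }
}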
From source file: com.streamsets.pipeline.lib.jdbc.JdbcMultiRowRecordWriter.java
@SuppressWarnings("unchecked")
private void processPartition(Connection connection, Multimap<Long, Record> partitions, Long partitionKey,
        List<OnRecordErrorException> errorRecords) throws SQLException, OnRecordErrorException {
    Collection<Record> partition = partitions.get(partitionKey);

    // Fetch the base insert query for this partition.
    SortedMap<String, String> columnsToParameters = getFilteredColumnsToParameters(getColumnsToParameters(),
            partition.iterator().next());

    // put all the records in a queue for consumption
    LinkedList<Record> queue = new LinkedList<>(partition);

    // compute number of rows per batch
    if (columnsToParameters.isEmpty()) {
        throw new OnRecordErrorException(Errors.JDBCDEST_22);
    }
    int maxRowsPerBatch = maxPrepStmtParameters / columnsToParameters.size();

    PreparedStatement statement = null;

    // parameters are indexed starting with 1
    int paramIdx = 1;
    int rowCount = 0;
    while (!queue.isEmpty()) {
        // we're at the start of a batch.
        if (statement == null) {
            // instantiate the new statement
            statement = generatePreparedStatement(columnsToParameters,
                    // the next batch will have either the max number of records, or however many are left.
                    Math.min(maxRowsPerBatch, queue.size()), getTableName(), connection);
        }

        // process the next record into the current statement
        Record record = queue.removeFirst();
        for (String column : columnsToParameters.keySet()) {
            Field field = record.get(getColumnsToFields().get(column));
            Field.Type fieldType = field.getType();
            Object value = field.getValue();

            try {
                switch (fieldType) {
                case LIST:
                    List<Object> unpackedList = unpackList((List<Field>) value);
                    Array array = connection.createArrayOf(getSQLTypeName(fieldType), unpackedList.toArray());
                    statement.setArray(paramIdx, array);
                    break;
                case DATE:
                case DATETIME:
                    // Java Date types are not accepted by JDBC drivers, so we need to convert to java.sql.Date
                    java.util.Date date = field.getValueAsDatetime();
                    statement.setObject(paramIdx, new java.sql.Date(date.getTime()));
                    break;
                default:
                    statement.setObject(paramIdx, value, getColumnType(column));
                    break;
                }
            } catch (SQLException e) {
                LOG.error(Errors.JDBCDEST_23.getMessage(), column, fieldType.toString(), e);
                throw new OnRecordErrorException(record, Errors.JDBCDEST_23, column, fieldType.toString());
            }
            ++paramIdx;
        }
        rowCount++;

        // check if we've filled up the current batch
        if (rowCount == maxRowsPerBatch) {
            // time to execute the current batch
            statement.addBatch();
            statement.executeBatch();
            statement.close();
            statement = null;

            // reset our counters
            rowCount = 0;
            paramIdx = 1;
        }
    }

    // check if there are any records left. this should occur whenever there isn't *exactly* maxRowsPerBatch records in
    // this partition.
    if (statement != null) {
        statement.addBatch();
        statement.executeBatch();
        statement.close();
    }
}
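Here the LinkedList is drained record by record with removeFirst() while a counter decides when to flush a JDBC batch. The drain-in-batches skeleton on its own (a sketch; the flush callback stands in for the addBatch()/executeBatch() work done above):

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.function.Consumer;

public class BatchDrain {
    // Consumes the queue front-to-back, flushing after every batchSize items.
    static <T> void drainInBatches(LinkedList<T> queue, int batchSize, Consumer<List<T>> flush) {
        List<T> batch = new ArrayList<>(batchSize);
        while (!queue.isEmpty()) {
            batch.add(queue.removeFirst());
            if (batch.size() == batchSize) {
                flush.accept(batch);                 // e.g. execute the prepared-statement batch
                batch.clear();
            }
        }
        if (!batch.isEmpty()) flush.accept(batch);   // leftover partial batch
    }

    public static void main(String[] args) {
        LinkedList<Integer> queue = new LinkedList<>(List.of(1, 2, 3, 4, 5));
        drainInBatches(queue, 2, batch -> System.out.println("flush " + batch));
        // flush [1, 2] / flush [3, 4] / flush [5]
    }
}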
From source file: Graph.java
/**
 * Perform a breadth first search of this graph, starting at v. The vist may
 * be cut short if visitor throws an exception during a vist callback.
 *
 * @param <E>
 *
 * @param v -
 *          the search starting point
 * @param visitor -
 *          the vistor whose vist method is called prior to visting a vertex.
 * @throws E
 *           if vistor.visit throws an exception
 */
public <E extends Exception> void breadthFirstSearch(Vertex<T> v, VisitorEX<T, E> visitor) throws E {
    LinkedList<Vertex<T>> q = new LinkedList<Vertex<T>>();

    q.add(v);
    if (visitor != null)
        visitor.visit(this, v);
    v.visit();
    while (q.isEmpty() == false) {
        v = q.removeFirst();
        for (int i = 0; i < v.getOutgoingEdgeCount(); i++) {
            Edge<T> e = v.getOutgoingEdge(i);
            Vertex<T> to = e.getTo();
            if (!to.visited()) {
                q.add(to);
                if (visitor != null)
                    visitor.visit(this, to);
                to.visit();
            }
        }
    }
}
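Using a LinkedList as a FIFO queue (add at the tail, removeFirst at the head) is the classic breadth-first search shape shown above. A self-contained version over a plain adjacency map, without the Vertex/Edge/visitor types; java.util.ArrayDeque offers the same Deque operations and is usually the faster choice for this role:

import java.util.*;

public class Bfs {
    // Returns the vertices reachable from start, in breadth-first order.
    static List<String> bfs(String start, Map<String, List<String>> adjacency) {
        LinkedList<String> queue = new LinkedList<>();
        Set<String> visited = new HashSet<>();
        List<String> order = new ArrayList<>();
        queue.add(start);
        visited.add(start);
        while (!queue.isEmpty()) {
            String v = queue.removeFirst();
            order.add(v);
            for (String next : adjacency.getOrDefault(v, List.of())) {
                if (visited.add(next)) {   // enqueue each vertex only once
                    queue.add(next);
                }
            }
        }
        return order;
    }

    public static void main(String[] args) {
        Map<String, List<String>> g = Map.of("a", List.of("b", "c"), "b", List.of("d"));
        System.out.println(bfs("a", g));   // [a, b, c, d]
    }
}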
From source file: edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
private void computeSameAsGroups() throws IOException {
    //load all entities and assign an id to them
    //dbpedia entites are loaded first
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    if ((s.startsWith("http://www.dbpedia.org/resource")
                            || s.startsWith("http://dbpedia.org/resource")) && !entityIds.containsKey(s)
                            && !classIds.containsKey(s) && !propertyIds.containsKey(s)) {
                        entityIds.put(s, entityIds.size() + 1);
                    }
                    String v = m.group(7);
                    if ((v.startsWith("http://www.dbpedia.org/resource")
                            || v.startsWith("http://dbpedia.org/resource")) && !entityIds.containsKey(v)
                            && !classIds.containsKey(s) && !propertyIds.containsKey(s)) {
                        entityIds.put(v, entityIds.size() + 1);
                    }
                }
                l = in.readLine();
            }
        }
    }
    //now non-dpedia entities are loaded: http://www4.wiwiss.fu-berlin.de, http://data.linkedct.org, http://purl.org, http://bio2rdf.org, http://www.ncbi.nlm.nih.gov
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l;
            while ((l = in.readLine()) != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    if (s.startsWith("http://www4.wiwiss.fu-berlin.de") && !entityIds.containsKey(s)
                            && !classIds.containsKey(s) && !propertyIds.containsKey(s)) {
                        entityIds.put(s, entityIds.size() + 1);
                    }
                    String v = m.group(7);
                    if ((v.startsWith("http://www4.wiwiss.fu-berlin.de")
                            //|| v.startsWith("http://data.linkedct.org")
                            || v.startsWith("http://129.128.185.122")) && !entityIds.containsKey(v)
                            && !classIds.containsKey(s) && !propertyIds.containsKey(s)) {
                        entityIds.put(v, entityIds.size() + 1);
                    }
                }
                if (entityIds.size() == 12413) {
                    System.out.println();
                }
            }
        }
    }
    //create the sameAsEdges sets
    sameAsEdges = new HashSet[entityIds.size() + 1];
    entityById = new String[entityIds.size() + 1];
    for (Map.Entry<String, Integer> e : entityIds.entrySet()) {
        entityById[e.getValue()] = e.getKey();
    }
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l;
            while ((l = in.readLine()) != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#sameAs")) {
                        String s = m.group(2);
                        int idS = entityIds.get(s);
                        String v = m.group(7);
                        Integer idV = entityIds.get(v);
                        if (idV == null) {
                            continue;
                        }
                        if (sameAsEdges[idS] == null) {
                            sameAsEdges[idS] = new HashSet<>();
                        }
                        sameAsEdges[idS].add(idV);
                        if (sameAsEdges[idV] == null) {
                            sameAsEdges[idV] = new HashSet<>();
                        }
                        sameAsEdges[idV].add(idS);
                    }
                    /* else if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                        String s = m.group(2);
                        String v = m.group(7);
                        if (v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")) {
                            properties.add(s);
                        } else if (v.equals("http://www.w3.org/2000/01/rdf-schema#Class")) {
                            classes.add(s);
                        }
                    }*/
                }
                l = in.readLine();
            }
        }
    }
    sameAs = new int[entityIds.size() + 1];
    int i = 1;
    while (i < sameAs.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst();
            if (sameAs[j] != 0) {
                if (sameAs[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                sameAs[j] = i;
                if (sameAsEdges[j] != null) {
                    for (int k : sameAsEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        while (i < sameAs.length && sameAs[i] != 0) {
            i++;
        }
    }
}
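The final loop above is a connected-components pass: each entity is labelled with the id of the first entity in its sameAs group, found by a breadth-first walk over the symmetric sameAs edges using removeFirst(). The labelling loop reduced to its essentials (a sketch over a plain int adjacency structure mirroring the arrays used above; the Components class and label() method are illustrative names):

import java.util.Arrays;
import java.util.LinkedList;

public class Components {
    // adjacency[i] lists the neighbours of node i (1-based; index 0 unused); returns one label per node.
    static int[] label(int[][] adjacency) {
        int[] component = new int[adjacency.length];
        for (int i = 1; i < adjacency.length; i++) {
            if (component[i] != 0) continue;          // already labelled by an earlier walk
            LinkedList<Integer> queue = new LinkedList<>();
            queue.addLast(i);
            while (!queue.isEmpty()) {
                int j = queue.removeFirst();
                if (component[j] != 0) continue;
                component[j] = i;                     // label with the component's first node
                if (adjacency[j] != null) {
                    for (int k : adjacency[j]) queue.addLast(k);
                }
            }
        }
        return component;
    }

    public static void main(String[] args) {
        // Nodes 1 and 2 are connected; node 3 is isolated.
        int[][] adj = { null, {2}, {1}, {} };
        System.out.println(Arrays.toString(label(adj)));   // [0, 1, 1, 3]
    }
}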
From source file: edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
private void computeEquivalentPropertyGroups() throws IOException {
    //load all classes and assign an id to them
    //dbpedia properties are loaded first
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")
                            && (s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !propertyIds.containsKey(s)) {
                        propertyIds.put(s, propertyIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        if ((s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(s)) {
                            propertyIds.put(s, propertyIds.size() + 1);
                        }
                        if ((v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(v)) {
                            propertyIds.put(v, propertyIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    //now non-dpedia properties are loaded
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")
                            && !(s.equals("http://www.w3.org/2000/01/rdf-schema#label")
                                    || s.equals("http://www.w3.org/2002/07/owl#sameAs")
                                    || s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !propertyIds.containsKey(s)) {
                        propertyIds.put(s, propertyIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        if (!(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(s)) {
                            propertyIds.put(s, propertyIds.size() + 1);
                        }
                        if (!(v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !propertyIds.containsKey(v)) {
                            propertyIds.put(v, propertyIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    //create the equivalentPropertyEdges sets
    equivalentPropertyEdges = new HashSet[propertyIds.size() + 1];
    propertyById = new String[propertyIds.size() + 1];
    for (Map.Entry<String, Integer> e : propertyIds.entrySet()) {
        propertyById[e.getValue()] = e.getKey();
    }
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#equivalentProperty")) {
                        String s = m.group(2);
                        int idS = propertyIds.get(s);
                        String v = m.group(7);
                        int idV = propertyIds.get(v);
                        if (equivalentPropertyEdges[idS] == null) {
                            equivalentPropertyEdges[idS] = new HashSet<>();
                        }
                        equivalentPropertyEdges[idS].add(idV);
                        if (equivalentPropertyEdges[idV] == null) {
                            equivalentPropertyEdges[idV] = new HashSet<>();
                        }
                        equivalentPropertyEdges[idV].add(idS);
                    }
                    /* else if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                        String s = m.group(2);
                        String v = m.group(7);
                        if (v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")) {
                            properties.add(s);
                        } else if (v.equals("http://www.w3.org/2000/01/rdf-schema#Class")) {
                            classes.add(s);
                        }
                    }*/
                }
                l = in.readLine();
            }
        }
    }
    equivalentProperty = new int[propertyIds.size() + 1];
    int i = 1;
    while (i < equivalentProperty.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst();
            if (equivalentProperty[j] != 0) {
                if (equivalentProperty[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                equivalentProperty[j] = i;
                if (equivalentPropertyEdges[j] != null) {
                    for (int k : equivalentPropertyEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        while (i < equivalentProperty.length && equivalentProperty[i] != 0) {
            i++;
        }
    }
}