List of usage examples for java.util.HashSet#contains(Object)
public boolean contains(Object o)
From source file:edu.cornell.mannlib.vitro.webapp.dao.jena.VClassDaoJena.java
private List<VClass> getVClassesForProperty(String vclassURI, String propertyURI, boolean domainSide) { List<VClass> vClasses = new ArrayList<VClass>(); getOntModel().enterCriticalSection(Lock.READ); try {//from w w w. j a v a 2s .c o m ObjectProperty op = getOntModel().getObjectProperty(propertyURI); if (op != null) { OntResource superclass = null; if (vclassURI != null) { // TODO need a getAllSuperPropertyURIs method in ObjectPropertyDao List<String> superproperties = getWebappDaoFactory().getObjectPropertyDao() .getSuperPropertyURIs(propertyURI, false); superproperties.add(propertyURI); HashSet<String> subjSuperclasses = new HashSet<String>(getAllSuperClassURIs(vclassURI)); subjSuperclasses.add(vclassURI); for (String objectPropertyURI : superproperties) { for (Iterator restStmtIt = getOntModel().listStatements(null, OWL.onProperty, getOntModel().getProperty(objectPropertyURI)); restStmtIt.hasNext();) { Statement restStmt = (Statement) restStmtIt.next(); Resource restRes = restStmt.getSubject(); for (Iterator axStmtIt = getOntModel().listStatements(null, null, restRes); axStmtIt .hasNext();) { Statement axStmt = (Statement) axStmtIt.next(); OntResource subjOntRes = null; if (axStmt.getSubject().canAs(OntResource.class)) { subjOntRes = axStmt.getSubject().as(OntResource.class); } if ((subjOntRes != null) && (subjSuperclasses.contains(getClassURIStr(subjOntRes))) && (axStmt.getPredicate().equals(RDFS.subClassOf) || (axStmt.getPredicate().equals(OWL.equivalentClass)))) { if (restRes.canAs(AllValuesFromRestriction.class)) { AllValuesFromRestriction avfRest = restRes .as(AllValuesFromRestriction.class); Resource avf = avfRest.getAllValuesFrom(); if (avf.canAs(OntClass.class)) { superclass = avfRest.getAllValuesFrom().as(OntClass.class); } } } } } } } if (superclass == null) { superclass = (domainSide) ? 
op.getRange() : op.getDomain(); if (superclass == null) { //this section to prevent all subclasses of owl:Thing //returned if range is owl:Thing, refer to NIHVIVO-3357 NIHVIVO-3814 //This is unfortunate case of warping the model for the ease of the display. return Collections.emptyList(); } } if (superclass != null) { VClass superVclass; if (superclass.isAnon()) { superVclass = getVClassByURI(getClassURIStr(superclass)); } else { superVclass = getVClassByURI(superclass.getURI()); } if (OWL.Thing.equals(superclass)) { //this section to prevent all subclasses of owl:Thing //returned if range is owl:Thing, refer to NIHVIVO-3357 NIHVIVO-3814 //This is unfortunate case of warping the model for the ease of the display. return Collections.emptyList(); } if (superVclass != null) { vClasses.add(superVclass); // if this model infers types based on the taxonomy, adding the subclasses will only // waste time for no benefit if (!isUnderlyingStoreReasoned()) { Iterator classURIs = getAllSubClassURIs(getClassURIStr(superclass)).iterator(); while (classURIs.hasNext()) { String classURI = (String) classURIs.next(); VClass vClass = getVClassByURI(classURI); if (vClass != null) vClasses.add(vClass); } } } } } } finally { getOntModel().leaveCriticalSection(); } return vClasses; }
From source file:edu.ucla.cs.scai.canali.core.index.utils.DBpediaOntology201510Utils.java
public void createClassLabelsFile(HashSet<String> classes) throws Exception { System.out.println("Saving class labels"); try (PrintWriter outC = new PrintWriter(new FileOutputStream(destinationPath + "class_labels", false), true)) {/*from w w w . ja va 2s . co m*/ ExtendedIterator<OntClass> ontClasses = dbpedia.listClasses(); while (ontClasses.hasNext()) { OntClass clazz = ontClasses.next(); if (classes.contains(clazz.getURI())) { outC.println(clazz.getURI() + "\t" + URLDecoder.decode(StringEscapeUtils.unescapeJava(clazz.getLabel("en")), "UTF-8")); } } //now process Yago classes try (BufferedReader in = new BufferedReader( new FileReader(downloadedFilesPath + "yago_type_links.nt"))) { String l = in.readLine(); String regex = "(\\s*)<(.*)> <http://www.w3.org/2002/07/owl#equivalentClass> <(.*)>"; Pattern p = Pattern.compile(regex); while (l != null) { Matcher m = p.matcher(l); if (m.find()) { String aUri = m.group(2); if (classes.contains(aUri)) { String[] s = aUri.split("\\/"); String label = s[s.length - 1]; outC.println(aUri + "\t" + URLDecoder.decode(StringEscapeUtils.unescapeJava(label), "UTF-8")); } } l = in.readLine(); } in.close(); } } }
From source file:edu.ku.brc.specify.dbsupport.cleanuptools.MultipleRecordComparer.java
/**
 * Loads the candidate-duplicate rows for this table and works out which columns
 * are worth showing in the merge UI.
 * <p>
 * Builds the column list (SQL-only display columns first, then table fields, with
 * hidden fields set aside), queries all rows matching the record-id IN clause,
 * then computes two per-column flags: {@code colHasData} (any non-empty value) and
 * {@code colIsSame} (identical value in every row). Columns whose values never
 * differ are dropped; merge-control pseudo-columns are prepended (MergedInto /
 * MergedFrom when this is a parent comparer, IsIncluded otherwise) and
 * {@code dataItems} is rebuilt to contain only the surviving columns. Finally
 * recurses into all child comparers ({@code kids}).
 *
 * @return true if at least one column has data that differs across the rows
 *         ({@code hasColmnsOfDataThatsDiff}); the kids' result is recorded in
 *         {@code hasKidsDataThatsDiff}, not in the return value
 */
public boolean loadData() { boolean isVerbose = true; columns.clear(); int numSQLCols = 0; for (DisplayColInfo dci : displayCols) { if (dci.isIncludeSQLOnly()) { columns.add(dci.getFi()); numSQLCols++; } } for (DBFieldInfo fi : tblInfo.getFields()) { if (containsFieldInfo(fi)) { columns.insertElementAt(fi, numSQLCols); } else if (fi.isHidden()) { hiddenCols.add(fi); } else { columns.add(fi); } } HashSet<Integer> inclColsIndexSet = new HashSet<Integer>(); int index = 0; for (DBFieldInfo fi : columns) { if (containsFieldInfo(fi)) { inclColsIndexSet.add(index); } index++; } //---------------------------------------------- // Build SELECT //---------------------------------------------- StringBuilder cols = new StringBuilder(); for (DBFieldInfo fi : columns) { if (cols.length() > 0) cols.append(","); cols.append(fi.getColumn()); } // Load Data DBTableInfo ti = parentTI != null ? parentTI : tblInfo; final String sql = String.format("SELECT %s, %s FROM %s WHERE %s in %s", cols.toString(), tblInfo.getIdColumnName(), tblInfo.getName(), ti.getIdColumnName(), fii.getInClause(true)); if (isVerbose) System.out.println("Data ------------\n" + sql); dataItems = BasicSQLUtils.query(sql); if (dataItems.size() > 0) { //---------------------------------------------- // First check to see which columns have data //---------------------------------------------- for (Object[] row : dataItems) { if (colHasData == null) { colHasData = new boolean[row.length]; // all columns (including id) for (int i = 0; i < row.length; i++) colHasData[i] = false; } for (int i = 0; i < row.length; i++) { if (inclColsIndexSet.contains(i)) { colHasData[i] = true; } else if (row[i] != null) { if (row[i] instanceof String) { colHasData[i] = StringUtils.isNotEmpty((String) row[i]); } else { colHasData[i] = true; } } if (isVerbose) System.out.print(row[i] + ", "); } if (isVerbose) System.out.println(); } if (isVerbose) System.out.println("------------" + sql); //for (int 
j=0;j<hasData.length;j++) System.out.print(j+" "+hasData[j]+", "); //System.out.println(); // Now check to see if the value in the each column are the same. colIsSame = new boolean[colHasData.length]; // Don't check last column for (int i = 0; i < colHasData.length - 1; i++) { if (colHasData[i]) { colIsSame[i] = false; if (!inclColsIndexSet.contains(i)) { colIsSame[i] = true; Object value = null; for (Object[] row : dataItems) { Object otherVal = row[i]; if (value == null) { if (otherVal != null) { value = otherVal; } } else if (value != null) { if (!value.equals(otherVal)) { colIsSame[i] = false; break; } } } } else { if (isVerbose) System.out.println("Skipping " + i); } } else { colIsSame[i] = true; } } if (displayCols.size() > 0) { int inx = 0; for (DBFieldInfo fldCol : columns) { if (containsFieldInfo(fldCol)) { colToIndexMap.put(fldCol, inx); } inx++; } indexForTitle = colToIndexMap.size() > 0 ? colToIndexMap.values().iterator().next() : -1; // for singles only } // for (Integer index : colToIndexMap.values()) // { // colIsSame[index] = false; // } //System.out.println(String.format("Cols %d hd: %d", columns.size(), hasData.length)); Vector<DBFieldInfo> oldColumns = new Vector<DBFieldInfo>(columns); // does not include ID column columns.clear(); // Add 'Is Included to data model if (isParent) { DBFieldInfo fldInfo = new DBFieldInfo(tblInfo, "FALSE", "MergedInto", "boolean", 1, true, true, false, false, false, null); fldInfo.setTitle(getResourceString("CLNUP_MERGE_INTO")); columns.add(fldInfo); fldInfo = new DBFieldInfo(tblInfo, "FALSE", "MergedFrom", "boolean", 1, true, true, false, false, false, null); fldInfo.setTitle(getResourceString("CLNUP_MERGE_FROM")); columns.add(fldInfo); if (indexForTitle > -1) indexForTitle += 2; } else { DBFieldInfo isInclFld = new DBFieldInfo(tblInfo, "FALSE", "IsIncluded", "boolean", 1, true, true, false, false, false, null); isInclFld.setTitle(getResourceString("CLNUP_MERGE_ISINCL")); columns.add(isInclFld); if (indexForTitle > 
-1) indexForTitle++; } if (isVerbose) { for (int j = 0; j < colIsSame.length; j++) System.out.print(String.format("%3d", j)); System.out.println(); for (int j = 0; j < colHasData.length; j++) System.out.print(String.format(" %s", colHasData[j] ? "Y" : "N")); System.out.println(" (Has Data)"); for (int j = 0; j < colIsSame.length; j++) System.out.print(String.format(" %s", colIsSame[j] ? "Y" : "N")); System.out.println(" (Is Same)"); } numColsWithData = 0; for (int i = 0; i < colHasData.length - 1; i++) { if (isVerbose) System.out.println(i + " -> " + (colHasData[i] && !colIsSame[i]) + " Has: " + colHasData[i] + " !SM: " + !colIsSame[i] + " " + oldColumns.get(i).getTitle()); if (colHasData[i] && !colIsSame[i]) { numColsWithData++; columns.add(oldColumns.get(i)); if (isVerbose) System.out.println(i + " Added: " + oldColumns.get(i).getTitle()); } } hasColmnsOfDataThatsDiff = numColsWithData > 0; if (hasColmnsOfDataThatsDiff) { numColsWithData += 2; // For IsIncluded and IdColumn if (isParent) numColsWithData++; Vector<Object[]> oldDataItems = new Vector<Object[]>(dataItems); dataItems.clear(); for (Object[] row : oldDataItems) { int inx = 0; Object[] newRow = new Object[numColsWithData]; newRow[inx++] = false; // isIncluded or Merged Into if (isParent) newRow[inx++] = false; // Merged From for (int i = 0; i < row.length; i++) { //if (isVerbose) System.out.println(i+" -> "+(colHasData[i] && !colIsSame[i])+" "+colHasData[i]+" "+!colIsSame[i]); if (colHasData[i] && !colIsSame[i]) { newRow[inx++] = row[i]; } } dataItems.add(newRow); } if (isVerbose) { for (int j = 0; j < columns.size(); j++) System.out.print(j + " " + columns.get(j).getTitle() + ", "); System.out.println(); System.out.println(String.format("Cols %d hd: %d", columns.size(), colHasData.length)); } } } hasKidsDataThatsDiff = false; for (MultipleRecordComparer mrc : kids) { if (mrc.loadData()) { hasKidsDataThatsDiff = true; } } return hasColmnsOfDataThatsDiff; }
From source file:com.android.contacts.list.ContactEntryListAdapter.java
/**
 * Updates the adapter's partitions to match the directory meta-data in the
 * supplied cursor.
 * <p>
 * Phase I adds a new DirectoryPartition for every cursor row whose directory id
 * is not yet known to the adapter; Phase II walks existing partitions in reverse
 * (so removal indices stay valid) and drops any DirectoryPartition whose id no
 * longer appears in the cursor. An empty cursor is treated as an error and
 * ignored, since the directory table must contain at least the local directory.
 * Ends by invalidating and notifying the adapter.
 *
 * @param cursor directory meta-data with _ID, directory type, display name and
 *               photo-support columns; its position is rewound internally
 */
public void changeDirectories(Cursor cursor) { if (cursor.getCount() == 0) { // Directory table must have at least local directory, without which this adapter will // enter very weird state. Log.e(TAG, "Directory search loader returned an empty cursor, which implies we have " + "no directory entries.", new RuntimeException()); return; } HashSet<Long> directoryIds = new HashSet<Long>(); int idColumnIndex = cursor.getColumnIndex(Directory._ID); int directoryTypeColumnIndex = cursor.getColumnIndex(DirectoryListLoader.DIRECTORY_TYPE); int displayNameColumnIndex = cursor.getColumnIndex(Directory.DISPLAY_NAME); int photoSupportColumnIndex = cursor.getColumnIndex(Directory.PHOTO_SUPPORT); // TODO preserve the order of partition to match those of the cursor // Phase I: add new directories cursor.moveToPosition(-1); while (cursor.moveToNext()) { long id = cursor.getLong(idColumnIndex); directoryIds.add(id); if (getPartitionByDirectoryId(id) == -1) { DirectoryPartition partition = new DirectoryPartition(false, true); partition.setDirectoryId(id); partition.setDirectoryType(cursor.getString(directoryTypeColumnIndex)); partition.setDisplayName(cursor.getString(displayNameColumnIndex)); int photoSupport = cursor.getInt(photoSupportColumnIndex); partition.setPhotoSupported(photoSupport == Directory.PHOTO_SUPPORT_THUMBNAIL_ONLY || photoSupport == Directory.PHOTO_SUPPORT_FULL); addPartition(partition); } } // Phase II: remove deleted directories int count = getPartitionCount(); for (int i = count; --i >= 0;) { Partition partition = getPartition(i); if (partition instanceof DirectoryPartition) { long id = ((DirectoryPartition) partition).getDirectoryId(); if (!directoryIds.contains(id)) { removePartition(i); } } } invalidate(); notifyDataSetChanged(); }
From source file:com.projity.pm.task.Task.java
/**
 * Determines whether this task (transitively) depends on another, which would make
 * a new link circular. Because of the rules for parent tasks, the algorithm is
 * rather involved:
 * <ul>
 * <li>a (parent) task depends on its children's predecessors, as these are
 *     potentially equivalent</li>
 * <li>a task depends on its parent's predecessors - a link to the parent applies
 *     to this task too</li>
 * <li>a task depends on its parent, and on its own predecessors (disabled
 *     dependencies are skipped)</li>
 * </ul>
 * Recursion terminates when this == other (circular) or when this task was
 * already visited via {@code set}.
 *
 * @param other     task being tested as the target of the prospective link
 * @param me        originating task of the traversal (carried through recursion)
 * @param set       visited-set preventing the same task being examined twice
 *                  (also guards against infinite loops); raw HashSet of Task
 * @param taskNames debug trail of the path taken, or null to disable tracing
 * @return true if linking to {@code other} would cause a circular link
 */
private boolean dependsOn(Task other, Task me, HashSet set, String taskNames) { // To avoid infinite loops which can occur under certain circumstances, use a set to prevent looking up twice if (set.contains(this)) return false; set.add(this); // Here is the primary exit point. We have arrived back at the other node, so it is circular if (this == other) { if (taskNames != null) System.out.println("Circular: \n" + taskNames); return true; } if (taskNames != null) taskNames += (getId() + ": " + getName() + '\n'); Task predecessor; Dependency dep; Collection children; Iterator i; i = getPredecessorList().iterator(); while (i.hasNext()) { dep = (Dependency) i.next(); if (dep.isDisabled()) continue; predecessor = (Task) dep.getPredecessor(); // I depend on my predecessors if (predecessor.dependsOn(other, me, set, taskNames == null ? null : taskNames + "Pred-")) return true; } //parent Task parent = getWbsParentTask(); if (other.getWbsParentTask() != parent) { // only do parents if they are different // if (!this.isAncestorOrDescendent(other)) if (parent != null) { // if ( !other.wbsDescendentOf(parent)) if (parent.dependsOn(other, me, set, taskNames == null ? 
null : taskNames + "Parent- ")) return true; } } children = getWbsChildrenNodes(); Task child; Object current; Iterator j; // I depend on my children's preds if (children != null) { i = children.iterator(); while (i.hasNext()) { current = ((Node) i.next()).getImpl(); if (!(current instanceof Task)) continue; child = (Task) current; j = child.getPredecessorList().iterator(); while (j.hasNext()) { dep = (Dependency) j.next(); if (dep.isDisabled()) continue; predecessor = (Task) dep.getPredecessor(); // I depend on my predecessors if (predecessor.wbsDescendentOf(this)) {// skip if already belongs to parent thru an ancestry relation continue; } if (predecessor.dependsOn(other, me, set, taskNames == null ? null : taskNames + "pred-child " + child.getId())) return true; } } } return false; }
From source file:edu.cornell.mannlib.vitro.webapp.reasoner.SimpleReasoner.java
protected void setMostSpecificTypes(Resource individual, HashSet<String> typeURIs, Model inferenceModel) { Model retractions = ModelFactory.createDefaultModel(); inferenceModel.enterCriticalSection(Lock.READ); try {// w ww .java2s . co m // remove obsolete mostSpecificType assertions StmtIterator iter = inferenceModel.listStatements(individual, mostSpecificType, (RDFNode) null); while (iter.hasNext()) { Statement stmt = iter.next(); if (!stmt.getObject().isResource()) { log.warn("The object of this assertion is expected to be a resource: " + stmtString(stmt)); continue; } if (!typeURIs.contains(stmt.getObject().asResource().getURI())) { retractions.add(stmt); } } } finally { inferenceModel.leaveCriticalSection(); } Iterator<Statement> rIter = retractions.listStatements(); while (rIter.hasNext()) { removeInference(rIter.next(), inferenceModel, true, false); } Iterator<String> typeIter = typeURIs.iterator(); while (typeIter.hasNext()) { String typeURI = typeIter.next(); Statement mstStmt = ResourceFactory.createStatement(individual, mostSpecificType, ResourceFactory.createResource(typeURI)); addInference(mstStmt, inferenceModel, true); } return; }
From source file:com.ibm.bi.dml.runtime.controlprogram.parfor.ProgramConverter.java
/** * // www . j a va 2s . c o m * @param pbs * @param cand * @throws DMLRuntimeException */ public static void rFindSerializationCandidates(ArrayList<ProgramBlock> pbs, HashSet<String> cand) throws DMLRuntimeException { for (ProgramBlock pb : pbs) { if (pb instanceof WhileProgramBlock) { WhileProgramBlock wpb = (WhileProgramBlock) pb; rFindSerializationCandidates(wpb.getChildBlocks(), cand); } else if (pb instanceof ForProgramBlock || pb instanceof ParForProgramBlock) { ForProgramBlock fpb = (ForProgramBlock) pb; rFindSerializationCandidates(fpb.getChildBlocks(), cand); } else if (pb instanceof IfProgramBlock) { IfProgramBlock ipb = (IfProgramBlock) pb; rFindSerializationCandidates(ipb.getChildBlocksIfBody(), cand); if (ipb.getChildBlocksElseBody() != null) rFindSerializationCandidates(ipb.getChildBlocksElseBody(), cand); } else //all generic program blocks { for (Instruction inst : pb.getInstructions()) if (inst instanceof FunctionCallCPInstruction) { FunctionCallCPInstruction fci = (FunctionCallCPInstruction) inst; String fkey = DMLProgram.constructFunctionKey(fci.getNamespace(), fci.getFunctionName()); if (!cand.contains(fkey)) //memoization for multiple calls, recursion { cand.add(fkey); //add to candidates //investigate chains of function calls FunctionProgramBlock fpb = pb.getProgram().getFunctionProgramBlock(fci.getNamespace(), fci.getFunctionName()); rFindSerializationCandidates(fpb.getChildBlocks(), cand); } } } } }
From source file:edu.ku.brc.specify.tasks.InteractionsTask.java
/**
 * Processes a "return loan" action for a record set of Loans.
 * <p>
 * When {@code dataObj} is null the user is asked to pick from the existing Loan
 * record sets. For each distinct loan id in the set (de-duplicated via a
 * HashSet), every unresolved LoanPreparation of each open loan has its remaining
 * quantity marked as returned/resolved, and the resulting LoanReturnInfo list is
 * handed to {@code doReturnLoans} with the current agent and today's date.
 * Database access happens in a session that is always closed; exceptions are
 * captured by the usage/exception trackers.
 *
 * @param dataObj the record set of Loans to return, or null to prompt the user
 */
private void returnLoan(final RecordSetIFace dataObj) { RecordSetIFace recordSet = null; if (dataObj instanceof RecordSetIFace) { recordSet = dataObj; } else if (dataObj == null) { RecordSetTask rsTask = (RecordSetTask) ContextMgr.getTaskByClass(RecordSetTask.class); Vector<RecordSetIFace> loanRSList = new Vector<RecordSetIFace>( rsTask.getRecordSets(Loan.getClassTableId())); recordSet = getRecordSetOfDataObjs(null, Loan.class, "loanNumber", loanRSList.size()); } if (recordSet == null) { return; } List<LoanReturnInfo> lriList = new ArrayList<LoanReturnInfo>(); if (recordSet.getDbTableId() == Loan.getClassTableId()) { DataProviderSessionIFace session = null; try { HashSet<Integer> loanHashMap = new HashSet<Integer>(); session = DataProviderFactory.getInstance().createSession(); for (RecordSetItemIFace rsi : recordSet.getItems()) { if (!loanHashMap.contains(rsi.getRecordId())) { Loan loan = session.get(Loan.class, rsi.getRecordId()); if (loan != null) { loanHashMap.add(rsi.getRecordId()); if (!loan.getIsClosed()) { for (LoanPreparation lp : loan.getLoanPreparations()) { if (!lp.getIsResolved()) { // Returned items are always resolved. // but resolved items are not always returned. int qty = lp.getQuantity(); int qtyResolved = lp.getQuantityResolved(); int qtyReturned = lp.getQuantityReturned(); int qtyToBeReturned = qty - qtyResolved; qtyResolved += qtyToBeReturned; qtyReturned += qtyToBeReturned; lriList.add( new LoanReturnInfo(lp, null, qtyToBeReturned, qtyResolved, true)); } } } } } } } catch (Exception ex) { ex.printStackTrace(); edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount(); edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(InteractionsTask.class, ex); } finally { if (session != null) { session.close(); } } } if (lriList.size() > 0) { Agent currAgent = AppContextMgr.getInstance().getClassObject(Agent.class); doReturnLoans(null, currAgent, Calendar.getInstance(), lriList, false); } }
From source file:com.mindcognition.mindraider.tools.Checker.java
/**
 * Walks the whole MindRaider repository, checking its integrity and fixing what
 * it can in place.
 * <p>
 * For every label (folder) it re-creates a missing label XML resource from the
 * labels RDF model. For every outline under a label (each outline visited once,
 * tracked in {@code allOutlines}) it: removes outlines whose RDF model is
 * missing (repairing href on orphans first so discard/delete can work),
 * propagates a renamed outline label from the labels RDF into the outline's XML
 * and RDF, and iterates the outline's concepts -- re-adding attachments missing
 * from RDF and resurrecting a concept's XML resource from RDF when the XML file
 * is broken or absent. Progress goes to the status bar and log; a fix counter is
 * reported at the end. Finishes with an explicit finalization/GC pass.
 */
public static void checkAndFixRepository() { logger.debug("Repository integrity check..."); // {{debug}} // fields/*from w w w. jav a 2s . co m*/ HashSet<ResourceDescriptor> allOutlines = new HashSet<ResourceDescriptor>(); OutlineCustodian outlineCustodian = MindRaider.outlineCustodian; int fixes = 0; int totalConcepts = 0; // labels (folders) RDF Model labelsRdfModel = MindRaider.labelCustodian.getRdfModel(); // folders.rdf.xml ResourceDescriptor[] labelDescriptors = MindRaider.labelCustodian.getLabelDescriptors(); if (!ArrayUtils.isEmpty(labelDescriptors)) { for (ResourceDescriptor labelDescriptor : labelDescriptors) { String labelUri = labelDescriptor.getUri(); // check whether [label]/folder.xml exists (eventually re-create it) StatusBar.show("Checking label XML resource: " + labelUri); Resource labelXmlResource = MindRaider.labelCustodian.get(labelUri); if (labelXmlResource == null) { try { StatusBar.show("Fixing label XML resource: " + labelUri); // create directory String labelDirectory = MindRaider.labelCustodian.createLabelDirectory(labelUri); // resource Resource resource = MindRaider.labelCustodian .createXmlResourceForLabel(labelDescriptor.getLabel(), labelUri); MindRaider.labelCustodian.addOutlinesGroupToLabelXmlResource(resource); resource.toXmlFile(MindRaider.labelCustodian.getLabelXmlResourceFileName(labelDirectory)); // label resource doesn't exist => must be re-created from RDF ResourceDescriptor[] outlineDescriptors = MindRaider.labelCustodian .getOutlineDescriptors(labelUri); if (outlineDescriptors != null && outlineDescriptors.length > 0) { for (int i = 0; i < outlineDescriptors.length; i++) { MindRaider.labelCustodian.addOutlineToLabelXmlResourceAndSave(labelUri, outlineDescriptors[i].getUri()); System.out.println("Fixing label XML resource: " + labelUri + " -> " + outlineDescriptors[i].getUri()); ++fixes; } } } catch (Exception ee) { logger.debug("Unable to fix label: " + labelDescriptor.getUri(), ee); // {{debug}} } } // folder.rdf.xml 
ResourceDescriptor[] outlineDescriptors = MindRaider.labelCustodian.getOutlineDescriptors(labelUri); if (outlineDescriptors != null) { for (ResourceDescriptor outlineDescriptor : outlineDescriptors) { if (!allOutlines.contains(outlineDescriptor)) { allOutlines.add(outlineDescriptor); StatusBar.show("Checking outline: " + outlineDescriptor.getLabel() + " (" + outlineDescriptor.getUri() + ")"); logger.debug(" Outline: '" + outlineDescriptor.getLabel() + "', " + outlineDescriptor.getUri()); // {{debug}} Model outlineRdfModel; OutlineResource outlineResource; Resource outlineXmlResource; String outlineModelFilename; String outlineResourceFilename; try { // outline's RDF (notebook.rdf.xml) outlineModelFilename = outlineCustodian.getModelFilenameByDirectory( outlineCustodian.getOutlineDirectory(outlineDescriptor.getUri())); logger.debug(" RDF: " + outlineModelFilename); outlineRdfModel = RdfModel.loadModel(outlineModelFilename, false); // detect whether it is active outline if (MindRaider.outlineCustodian.getActiveOutlineResource() != null && MindRaider.outlineCustodian.getActiveOutlineResource().getUri() .equals(outlineDescriptor.getUri())) { //JOptionPane.showConfirmDialog(MindRaider.mainJFrame, "Fixing active outline: "+outlineDescriptor.getUri()); outlineRdfModel = MindRaider.spidersGraph.getRdfModel().getModel(); } if (outlineRdfModel == null) { // RDF model doesn't exist - such outline can not be restored, just delete it final String fixMessage = "Fix: removing broken outline '" + outlineDescriptor.getLabel() + "'"; StatusBar.show(fixMessage); System.out.println(fixMessage); // check that outline is NOT BROKEN - otherwise standard functions will not // be able to discard and delete it com.hp.hpl.jena.rdf.model.Resource orphan = labelsRdfModel .getResource(outlineDescriptor.getUri()); if (orphan != null) { logger.debug(" Orphan outline found: " + outlineDescriptor.getUri()); // {{debug}} if (RdfModel.getLabel(labelsRdfModel, orphan) == null) { logger.debug(" ... 
having no label"); // {{debug}} if (RdfModel.getHref(labelsRdfModel, orphan) == null) { logger.debug(" ... having no href"); // {{debug}} // if it has no HREF, then fix it -> standard functions will delete that String relativePath = MindRaider.profile .getRelativePath(outlineCustodian .getOutlineDirectory(outlineDescriptor.getUri())); RdfModel.setHref(orphan, relativePath + OutlineCustodian.FILENAME_XML_RESOURCE); } } } MindRaider.labelCustodian.discardOutline(outlineDescriptor.getUri()); MindRaider.labelCustodian.deleteOutline(outlineDescriptor.getUri()); ++fixes; continue; } // outline's XML (notebook.xml) outlineResourceFilename = outlineCustodian.getResourceFilenameByDirectory( outlineCustodian.getOutlineDirectory(outlineDescriptor.getUri())); logger.debug(" XML: " + outlineResourceFilename); outlineXmlResource = new Resource(outlineResourceFilename); outlineResource = new OutlineResource(outlineXmlResource); } catch (Exception e) { logger.debug("Unable to load outline" + outlineDescriptor.getUri(), e); // TODO fix it continue; } //logger.debug(" Loaded: "+outlineRdfModel+" / "+outlineXmlResource); // {{debug}} // FIX outline label: on rename changed only in folder's RDF, not xml name (and notebook's XML) // FIX rename: notebook name is changed on rename ONLY in the labels (folders) RDF model, // in here it is propagated to notebook's XML and (notebook.xml) and RDF (notebook.rdf.xml) String outlineLabel = MindRaider.labelCustodian .getOutlineDescriptor(outlineDescriptor.getUri()).getLabel(); String outlineComment = "'" + outlineLabel + "' outline."; if (outlineLabel != null && outlineLabel.length() > 0) { if (!outlineLabel.equals(outlineResource.getLabel())) { fixes++; StatusBar.show("Fixing title and description: " + outlineDescriptor.getUri()); System.out.println(" Fix: inconsistent outline's title & description (" + outlineDescriptor.getUri() + ")"); // {{debug}} logger.debug(" Label's RDF : " + outlineLabel); // {{debug}} logger.debug(" Outline's XML: " + 
outlineResource.getLabel()); // {{debug}} if (outlineResource.getLabelProperty() != null) { outlineResource.getLabelProperty().setLabelContent(outlineLabel); } if (outlineResource.getAnnotationProperty() != null) { outlineResource.getAnnotationProperty().setAnnotation(outlineComment); } try { outlineResource.save(); } catch (Exception e) { logger.debug("Unable to save outline XML resource", e); // {{debug}} } } } com.hp.hpl.jena.rdf.model.Resource rdfResource = outlineRdfModel .getResource(outlineDescriptor.getUri()); if (rdfResource != null) { rdfResource.removeAll(RDFS.label); rdfResource.addProperty(RDFS.label, outlineLabel); rdfResource.removeAll(RDFS.comment); rdfResource.addProperty(RDFS.comment, outlineComment); RdfModel.saveModel(outlineRdfModel, outlineModelFilename); } // iterate outline's concepts final SimpleSelector simpleSelector = new SimpleSelector(null, RDF.type, outlineRdfModel.createResource(MindRaiderConstants.MR_OWL_CLASS_CONCEPT)); StmtIterator conceptsIterator = outlineRdfModel.listStatements(simpleSelector); while (conceptsIterator.hasNext()) { ++totalConcepts; Statement statement = (Statement) conceptsIterator.next(); final com.hp.hpl.jena.rdf.model.Resource conceptRdfResource = statement .getSubject(); //logger.debug(" Concept: " +totalConcepts+" "+conceptRdfResource.getURI()); // TODO check whether the concept is in notebook.xml // load note resource [concept name].xml try { ConceptResource noteResource = MindRaider.noteCustodian .get(outlineDescriptor.getUri(), conceptRdfResource.getURI()); // TODO check and fix note's attachments: if attachment is in the resource and not in RDF, add it to RDF logger.debug("Attachments:"); AttachmentProperty[] attachments = noteResource.getAttachments(); if (attachments != null && attachments.length > 0) { for (AttachmentProperty attachmentProperty : attachments) { logger.debug(" " + attachmentProperty.getUrl()); StmtIterator listStatements = outlineRdfModel .listStatements(conceptRdfResource, 
outlineRdfModel.getProperty( MindRaiderConstants.MR_RDF_NS, "attachment"), attachmentProperty.getUrl()); if (!listStatements.hasNext()) { //JOptionPane.showConfirmDialog(MindRaider.mainJFrame, "Missing attach in RDF: "+attachmentProperty.getUrl()); conceptRdfResource.addProperty( outlineRdfModel.getProperty( MindRaiderConstants.MR_RDF_NS + "attachment"), attachmentProperty.getUrl()); RdfModel.saveModel(outlineRdfModel, outlineModelFilename); ++fixes; } } } } catch (Exception e) { // there is a problem (file doesn't exist, it is empty file, ...) // fix: build *.xml resource from RDF and write it back // rdf contains: label/timestamp/comment/?attachments ignored for now String label = RdfModel.getLabel(outlineRdfModel, conceptRdfResource); String comment = RdfModel.getComment(outlineRdfModel, conceptRdfResource); long timestamp = RdfModel.getTimestamp(outlineRdfModel, conceptRdfResource); try { ConceptResource conceptResource = new ConceptResource( new Resource(MindRaider.profile.getProfileName(), timestamp, 1, System.currentTimeMillis(), conceptRdfResource.getURI())); conceptResource.resource.getMetadata() .setMindRaiderVersion(MindRaider.getVersion()); conceptResource.resource.getMetadata() .setType(MindRaiderConstants.MR_OWL_CLASS_CONCEPT); conceptResource.resource.getData().addProperty(new LabelProperty(label)); conceptResource.resource.getData() .addProperty(new AnnotationProperty(comment)); conceptResource.resource.getData() .addProperty(new AnnotationContentTypeProperty( MindRaiderConstants.MR_OWL_CONTENT_TYPE_PLAIN_TEXT)); conceptResource.resource.getData().addProperty( new NotebookProperty(new URI(outlineDescriptor.getUri()))); conceptResource.resource .toXmlFile(MindRaider.noteCustodian.getConceptResourceFilename( outlineDescriptor.getUri(), conceptRdfResource.getURI())); } catch (Exception exception) { logger.error("Unable to ressurect concept from RDF - deleting " + conceptRdfResource.getURI(), e); // TODO purge concept from the filesystem (a robust 
implementation that expects // that [concept].xml is not there/is locked // TODO do purge } } } // TODO FIX: remove concepts from notebook.xml.rdf, that do not exist in notebook.xml OR rather create notebook.xml from what's in RDF // TODO FIX: concepts in RDF vs. notebook.xml vs. on the filesystem // TODO run discarded :-) MindRaider.outlineCustodian.getDiscardedConceptDescriptors(outlineDescriptor.getUri()); } } } } } // TODO rebuild search index (after low level changes, FTS index must be updated) // clean memory Runtime.getRuntime().runFinalization(); Runtime.getRuntime().gc(); logger.debug("Total outlines: " + allOutlines.size()); // {{debug}} logger.debug("Fixed problems: " + fixes); // {{debug}} StatusBar.show("Check & fix results: total outlines " + allOutlines.size() + ", total concepts " + totalConcepts + ", fixed problems " + fixes); }
From source file:es.caib.seycon.ng.servei.XarxaServiceImpl.java
protected Collection<Maquina> handleFindMaquinaOfirmaticaUsuariByFiltre(String nom, String sistemaOperatiu, String adreca, String dhcp, String correu, String ofimatica, String alias, String mac, String descripcio, String xarxa, String codiUsuari, Boolean restringeixCerca, String servidorImpressores) throws Exception { int limitResults = Integer.parseInt(System.getProperty("soffid.ui.maxrows")); //$NON-NLS-1$ if (nom != null && (nom.trim().compareTo("") == 0 || nom.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ nom = null;/* w ww. j a va 2s . c o m*/ } if (sistemaOperatiu != null && (sistemaOperatiu.trim().compareTo("") == 0 || sistemaOperatiu.trim().compareTo( //$NON-NLS-1$ "%") == 0)) { //$NON-NLS-1$ sistemaOperatiu = null; } if (adreca != null && (adreca.trim().compareTo("") == 0 || adreca.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ adreca = null; } if (dhcp != null && (dhcp.trim().compareTo("") == 0 || dhcp.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ dhcp = null; } if (correu != null && (correu.trim().compareTo("") == 0 || correu.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ correu = null; } if (ofimatica != null && (ofimatica.trim().compareTo("") == 0 || ofimatica.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ ofimatica = null; } if (alias != null && (alias.trim().compareTo("") == 0 || alias.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ alias = null; } if (mac != null && (mac.trim().compareTo("") == 0 || mac.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ mac = null; } if (descripcio != null && (descripcio.trim().compareTo("") == 0 || descripcio.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ descripcio = null; } if (xarxa != null && (xarxa.trim().compareTo("") == 0 || xarxa.trim().compareTo("%") == 0)) { //$NON-NLS-1$ //$NON-NLS-2$ xarxa = null; } if (codiUsuari != null && (codiUsuari.trim().compareTo("") == 0 || codiUsuari.trim().compareTo("%") == 0)) { 
//$NON-NLS-1$ //$NON-NLS-2$ codiUsuari = null; } if (servidorImpressores != null && (servidorImpressores.trim().compareTo("") == 0 || servidorImpressores.trim() //$NON-NLS-1$ .compareTo("%") == 0)) { //$NON-NLS-1$ servidorImpressores = null; } Collection<MaquinaEntity> maquines = null; // Realizamos la siguiente consulta (sin tener cuenta el alias) String query = "select distinct maquina from " //$NON-NLS-1$ + " es.caib.seycon.ng.model.SessioEntity sessio " //$NON-NLS-1$ + " right outer join sessio.maquina as maquina " //$NON-NLS-1$ + " left outer join sessio.usuari as usuari" //$NON-NLS-1$ + " where " //$NON-NLS-1$ + "(:nom is null or maquina.nom like :nom) and (:sistemaOperatiu is null or " //$NON-NLS-1$ + "maquina.operatingSystem.name like :sistemaOperatiu) and (:adreca is null or " //$NON-NLS-1$ + "maquina.adreca like :adreca) and (:dhcp is null or " //$NON-NLS-1$ + "maquina.dhcp like :dhcp) and (:correu is null or " //$NON-NLS-1$ + "maquina.correu like :correu) and (:ofimatica is null or " //$NON-NLS-1$ + "maquina.ofimatica like :ofimatica) " //$NON-NLS-1$ + "and (:mac is null or maquina.mac like :mac) and " //$NON-NLS-1$ + "(:descripcio is null or maquina.descripcio like :descripcio) and " //$NON-NLS-1$ + "(:xarxa is null or maquina.xarxa.codi like :xarxa) and " //$NON-NLS-1$ + "(:codiUsuari is null or (usuari is not null and usuari.codi like :codiUsuari))" //$NON-NLS-1$ + "and (:servidorImpressores is null or maquina.servidorImpressores like :servidorImpressores) " //$NON-NLS-1$ + "order by maquina.nom "; //$NON-NLS-1$ Parameter[] params = new Parameter[] { new Parameter("nom", nom), //$NON-NLS-1$ new Parameter("sistemaOperatiu", sistemaOperatiu), new Parameter("adreca", adreca), //$NON-NLS-1$ //$NON-NLS-2$ new Parameter("dhcp", dhcp), new Parameter("correu", correu), //$NON-NLS-1$ //$NON-NLS-2$ new Parameter("ofimatica", ofimatica), new Parameter("mac", mac), //$NON-NLS-1$ //$NON-NLS-2$ new Parameter("descripcio", descripcio), new Parameter("xarxa", xarxa), 
//$NON-NLS-1$ //$NON-NLS-2$ new Parameter("codiUsuari", codiUsuari), //$NON-NLS-1$ new Parameter("servidorImpressores", servidorImpressores) }; //$NON-NLS-1$ maquines = getMaquinaEntityDao().query(query, params); // Filtramos por alias (si se ha especificado algn valor) if (alias != null) { Collection maquinesAlias = getAliasMaquinaEntityDao().findMaquinaByAlias(alias); HashSet h_maquinesAlias = new HashSet(maquinesAlias.size()); for (Iterator it = maquinesAlias.iterator(); it.hasNext();) { MaquinaEntity maqAlias = (MaquinaEntity) it.next(); h_maquinesAlias.add(maqAlias.getId()); } // Nos quedamos slo con las mquinas de la bsqueda que tengan el // alias indicado for (Iterator it = maquines.iterator(); it.hasNext();) { MaquinaEntity maq = (MaquinaEntity) it.next(); if (!h_maquinesAlias.contains(maq.getId())) it.remove(); // Lo eliminamos (no tiene el alias buscado) } } // Check results list lenght if (maquines.size() > limitResults) { return getMaquinaEntityDao().toMaquinaList(maquines).subList(0, limitResults); } return getMaquinaEntityDao().toMaquinaList(maquines); }