List of usage examples for java.util.Set.clear()
void clear();
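Removes all of the elements from this set; the set will be empty after this call returns. Before the real-world examples below, here is a minimal, self-contained sketch of the basic contract (class and variable names are illustrative only):

import java.util.HashSet;
import java.util.Set;

public class SetClearDemo {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");

        // clear() removes every element; the set object itself remains usable
        names.clear();

        System.out.println(names.isEmpty()); // true
        System.out.println(names.size());    // 0

        // a cleared set can be reused, e.g. for batching, as several examples below do
        names.add("carol");
        System.out.println(names); // [carol]
    }
}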
From source file:org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopierTest.java
@Test
public void cowReadDoneFromLocalIfFileExist() throws Exception {
    final Set<String> readLocal = newHashSet();
    Directory baseDir = new CloseSafeDir() {
        @Override
        public IndexInput openInput(String name, IOContext context) throws IOException {
            readLocal.add(name);
            return super.openInput(name, context);
        }
    };
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    IndexCopier copier = new RAMIndexCopier(baseDir, sameThreadExecutor(), getWorkDir());

    final Set<String> readRemotes = newHashSet();
    Directory remote = new RAMDirectory() {
        @Override
        public IndexInput openInput(String name, IOContext context) throws IOException {
            readRemotes.add(name);
            return super.openInput(name, context);
        }
    };
    byte[] t1 = writeFile(remote, "t1");

    Directory local = copier.wrapForWrite(defn, remote, false);

    // Read should be served from remote
    readRemotes.clear();
    readLocal.clear();
    readAndAssert(local, "t1", t1);
    assertEquals(newHashSet("t1"), readRemotes);
    assertEquals(newHashSet(), readLocal);

    // Now pull in the file t1 via CopyOnRead in baseDir
    Directory localForRead = copier.wrapForRead("/foo", defn, remote);
    readAndAssert(localForRead, "t1", t1);

    // Read should be served from local
    readRemotes.clear();
    readLocal.clear();
    readAndAssert(local, "t1", t1);
    assertEquals(newHashSet(), readRemotes);
    assertEquals(newHashSet("t1"), readLocal);

    local.close();
}
From source file:gate.creole.tokeniser.SimpleTokeniser.java
/**
 * Converts the FSM from a non-deterministic to a deterministic one by
 * eliminating all the unrestricted transitions.
 */
void eliminateVoidTransitions() throws TokeniserException {
    //kalina: clear() faster than init() which is called with init()
    newStates.clear();
    Set<Set<FSMState>> sdStates = new HashSet<Set<FSMState>>();
    LinkedList<Set<FSMState>> unmarkedDStates = new LinkedList<Set<FSMState>>();
    DFSMState dCurrentState = new DFSMState(this);
    Set<FSMState> sdCurrentState = new HashSet<FSMState>();
    sdCurrentState.add(initialState);
    sdCurrentState = lambdaClosure(sdCurrentState);
    newStates.put(sdCurrentState, dCurrentState);
    sdStates.add(sdCurrentState);

    // find out if the new state is a final one
    Iterator<FSMState> innerStatesIter = sdCurrentState.iterator();
    String rhs;
    FSMState currentInnerState;
    Set<String> rhsClashSet = new HashSet<String>();
    boolean newRhs = false;
    while (innerStatesIter.hasNext()) {
        currentInnerState = innerStatesIter.next();
        if (currentInnerState.isFinal()) {
            rhs = currentInnerState.getRhs();
            rhsClashSet.add(rhs);
            dCurrentState.rhs = rhs;
            newRhs = true;
        }
    }
    if (rhsClashSet.size() > 1) {
        Err.println("Warning, rule clash: " + rhsClashSet
                + "\nSelected last definition: " + dCurrentState.rhs);
    }
    if (newRhs)
        dCurrentState.buildTokenDesc();
    rhsClashSet.clear();
    unmarkedDStates.addFirst(sdCurrentState);
    dInitialState = dCurrentState;
    Set<FSMState> nextSet;

    while (!unmarkedDStates.isEmpty()) {
        sdCurrentState = unmarkedDStates.removeFirst();
        for (int type = 0; type < maxTypeId; type++) {
            nextSet = new HashSet<FSMState>();
            innerStatesIter = sdCurrentState.iterator();
            while (innerStatesIter.hasNext()) {
                currentInnerState = innerStatesIter.next();
                Set<FSMState> tempSet = currentInnerState.nextSet(type);
                if (null != tempSet)
                    nextSet.addAll(tempSet);
            }

            if (!nextSet.isEmpty()) {
                nextSet = lambdaClosure(nextSet);
                dCurrentState = newStates.get(nextSet);
                if (dCurrentState == null) {
                    // we have a new DFSMState
                    dCurrentState = new DFSMState(this);
                    sdStates.add(nextSet);
                    unmarkedDStates.add(nextSet);

                    // check to see whether the new state is a final one
                    innerStatesIter = nextSet.iterator();
                    newRhs = false;
                    while (innerStatesIter.hasNext()) {
                        currentInnerState = innerStatesIter.next();
                        if (currentInnerState.isFinal()) {
                            rhs = currentInnerState.getRhs();
                            rhsClashSet.add(rhs);
                            dCurrentState.rhs = rhs;
                            newRhs = true;
                        }
                    }
                    if (rhsClashSet.size() > 1) {
                        Err.println("Warning, rule clash: " + rhsClashSet
                                + "\nSelected last definition: " + dCurrentState.rhs);
                    }
                    if (newRhs)
                        dCurrentState.buildTokenDesc();
                    rhsClashSet.clear();
                    newStates.put(nextSet, dCurrentState);
                }
                newStates.get(sdCurrentState).put(type, dCurrentState);
            }
        }
    }
}
From source file:com.linkedin.databus.core.TestDbusEventBufferMult.java
@Test
public void testEnableStreamFromLatest() throws DatabusException, IOException {
    CheckpointMult cp = null;
    Collection<PhysicalPartitionKey> phyPartitions = new ArrayList<PhysicalPartitionKey>();
    DbusEventBufferMult db = new DbusEventBufferMult();
    DbusEventBufferMult.DbusEventBufferBatchReader dbr =
            db.new DbusEventBufferBatchReader(cp, phyPartitions, null);

    // Return true only once if streamFromLatest == true
    PhysicalPartitionKey pk = new PhysicalPartitionKey();
    Set<PhysicalPartitionKey> sls = new HashSet<PhysicalPartitionKey>();
    boolean streamFromLatestScn = true;

    // The set is initially empty - meaning none of the partitions have been served
    Assert.assertTrue(dbr.computeStreamFromLatestScnForPartition(pk, sls, streamFromLatestScn));
    Assert.assertEquals(sls.size(), 1);
    Assert.assertFalse(dbr.computeStreamFromLatestScnForPartition(pk, sls, streamFromLatestScn));
    sls.clear();

    // Check null input
    Assert.assertFalse(dbr.computeStreamFromLatestScnForPartition(null, sls, streamFromLatestScn));

    streamFromLatestScn = false;

    // The set is initially empty - meaning none of the partitions have been served
    Assert.assertFalse(dbr.computeStreamFromLatestScnForPartition(pk, sls, streamFromLatestScn));
    Assert.assertEquals(sls.size(), 0);
    Assert.assertFalse(dbr.computeStreamFromLatestScnForPartition(pk, sls, streamFromLatestScn));
    Assert.assertEquals(sls.size(), 0);

    // Check null input
    Assert.assertFalse(dbr.computeStreamFromLatestScnForPartition(null, sls, streamFromLatestScn));
}
From source file:net.sourceforge.sqlexplorer.service.SqlexplorerService.java
/**
 * Find all jar paths by jar names.
 *
 * @param root
 * @param jarNames
 * @return the matching jar paths; an empty Set indicates the lookup failed
 */
private Set<String> findAllJarPath(File root, List<String> jarNames) {
    Set<String> jarPathes = new HashSet<String>();
    if (!root.exists() || jarNames == null || jarNames.isEmpty()) {
        return jarPathes;
    }
    boolean allIsOK = true;
    try {
        for (String jarName : jarNames) {
            List<File> jarFiles = FilesUtils.getJarFilesFromFolder(root, jarName);
            if (jarFiles.isEmpty()) {
                allIsOK = false;
                break;
            }
            for (File file : jarFiles) {
                jarPathes.add(file.getPath());
            }
        }
    } catch (MalformedURLException e) {
        log.error(e);
    }
    if (!allIsOK) {
        jarPathes.clear();
    }
    return jarPathes;
}
From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java
private void getFilesInDirInProgress(PooledDbConnection conn, long dirRecordId) throws SQLException {
    PreparedStatement filesInDirStmt = conn.prepareStatement(GET_FILES_IN_DIR_IN_PROGRESS_STMT_NAME,
            getFilesInDirInProgressSql);
    filesInDirStmt.clearParameters();
    filesInDirStmt.setLong(1, dirRecordId);

    Set<String> paths = new HashSet<String>();
    ResultSet rs = filesInDirStmt.executeQuery();
    while (rs.next()) {
        String path = rs.getString(ManagedJobFilesTableColumn.SOURCE_PATH.toString());
        paths.add(path);
        if (paths.size() >= HCPMoverProperties.PREPROCESS_FILES_BATCH_SIZE.getAsInt()) {
            insertIntoTempTable(conn, paths);
            paths.clear();
        }
    }
    if (paths.size() > 0) {
        insertIntoTempTable(conn, paths);
    }
    rs.close();
}
From source file:net.sf.jabref.gui.entryeditor.EntryEditor.java
/**
 * NOTE: This method is only used for the source panel, not for the
 * other tabs. Look at EntryEditorTab for the setup of text components
 * in the other tabs.
 */
private void setupJTextComponent(JTextComponent textComponent) {
    // Set up key bindings and focus listener for the FieldEditor.
    InputMap inputMap = textComponent.getInputMap(JComponent.WHEN_FOCUSED);
    ActionMap actionMap = textComponent.getActionMap();

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_STORE_FIELD), "store");
    actionMap.put("store", getStoreFieldAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_NEXT_PANEL), "right");
    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_NEXT_PANEL_2), "right");
    actionMap.put("right", getSwitchRightAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_PREVIOUS_PANEL), "left");
    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_PREVIOUS_PANEL_2), "left");
    actionMap.put("left", getSwitchLeftAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.HELP), "help");
    actionMap.put("help", getHelpAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.SAVE_DATABASE), "save");
    actionMap.put("save", getSaveDatabaseAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.NEXT_TAB), "nexttab");
    actionMap.put("nexttab", frame.nextTab);

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.PREVIOUS_TAB), "prevtab");
    actionMap.put("prevtab", frame.prevTab);

    Set<AWTKeyStroke> keys = new HashSet<>(
            textComponent.getFocusTraversalKeys(KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS));
    keys.clear();
    keys.add(AWTKeyStroke.getAWTKeyStroke("pressed TAB"));
    textComponent.setFocusTraversalKeys(KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS, keys);

    keys = new HashSet<>(textComponent.getFocusTraversalKeys(KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS));
    keys.clear();
    keys.add(KeyStroke.getKeyStroke("shift pressed TAB"));
    textComponent.setFocusTraversalKeys(KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS, keys);

    textComponent.addFocusListener(new FieldListener());
}
From source file:org.alfresco.repo.node.getchildren.GetChildrenCannedQueryTest.java
public void testTypeFiltering() throws Exception {
    NodeRef parentNodeRef = getOrCreateParentTestFolder("GetChildrenCannedQueryTest-" + TEST_RUN_ID);

    // note: parent should contain test example(s) of each type
    Set<QName> childTypeQNames = new HashSet<QName>(3);
    Set<QName> antiChildTypeQNames = new HashSet<QName>(3);

    // note: subtype != supertype

    // folders
    childTypeQNames.clear();
    childTypeQNames.add(ContentModel.TYPE_FOLDER);
    antiChildTypeQNames.clear();
    antiChildTypeQNames.add(TEST_FOLDER_SUBTYPE);
    filterByTypeAndCheck(parentNodeRef, childTypeQNames, antiChildTypeQNames);

    // files (content)
    childTypeQNames.clear();
    childTypeQNames.add(ContentModel.TYPE_CONTENT);
    antiChildTypeQNames.clear();
    antiChildTypeQNames.add(TEST_CONTENT_SUBTYPE);
    filterByTypeAndCheck(parentNodeRef, childTypeQNames, antiChildTypeQNames);

    // folders and files (base types)
    childTypeQNames.clear();
    childTypeQNames.add(ContentModel.TYPE_CONTENT);
    childTypeQNames.add(ContentModel.TYPE_FOLDER);
    antiChildTypeQNames.clear();
    antiChildTypeQNames.add(TEST_CONTENT_SUBTYPE);
    antiChildTypeQNames.add(TEST_FOLDER_SUBTYPE);
    filterByTypeAndCheck(parentNodeRef, childTypeQNames, antiChildTypeQNames);

    // folders and files (specific subtypes)
    childTypeQNames.clear();
    childTypeQNames.add(TEST_CONTENT_SUBTYPE);
    childTypeQNames.add(TEST_FOLDER_SUBTYPE);
    antiChildTypeQNames.clear();
    antiChildTypeQNames.add(ContentModel.TYPE_CONTENT);
    antiChildTypeQNames.add(ContentModel.TYPE_FOLDER);
    filterByTypeAndCheck(parentNodeRef, childTypeQNames, antiChildTypeQNames);

    // Specific super-type (that likely does not exist in DB, at least yet - see ACE-5114 -
    // alternatively could create custom type to ensure this)
    // note: results should return 0
    childTypeQNames.clear();
    childTypeQNames.add(ContentModel.TYPE_LINK);
    PagingResults<NodeRef> results = list(parentNodeRef, -1, -1, 0, childTypeQNames, null, null);
    assertEquals(0, results.getPage().size());

    childTypeQNames.clear();
    childTypeQNames.add(ContentModel.TYPE_CMOBJECT);
    results = list(parentNodeRef, -1, -1, 0, childTypeQNames, null, null);
    assertEquals(0, results.getPage().size());
}
From source file:io.ecarf.core.cloud.task.processor.reason.phase0.DoReasonTask3.java
/**
 * @param term
 * @param select
 * @param schemaTriples
 * @param rows
 * @param table
 * @param writer
 * @return the number of inferred triples
 * @throws IOException
 */
private int inferAndSaveTriplesToFile(Term term, List<String> select, Set<Triple> schemaTriples,
        BigInteger rows, String table, PrintWriter writer) throws IOException {
    int inferredTriples = 0;
    int failedTriples = 0;

    // loop through the instance triples, probably stored in a file, and generate all the
    // triples matching the schema triples set
    try (BufferedReader r = new BufferedReader(new FileReader(term.getFilename()), Constants.GZIP_BUF_SIZE)) {
        Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(r);

        // records will contain lots of duplicates
        Set<String> inferredAlready = new HashSet<String>();

        try {
            for (CSVRecord record : records) {
                String values = (select.size() == 1) ? record.get(0)
                        : StringUtils.join(record.values(), ',');

                if (!inferredAlready.contains(values)) {
                    inferredAlready.add(values);

                    NTriple instanceTriple = new NTriple();
                    if (select.size() == 1) {
                        instanceTriple.set(select.get(0), record.get(0));
                    } else {
                        instanceTriple.set(select, record.values());
                    }

                    for (Triple schemaTriple : schemaTriples) {
                        Rule rule = GenericRule.getRule(schemaTriple);
                        Triple inferredTriple = rule.head(schemaTriple, instanceTriple);
                        writer.println(inferredTriple.toCsv());
                        inferredTriples++;
                    }

                    // this is just to avoid any memory issues
                    if (inferredAlready.size() > MAX_CACHE) {
                        inferredAlready.clear();
                        log.info("Cleared cache of inferred terms");
                    }
                }
            }
        } catch (Exception e) {
            log.error("Failed to parse selected terms", e);
            failedTriples++;
        }
    }

    log.info("\nSelect Triples: " + rows + ", Inferred: " + inferredTriples
            + ", Triples for term: " + term + ", Failed Triples: " + failedTriples);
    return inferredTriples;
}
From source file:gr.omadak.leviathan.asp.AspParser.java
private void produceCode(List ast, Writer writer, Set includes, String path) {
    VbsGenerator vbgenerator = null;
    JsGenerator jsgenerator = null;
    boolean isFirst = true;
    for (Iterator it = ast.iterator(); it.hasNext();) {
        Object[] nodes = (Object[]) it.next();
        if (nodes.length == 3) {
            AST phpTree = (AST) nodes[2];
            boolean isVbTree = ((Boolean) nodes[0]).booleanValue();
            CodeGenerator generator;
            if (isVbTree) {
                if (vbgenerator == null) {
                    vbgenerator = new VbsGenerator();
                    vbgenerator.setWriter(writer);
                }
                generator = vbgenerator;
            } else {
                if (jsgenerator == null) {
                    jsgenerator = new JsGenerator();
                }
                jsgenerator.setWriter(writer);
                generator = jsgenerator;
            }
            try {
                if (isFirst && !includes.isEmpty()) {
                    printIncludes(generator, includes);
                    includes.clear();
                }
                isFirst = false;
                generator.generate(phpTree);
            } catch (ANTLRException an) {
                LOG.error("Failed to produce code from " + (isVbTree ? "vb" : "js") + " : " + path, an);
                try {
                    generator.getBuffer().end();
                    writer.flush();
                } catch (IOException ioex) {
                    LOG.error("Failed to flush buffers" + ioex);
                }
            }
        }
    }
}
From source file:com.bstek.dorado.data.config.xml.DataTypeParser.java
@SuppressWarnings("unchecked")
@Override
protected Object internalParse(Node node, ParseContext context) throws Exception {
    Element element = (Element) node;
    DataParseContext dataContext = (DataParseContext) context;
    Set<Node> parsingNodes = dataContext.getParsingNodes();
    Map<String, DataTypeDefinition> parsedDataTypes = dataContext.getParsedDataTypes();

    String name = element.getAttribute(XmlConstants.ATTRIBUTE_NAME);
    if (StringUtils.isEmpty(name)) {
        throw new XmlParseException("DataType name undefined.", element, context);
    }

    DataTypeDefinition dataType = parsedDataTypes.get(name);
    if (dataType != null) {
        return dataType;
    }

    parsingNodes.add(element);
    dataContext.setPrivateObjectName(Constants.PRIVATE_DATA_OBJECT_PREFIX
            + DataXmlConstants.PATH_DATE_TYPE_SHORT_NAME + Constants.PRIVATE_DATA_OBJECT_SUBFIX + name);

    dataType = (DataTypeDefinition) super.internalParse(node, dataContext);

    Class<?> matchType = (Class<?>) dataType.removeProperty(DataXmlConstants.ATTRIBUTE_MATCH_TYPE);
    dataType.setMatchType(matchType);

    Class<?> creationType = (Class<?>) dataType.removeProperty(DataXmlConstants.ATTRIBUTE_CREATION_TYPE);
    if (creationType != null) {
        if (matchType != null && !matchType.isAssignableFrom(creationType)) {
            throw new XmlParseException("The CreationType [" + creationType
                    + "] is not a sub type of the MatchType [" + matchType + "].", element, context);
        }
        dataType.setCreationType(creationType);
    }

    dataContext.restorePrivateObjectName();
    parsingNodes.clear();
    dataType.setName(name);

    final String DEFAULT_DATATYPE_PARENT = Configure.getString("data.defaultEntityDataTypeParent", "Entity");
    if (dataType.getParentReferences() == null && !DEFAULT_DATATYPE_PARENT.equals(name)) {
        boolean useDefaultParent = false;
        String impl = dataType.getImpl();
        if (StringUtils.isNotEmpty(impl)) {
            Class<? extends DataType> type = ClassUtils.forName(impl);
            useDefaultParent = EntityDataType.class.isAssignableFrom(type);
        } else {
            useDefaultParent = true;
        }
        if (useDefaultParent) {
            DefinitionReference<?> dataTypeRef = dataContext.getDataTypeReference(DEFAULT_DATATYPE_PARENT);
            dataType.setParentReferences(new DefinitionReference[] { dataTypeRef });
        }
    }

    parsedDataTypes.put(name, dataType);
    return dataType;
}