List of usage examples for java.util BitSet set
public void set(int fromIndex, int toIndex)
From source file:org.caleydo.core.util.impute.KNNImpute.java
/**
 * Splits the given neighborhood into two groups by running a 2-means clustering
 * over the genes' sample vectors.
 *
 * @param neighborhood the genes to split
 * @return a pair of gene lists, one per cluster
 */
private Pair<List<Gene>, List<Gene>> twoMeanClusterSplit(List<Gene> neighborhood) {
    final int n = neighborhood.size();
    final int maxit = desc.getMaxit();
    final double eps = desc.getEps();
    // pick two random genes as the initial cluster centers (may coincide if
    // a_start == b_start; NOTE(review): not guarded against here)
    int a_start = r.nextInt(n);
    int b_start = r.nextInt(n);
    Gene a_center = new Gene(1, -1, Arrays.copyOf(neighborhood.get(a_start).data, samples));
    Gene b_center = new Gene(1, -1, Arrays.copyOf(neighborhood.get(b_start).data, samples));
    // "pong" accumulators for the next centers; NaN marks "no value seen yet"
    float[] a_center_pong = new float[samples];
    Arrays.fill(a_center_pong, Float.NaN);
    float[] b_center_pong = new float[samples];
    Arrays.fill(b_center_pong, Float.NaN);
    float[] tmp;
    // bit i set <=> neighborhood.get(i) currently belongs to cluster A
    BitSet partOf_a = new BitSet(n);
    double d_old = 0;
    for (int i = 0; i < maxit; ++i) {
        int j = 0;
        int changed = 0;
        double d_new = 0; // total distance of all genes to their assigned center
        for (Gene gene : neighborhood) {
            final double a_distance = distance(a_center, gene);
            final double b_distance = distance(b_center, gene);
            final boolean in_a = a_distance < b_distance;
            if (partOf_a.get(j) != in_a) {
                changed++;
                partOf_a.set(j, in_a);
            }
            d_new += in_a ? a_distance : b_distance;
            tmp = in_a ? a_center_pong : b_center_pong;
            // shift new center: accumulate this gene's non-NaN samples
            for (int k = 0; k < samples; ++k) {
                if (!gene.isNaN(k)) {
                    if (Float.isNaN(tmp[k]))
                        tmp[k] = gene.get(k);
                    else
                        tmp[k] += gene.get(k);
                }
            }
            j++;
        }
        // converged: no reassignment, or a perfect fit
        if (changed == 0 || d_new == 0)
            break;
        // relative improvement below eps (skipped on the first iteration,
        // where d_old is still 0 and the ratio would divide by zero)
        final double ratio = Math.abs(d_new - d_old) / d_old;
        if (i > 0 && ratio < eps)
            break;
        d_old = d_new;
        int a_n = partOf_a.cardinality();
        int b_n = n - a_n;
        if (a_n == 0 || b_n == 0) {
            // FIXME
            // NOTE(review): an empty cluster is not handled; updateCenter is
            // then called with a count of 0 — confirm intended behavior.
        }
        updateCenter(a_center, a_center_pong, a_n);
        updateCenter(b_center, b_center_pong, b_n);
    }
    return split(neighborhood, partOf_a);
}
From source file:srebrinb.compress.sevenzip.SevenZFile.java
/**
 * Reads an "all defined" flag byte from the header; if it is non-zero, every
 * one of the {@code size} bits is defined, otherwise an explicit bit vector
 * follows and is read via {@link #readBits}.
 *
 * @param header buffer positioned at the flag byte
 * @param size   number of bits represented
 * @return a BitSet with bit i set iff entry i is defined
 * @throws IOException if the underlying read fails
 */
private BitSet readAllOrBits(final ByteBuffer header, final int size) throws IOException {
    final int areAllDefined = getUnsignedByte(header);
    final BitSet bits;
    if (areAllDefined != 0) {
        bits = new BitSet(size);
        // set the whole [0, size) range in one call instead of a per-bit loop
        bits.set(0, size);
    } else {
        bits = readBits(header, size);
    }
    return bits;
}
From source file:bobs.is.compress.sevenzip.SevenZFile.java
private BitSet readAllOrBits(final DataInput header, final int size) throws IOException { final int areAllDefined = header.readUnsignedByte(); final BitSet bits; if (areAllDefined != 0) { bits = new BitSet(size); for (int i = 0; i < size; i++) { bits.set(i, true); }//w w w . ja v a 2s. c o m } else { bits = readBits(header, size); } return bits; }
From source file:srebrinb.compress.sevenzip.SevenZFile.java
private BitSet readBits(final ByteBuffer header, final int size) throws IOException { final BitSet bits = new BitSet(size); int mask = 0; int cache = 0; for (int i = 0; i < size; i++) { if (mask == 0) { mask = 0x80;//from w w w. ja va 2 s.c o m cache = getUnsignedByte(header); } bits.set(i, (cache & mask) != 0); mask >>>= 1; } return bits; }
From source file:bobs.is.compress.sevenzip.SevenZFile.java
private BitSet readBits(final DataInput header, final int size) throws IOException { final BitSet bits = new BitSet(size); int mask = 0; int cache = 0; for (int i = 0; i < size; i++) { if (mask == 0) { mask = 0x80;// w ww . j a v a2 s.c o m cache = header.readUnsignedByte(); } bits.set(i, (cache & mask) != 0); mask >>>= 1; } return bits; }
From source file:org.alfresco.module.org_alfresco_module_rm.capability.RMAfterInvocationProvider.java
private Object[] decide(Authentication authentication, Object object, ConfigAttributeDefinition config, Object[] returnedObject) { // Assumption: value is not null BitSet incudedSet = new BitSet(returnedObject.length); List<ConfigAttributeDefintion> supportedDefinitions = extractSupportedDefinitions(config); if (supportedDefinitions.size() == 0) { return returnedObject; }/* w w w .j a v a 2s.c o m*/ for (int i = 0, l = returnedObject.length; i < l; i++) { Object current = returnedObject[i]; int parentReadCheck = checkRead(getParentReadCheckNode(current)); int childReadChek = checkRead(getChildReadCheckNode(current)); for (ConfigAttributeDefintion cad : supportedDefinitions) { incudedSet.set(i, true); NodeRef testNodeRef = null; if (cad.parent) { if (StoreRef.class.isAssignableFrom(current.getClass())) { testNodeRef = null; } else if (NodeRef.class.isAssignableFrom(current.getClass())) { testNodeRef = nodeService.getPrimaryParent((NodeRef) current).getParentRef(); } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) { testNodeRef = ((ChildAssociationRef) current).getParentRef(); } else if (PermissionCheckValue.class.isAssignableFrom(current.getClass())) { NodeRef nodeRef = ((PermissionCheckValue) current).getNodeRef(); testNodeRef = nodeService.getPrimaryParent(nodeRef).getParentRef(); } else { throw new ACLEntryVoterException( "The specified parameter is recognized: " + current.getClass()); } } else { if (StoreRef.class.isAssignableFrom(current.getClass())) { testNodeRef = nodeService.getRootNode((StoreRef) current); } else if (NodeRef.class.isAssignableFrom(current.getClass())) { testNodeRef = (NodeRef) current; } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) { testNodeRef = ((ChildAssociationRef) current).getChildRef(); } else if (PermissionCheckValue.class.isAssignableFrom(current.getClass())) { testNodeRef = ((PermissionCheckValue) current).getNodeRef(); } else { throw new ACLEntryVoterException( "The specified 
parameter is recognized: " + current.getClass()); } } if (logger.isDebugEnabled()) { logger.debug("\t" + cad.typeString + " test on " + testNodeRef + " from " + current.getClass().getName()); } if (isUnfiltered(testNodeRef)) { continue; } int readCheck = childReadChek; if (cad.parent) { readCheck = parentReadCheck; } if (incudedSet.get(i) && (testNodeRef != null) && (readCheck != AccessDecisionVoter.ACCESS_GRANTED)) { incudedSet.set(i, false); } } } if (incudedSet.cardinality() == returnedObject.length) { return returnedObject; } else { Object[] answer = new Object[incudedSet.cardinality()]; for (int i = incudedSet.nextSetBit(0), p = 0; i >= 0; i = incudedSet.nextSetBit(++i), p++) { answer[p] = returnedObject[i]; } return answer; } }
From source file:org.alfresco.repo.security.permissions.impl.acegi.ACLEntryAfterInvocationProvider.java
@SuppressWarnings("rawtypes") private Object[] decide(Authentication authentication, Object object, ConfigAttributeDefinition config, Object[] returnedObject) throws AccessDeniedException { // Assumption: value is not null BitSet incudedSet = new BitSet(returnedObject.length); List<ConfigAttributeDefintion> supportedDefinitions = extractSupportedDefinitions(config); if (supportedDefinitions.size() == 0) { return returnedObject; }//from w w w. j av a 2s . c o m for (int i = 0, l = returnedObject.length; i < l; i++) { Object current = returnedObject[i]; for (ConfigAttributeDefintion cad : supportedDefinitions) { incudedSet.set(i, true); NodeRef testNodeRef = null; if (cad.typeString.equals(AFTER_ACL_NODE)) { if (StoreRef.class.isAssignableFrom(current.getClass())) { testNodeRef = nodeService.getRootNode((StoreRef) current); } else if (NodeRef.class.isAssignableFrom(current.getClass())) { testNodeRef = (NodeRef) current; } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) { testNodeRef = ((ChildAssociationRef) current).getChildRef(); } else if (Pair.class.isAssignableFrom(current.getClass())) { testNodeRef = (NodeRef) ((Pair) current).getSecond(); } else if (PermissionCheckValue.class.isAssignableFrom(current.getClass())) { testNodeRef = ((PermissionCheckValue) current).getNodeRef(); } else { throw new ACLEntryVoterException( "The specified parameter is recognized: " + current.getClass()); } } else if (cad.typeString.equals(AFTER_ACL_PARENT)) { if (StoreRef.class.isAssignableFrom(current.getClass())) { testNodeRef = null; } else if (NodeRef.class.isAssignableFrom(current.getClass())) { testNodeRef = nodeService.getPrimaryParent((NodeRef) current).getParentRef(); } else if (ChildAssociationRef.class.isAssignableFrom(current.getClass())) { testNodeRef = ((ChildAssociationRef) current).getParentRef(); } else if (Pair.class.isAssignableFrom(current.getClass())) { testNodeRef = (NodeRef) ((Pair) current).getSecond(); } else if 
(PermissionCheckValue.class.isAssignableFrom(current.getClass())) { NodeRef nodeRef = ((PermissionCheckValue) current).getNodeRef(); testNodeRef = nodeService.getPrimaryParent(nodeRef).getParentRef(); } else { throw new ACLEntryVoterException( "The specified parameter is recognized: " + current.getClass()); } } if (log.isDebugEnabled()) { log.debug("\t" + cad.typeString + " test on " + testNodeRef + " from " + current.getClass().getName()); } if (isUnfiltered(testNodeRef)) { continue; } if (incudedSet.get(i) && (testNodeRef != null) && (permissionService.hasPermission(testNodeRef, cad.required.toString()) == AccessStatus.DENIED)) { incudedSet.set(i, false); } } } if (incudedSet.cardinality() == returnedObject.length) { return returnedObject; } else { Object[] answer = new Object[incudedSet.cardinality()]; for (int i = incudedSet.nextSetBit(0), p = 0; i >= 0; i = incudedSet.nextSetBit(++i), p++) { answer[p] = returnedObject[i]; } return answer; } }
From source file:org.apache.mele.embedded.HadoopQueueEmbedded.java
private boolean ackCheck() throws IOException { LOG.info("Starting ack check"); BitSet bitSet = new BitSet(); FileSystem fileSystem = null; try {//from ww w . ja v a 2s . c o m _ackLock.lock(); _ackOutputStream.close(); fileSystem = newFileSystem(_file); FileStatus fileStatus = fileSystem.getFileStatus(_file); long dataLength = fileStatus.getLen(); long totalAckLength = getTotalAckLength(fileSystem); if (!couldContainAllAcks(totalAckLength)) { LOG.info("Existing early [" + totalAckLength + "] because [" + totalAckLength % 12 + "]"); return false; } for (Path ackFile : _ackFiles) { LOG.info("Starting ack check for file [" + ackFile + "]"); DFSInputStream inputStream = null; try { inputStream = getDFS(fileSystem.open(ackFile)); long length = inputStream.getFileLength(); DataInputStream dataInputStream = new DataInputStream(inputStream); while (length > 0) { int pos = (int) dataInputStream.readLong(); // @TODO check position // 4 bytes for storing the length of the message int len = dataInputStream.readInt() + 4; bitSet.set(pos, pos + len); length -= 12; } if (bitSet.cardinality() == dataLength) { return true; } } finally { if (inputStream != null) { inputStream.close(); } } } return false; } finally { reopenAckFile(fileSystem); _ackLock.unlock(); if (fileSystem != null) { fileSystem.close(); } } }
From source file:com.ibm.cics.ca1y.Emit.java
/**
 * Add name / value properties from the contents of CICS start data.
 *
 * @param props
 *            - the properties to add to.
 * @return true if successful
 */
private static boolean addRetrieveData(Properties props) {
    // request only the DATA portion of the start data
    BitSet bs = new BitSet();
    bs.set(RetrieveBits.DATA, true);
    RetrievedDataHolder rdh = new RetrievedDataHolder();
    try {
        Task.getTask().retrieve(bs, rdh);
    } catch (Exception e) {
        // log through the class logger (with the stack trace) instead of
        // printStackTrace(), so the failure appears in normal log output
        logger.log(Level.WARNING, "Unable to retrieve CICS start data", e);
        return false;
    }
    String configuration;
    try {
        // decode the raw start data using the local CCSID
        configuration = new String(rdh.getValue().getData(), LOCAL_CCSID);
    } catch (Exception e) {
        logger.warning(messages.getString("InvalidStartData") + " " + LOCAL_CCSID + ":" + e.getMessage());
        return false;
    }
    if (logger.isLoggable(Level.FINE)) {
        logger.fine(messages.getString("LoadingPropertiesFromStartData") + ":" + configuration);
    }
    return Util.loadProperties(props, configuration);
}
From source file:de.unijena.bioinf.FragmentationTreeConstruction.computation.FragmentationPatternAnalysis.java
/**
 * Step 3. Normalizing
 * Merge all peaks within a single spectrum
 * Return a list of peaks (from all spectra) with relative intensities
 *
 * @param input the processed input whose MS2 spectra are merged and normalized
 * @return the same input, with its merged peak list set and post-processing applied
 */
public ProcessedInput performNormalization(ProcessedInput input) {
    final Ms2Experiment experiment = input.getExperimentInformation();
    final double parentMass = experiment.getIonMass();
    final ArrayList<ProcessedPeak> peaklist = new ArrayList<ProcessedPeak>(100);
    // half the allowed mass deviation defines the merge window around a peak
    final Deviation mergeWindow = getDefaultProfile().getAllowedMassDeviation().divide(2d);
    final Ionization ion = experiment.getPrecursorIonType().getIonization();
    double globalMaxIntensity = 0d;
    for (Ms2Spectrum s : experiment.getMs2Spectra()) {
        // merge peaks: iterate them from highest to lowest intensity and remove peaks which
        // are in the mass range of a high intensive peak
        final MutableSpectrum<Peak> sortedByIntensity = new SimpleMutableSpectrum(s);
        Spectrums.sortSpectrumByDescendingIntensity(sortedByIntensity);
        // simple spectra are always ordered by mass
        final SimpleSpectrum sortedByMass = new SimpleSpectrum(s);
        // bit i set <=> peak i (mass order) has been merged away
        final BitSet deletedPeaks = new BitSet(s.size());
        for (int i = 0; i < s.size(); ++i) {
            // get index of peak in mass-ordered spectrum
            final double mz = sortedByIntensity.getMzAt(i);
            final int index = Spectrums.binarySearch(sortedByMass, mz);
            assert index >= 0;
            if (deletedPeaks.get(index))
                continue; // peak is already deleted
            // delete all peaks within the mass range (scan outward in both directions)
            for (int j = index - 1; j >= 0 && mergeWindow.inErrorWindow(mz, sortedByMass.getMzAt(j)); --j)
                deletedPeaks.set(j, true);
            for (int j = index + 1; j < s.size() && mergeWindow.inErrorWindow(mz, sortedByMass.getMzAt(j)); ++j)
                deletedPeaks.set(j, true);
        }
        // peaks of this spectrum start at 'offset' within the global peaklist
        final int offset = peaklist.size();
        // add all remaining peaks to the peaklist
        for (int i = 0; i < s.size(); ++i) {
            if (!deletedPeaks.get(i)) {
                final ProcessedPeak propeak = new ProcessedPeak(
                        new MS2Peak(s, sortedByMass.getMzAt(i), sortedByMass.getIntensityAt(i)));
                propeak.setIon(ion);
                peaklist.add(propeak);
            }
        }
        // now performNormalization spectrum. Ignore peaks near to the parent peak
        final double lowerbound = parentMass - 0.1d;
        // scale = maximum intensity among this spectrum's peaks below the parent mass
        double scale = 0d;
        for (int i = offset; i < peaklist.size() && peaklist.get(i).getMz() < lowerbound; ++i) {
            scale = Math.max(scale, peaklist.get(i).getIntensity());
        }
        if (scale == 0)
            // happens for spectra with only one peak
            // NOTE(review): falls back to peaklist.get(0), i.e. the first peak of the
            // FIRST spectrum, not peaklist.get(offset) — confirm this is intended.
            scale = peaklist.get(0).getIntensity();
        // now set local relative intensities
        for (int i = offset; i < peaklist.size(); ++i) {
            final ProcessedPeak peak = peaklist.get(i);
            peak.setLocalRelativeIntensity(peak.getIntensity() / scale);
        }
        // and adjust global relative intensity
        globalMaxIntensity = Math.max(globalMaxIntensity, scale);
    }
    // now calculate global normalized intensities
    for (ProcessedPeak peak : peaklist) {
        peak.setGlobalRelativeIntensity(peak.getIntensity() / globalMaxIntensity);
        peak.setRelativeIntensity(normalizationType == NormalizationType.GLOBAL ? peak.getGlobalRelativeIntensity()
                : peak.getLocalRelativeIntensity());
    }
    // finished!
    input.setMergedPeaks(peaklist);
    // postprocess
    postProcess(PostProcessor.Stage.AFTER_NORMALIZING, input);
    return input;
}