List of usage examples for java.util.ArrayList.contains
public boolean contains(Object o)
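contains returns true exactly when the list holds at least one element e with (o == null ? e == null : o.equals(e)); matching is equals-based, not identity-based, and the lookup is a linear scan, so it costs O(n). A minimal sketch of that behavior (class name and values are illustrative only):

    import java.util.ArrayList;
    import java.util.List;

    public class ContainsDemo {
        public static void main(String[] args) {
            List<String> names = new ArrayList<String>();
            names.add("alice");
            names.add("bob");

            // equals-based: a distinct but equal String instance still matches
            System.out.println(names.contains(new String("alice"))); // true
            System.out.println(names.contains("carol"));             // false
            System.out.println(names.contains(null));                // false (null is a legal argument)
        }
    }

The examples below show the method in real projects, most often as a contains-before-add check to keep a list free of duplicates.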
From source file:org.dklisiaris.downtown.helper.XMLParser.java
public ArrayList<String> getImgUrlsFromElem(Element elem, String multiTag) {
    ArrayList<String> multi = new ArrayList<String>();
    NodeList subItems = elem.getElementsByTagName(multiTag);
    for (int j = 0; j < subItems.getLength(); j++) {
        Element subItem = (Element) subItems.item(j);
        String value = getElementValue(subItem);
        if (!multi.contains(value) && isValidImageName(value)) {
            //Log.d(getValue(elem, KEY_NAME), value);
            multi.add(value);
        }
    }
    return multi;
}
From source file:lyonlancer5.karasu.block.BlockRedstoneWire.java
/**
 * Creates a list of all horizontal sides that can get powered by a wire.
 * The list is ordered the same as facingsHorizontal.
 *
 * @param worldIn World
 * @param pos Position of the wire
 * @return List of all facings that can get powered by this wire
 */
private List<EnumFacing> getSidesToPower(World worldIn, BlockPos pos) {
    ArrayList<EnumFacing> retval = Lists.<EnumFacing>newArrayList();
    for (EnumFacing facing : facingsHorizontal) {
        if (isPowerSourceAt(worldIn, pos, facing))
            retval.add(facing);
    }
    if (retval.isEmpty())
        return Lists.<EnumFacing>newArrayList(facingsHorizontal);
    boolean northsouth = retval.contains(EnumFacing.NORTH) || retval.contains(EnumFacing.SOUTH);
    boolean eastwest = retval.contains(EnumFacing.EAST) || retval.contains(EnumFacing.WEST);
    if (northsouth) {
        retval.remove(EnumFacing.EAST);
        retval.remove(EnumFacing.WEST);
    }
    if (eastwest) {
        retval.remove(EnumFacing.NORTH);
        retval.remove(EnumFacing.SOUTH);
    }
    return retval;
}
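Since the membership tests above are over enum constants, an EnumSet would answer the same checks in constant time via a bit mask. A hedged sketch under that assumption (the nested Facing enum stands in for Minecraft's EnumFacing):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.EnumSet;
    import java.util.List;

    public class EnumContainsDemo {
        enum Facing { NORTH, SOUTH, EAST, WEST }

        public static void main(String[] args) {
            List<Facing> powered = new ArrayList<Facing>(Arrays.asList(Facing.NORTH, Facing.EAST));
            // EnumSet.copyOf throws on an empty non-EnumSet collection, so guard first.
            EnumSet<Facing> poweredSet = powered.isEmpty()
                    ? EnumSet.noneOf(Facing.class) : EnumSet.copyOf(powered);
            boolean northSouth = poweredSet.contains(Facing.NORTH) || poweredSet.contains(Facing.SOUTH);
            boolean eastWest = poweredSet.contains(Facing.EAST) || poweredSet.contains(Facing.WEST);
            System.out.println(northSouth + " " + eastWest); // true true
        }
    }

For a four-element list the difference is negligible, and the original List preserves the facing order that the method's contract requires.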
From source file:org.dklisiaris.downtown.helper.XMLParser.java
public ArrayList<String> getMultiValuesFromElem(Element elem, String multiTag) {
    ArrayList<String> multi = new ArrayList<String>();
    NodeList subItems = elem.getElementsByTagName(multiTag);
    for (int j = 0; j < subItems.getLength(); j++) {
        Element subItem = (Element) subItems.item(j);
        String value = getElementValue(subItem);
        if (!multi.contains(value)) {
            //Log.d(getValue(elem, KEY_NAME), value);
            multi.add(value);
            //Log.d("Tel", value);
        }
    }
    return multi;
}
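This helper and getImgUrlsFromElem above use the same contains-before-add idiom: order-preserving deduplication where every insertion rescans the list, costing O(n^2) comparisons over n distinct values. A LinkedHashSet keeps the same first-seen order with constant-time lookups; a sketch of that variant (getElementValue and the surrounding class are assumed from the snippet above):

    import java.util.ArrayList;
    import java.util.LinkedHashSet;
    import java.util.Set;
    import org.w3c.dom.Element;
    import org.w3c.dom.NodeList;

    public ArrayList<String> getMultiValuesFromElem(Element elem, String multiTag) {
        Set<String> seen = new LinkedHashSet<String>(); // first-seen order, O(1) membership
        NodeList subItems = elem.getElementsByTagName(multiTag);
        for (int j = 0; j < subItems.getLength(); j++) {
            seen.add(getElementValue((Element) subItems.item(j))); // add() ignores duplicates
        }
        return new ArrayList<String>(seen);
    }

For the handful of values a typical XML element yields, the ArrayList version is perfectly adequate; the set variant only pays off on large inputs. The same trade-off applies to several later examples, such as getPendingFileList and the VCF exporter's individualList.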
From source file:com.clustercontrol.ws.agent.AgentEndpoint.java
/**
 * [JobFileCheck] Handles a file-check result reported by an agent and, when the
 * reporting facility matches the file-check definition, triggers the associated job.
 * Requires the HinemosAgent MODIFY system privilege.
 *
 * @param jobFileCheck the file-check definition that fired
 * @param agentInfo information identifying the reporting agent
 * @return the session ID of the job that was started
 * @throws HinemosUnknown
 * @throws InvalidRole
 * @throws InvalidUserPass
 * @throws JobMasterNotFound
 * @throws JobInfoNotFound
 * @throws FacilityNotFound
 */
public String jobFileCheckResult(JobFileCheck jobFileCheck, AgentInfo agentInfo)
        throws HinemosUnknown, InvalidUserPass, InvalidRole, JobMasterNotFound,
        FacilityNotFound, JobInfoNotFound, JobSessionDuplicate {
    String id = jobFileCheck.getId();
    String jobunitId = jobFileCheck.getJobunitId();
    String jobId = jobFileCheck.getJobId();
    String filename = jobFileCheck.getFileName();
    String directory = jobFileCheck.getDirectory();
    Integer eventType = jobFileCheck.getEventType();
    Integer modifyType = jobFileCheck.getModifyType();
    m_log.info("jobFileCheckResult : id=" + id + ", jobunitId=" + jobunitId + ", jobId=" + jobId
            + ", filename=" + filename + ", directory=" + directory + ", eventType=" + eventType
            + ", modifyType=" + modifyType);

    ArrayList<SystemPrivilegeInfo> systemPrivilegeList = new ArrayList<SystemPrivilegeInfo>();
    systemPrivilegeList.add(new SystemPrivilegeInfo(FunctionConstant.HINEMOS_AGENT, SystemPrivilegeMode.MODIFY));
    HttpAuthenticator.authCheck(wsctx, systemPrivilegeList);

    JobTriggerInfo trigger = new JobTriggerInfo();
    trigger.setJobkickId(jobFileCheck.getId());
    trigger.setTrigger_type(JobTriggerTypeConstant.TYPE_FILECHECK);
    trigger.setTrigger_info(jobFileCheck.getName() + "(" + id + ") file=" + filename);
    trigger.setFilename(filename);
    trigger.setDirectory(directory);

    OutputBasicInfo output = null;
    String sessionId = null;
    for (String facilityId : getFacilityId(agentInfo)) {
        ArrayList<String> facilityList = FacilitySelector.getFacilityIdList(jobFileCheck.getFacilityId(),
                jobFileCheck.getOwnerRoleId(), 0, false, false);
        if (facilityList.contains(facilityId)) {
            output = new OutputBasicInfo();
            output.setFacilityId(facilityId);
            try {
                sessionId = new JobControllerBean().runJob(jobunitId, jobId, output, trigger);
            } catch (Exception e) {
                m_log.warn("jobFileCheckResult() : " + e.getMessage());
                String[] args = { jobId, trigger.getTrigger_info() };
                AplLogger.put(PriorityConstant.TYPE_WARNING, HinemosModuleConstant.JOB,
                        MessageConstant.MESSAGE_SYS_017_JOB, args);
                throw new HinemosUnknown(e.getMessage(), e);
            }
        }
    }
    return sessionId;
}
From source file:fr.cirad.mgdb.exporting.markeroriented.VcfExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    Integer projectId = null;
    for (SampleId spId : sampleIDs) {
        if (projectId == null)
            projectId = spId.getProject();
        else if (projectId != spId.getProject()) {
            projectId = 0;
            break; // more than one project is involved: no header will be written
        }
    }

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    LinkedHashMap<SampleId, String> sampleIDToIndividualIdMap = new LinkedHashMap<SampleId, String>();
    ArrayList<String> individualList = new ArrayList<String>();
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    for (int i = 0; i < sampleIDs.size(); i++) {
        String individualId = individuals.get(i).getId();
        sampleIDToIndividualIdMap.put(sampleIDs.get(i), individualId);
        if (!individualList.contains(individualId)) {
            individualList.add(individualId);
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".vcf"));

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nQueryChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    VariantContextWriter writer = null;
    try {
        List<String> distinctSequenceNames = new ArrayList<String>();

        String sequenceSeqCollName = MongoTemplateManager.getMongoCollectionName(Sequence.class);
        if (mongoTemplate.collectionExists(sequenceSeqCollName)) {
            DBCursor markerCursorCopy = markerCursor.copy();
            markerCursorCopy.batchSize(nQueryChunkSize);
            while (markerCursorCopy.hasNext()) {
                int nLoadedMarkerCountInLoop = 0;
                boolean fStartingNewChunk = true;
                while (markerCursorCopy.hasNext()
                        && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                    DBObject exportVariant = markerCursorCopy.next();
                    String chr = (String) ((DBObject) exportVariant
                            .get(VariantData.FIELDNAME_REFERENCE_POSITION))
                            .get(ReferencePosition.FIELDNAME_SEQUENCE);
                    if (!distinctSequenceNames.contains(chr))
                        distinctSequenceNames.add(chr);
                }
            }
            markerCursorCopy.close();
        }

        Collections.sort(distinctSequenceNames, new AlphaNumericStringComparator());
        SAMSequenceDictionary dict = createSAMSequenceDictionary(sModule, distinctSequenceNames);
        writer = new CustomVCFWriter(null, zos, dict, false, false, true);
        // VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        // vcwb.unsetOption(Options.INDEX_ON_THE_FLY);
        // vcwb.unsetOption(Options.DO_NOT_WRITE_GENOTYPES);
        // vcwb.setOption(Options.USE_ASYNC_IO);
        // vcwb.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
        // vcwb.setReferenceDictionary(dict);
        // writer = vcwb.build();
        // writer = new AsyncVariantContextWriter(writer, 3000);

        progress.moveToNextStep(); // done with dictionary

        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
        boolean fWriteCommandLine = true, fWriteEngineHeaders = true; // default values
        while (headerCursor.hasNext()) {
            DBVCFHeader dbVcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerLines.addAll(dbVcfHeader.getHeaderLines());

            // Add sequence header lines (not stored in our vcf header collection)
            BasicDBObject projection = new BasicDBObject(SequenceStats.FIELDNAME_SEQUENCE_LENGTH, true);
            int nSequenceIndex = 0;
            for (String sequenceName : distinctSequenceNames) {
                String sequenceInfoCollName = MongoTemplateManager.getMongoCollectionName(SequenceStats.class);
                boolean fCollectionExists = mongoTemplate.collectionExists(sequenceInfoCollName);
                if (fCollectionExists) {
                    DBObject record = mongoTemplate.getCollection(sequenceInfoCollName).findOne(
                            new Query(Criteria.where("_id").is(sequenceName)).getQueryObject(), projection);
                    if (record == null) {
                        LOG.warn("Sequence '" + sequenceName + "' not found in collection "
                                + sequenceInfoCollName);
                        continue;
                    }

                    Map<String, String> sequenceLineData = new LinkedHashMap<String, String>();
                    sequenceLineData.put("ID", (String) record.get("_id"));
                    sequenceLineData.put("length",
                            ((Number) record.get(SequenceStats.FIELDNAME_SEQUENCE_LENGTH)).toString());
                    headerLines.add(new VCFContigHeaderLine(sequenceLineData, nSequenceIndex++));
                }
            }
            fWriteCommandLine = headerCursor.size() == 1 && dbVcfHeader.getWriteCommandLine(); // wouldn't make sense to include command lines for several runs
            if (!dbVcfHeader.getWriteEngineHeaders())
                fWriteEngineHeaders = false;
        }
        headerCursor.close();

        VCFHeader header = new VCFHeader(headerLines, individualList);
        header.setWriteCommandLine(fWriteCommandLine);
        header.setWriteEngineHeaders(fWriteEngineHeaders);
        writer.writeHeader(header);

        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        HashMap<SampleId, Comparable /* phID */> phasingIDsBySample = new HashMap<SampleId, Comparable>();
        while (markerCursor.hasNext()) {
            if (progress.hasAborted())
                return;

            int nLoadedMarkerCountInLoop = 0;
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nQueryChunkSize);
            List<Comparable> currentMarkers = new ArrayList<Comparable>();
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                currentMarkers.add((Comparable) exportVariant.get("_id"));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            // query mongo db for matching genotypes
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/);
            for (VariantData variant : variantsAndRuns.keySet()) {
                VariantContext vc = variant.toVariantContext(variantsAndRuns.get(variant),
                        !ObjectId.isValid(variant.getId().toString()), sampleIDToIndividualIdMap,
                        phasingIDsBySample, nMinimumGenotypeQuality, nMinimumReadDepth, warningFileWriter,
                        markerSynonyms == null ? variant.getId() : markerSynonyms.get(variant.getId()));
                try {
                    writer.add(vc);
                } catch (Throwable t) {
                    Exception e = new Exception("Unable to convert to VariantContext: " + variant.getId(), t);
                    LOG.debug("error", e);
                    throw e;
                }
                if (nLoadedMarkerCountInLoop > currentMarkers.size())
                    LOG.error("Bug: writing variant number " + nLoadedMarkerCountInLoop + " (only "
                            + currentMarkers.size() + " variants expected)");
            }

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        progress.setCurrentStepProgress((short) 100);
    } catch (Exception e) {
        LOG.error("Error exporting", e);
        progress.setError(e.getMessage());
        return;
    } finally {
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            BufferedReader in = new BufferedReader(new FileReader(warningFile));
            String sLine;
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        if (writer != null)
            try {
                writer.close();
            } catch (Throwable ignored) {
            }
    }
}
From source file:io.hakbot.controller.persistence.QueryManager.java
@SuppressWarnings("unchecked")
private boolean hasPermission(Job job, LdapUser ldapUser) {
    final ApiKey apiKey = pm.getObjectById(ApiKey.class, job.getStartedByApiKeyId());
    final ArrayList<Long> list = new ArrayList<>();
    for (alpine.model.Team alpineTeam : apiKey.getTeams()) {
        final Team team = getObjectById(Team.class, alpineTeam.getId());
        list.add(team.getId());
    }
    for (alpine.model.Team alpineTeam : ldapUser.getTeams()) {
        final Team team = getObjectById(Team.class, alpineTeam.getId());
        if (team.isHakmaster()) {
            return true;
        }
        if (list.contains(team.getId())) {
            return true;
        }
    }
    return false;
}
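One subtlety in this pattern: contains takes an Object, so primitive arguments are autoboxed. With an ArrayList<Long>, passing an int literal boxes to an Integer, which never equals a Long, and the check silently returns false; the snippet works because both sides come from team.getId() and so have the same type. A small illustrative sketch:

    import java.util.ArrayList;
    import java.util.List;

    public class BoxedContainsDemo {
        public static void main(String[] args) {
            List<Long> ids = new ArrayList<Long>();
            ids.add(42L);
            System.out.println(ids.contains(42));  // false: 42 boxes to Integer, not Long
            System.out.println(ids.contains(42L)); // true
        }
    }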
From source file:com.sec.ose.osi.sdk.protexsdk.bom.BOMReportAPIWrapper.java
public static ArrayList<String> getPendingFileList(String projectName, UIResponseObserver observer) {
    ArrayList<String> pendingList = new ArrayList<String>();
    log.debug(projectName);
    String projectId = ProjectAPIWrapper.getProjectID(projectName);

    PartialCodeTree root = null;
    try {
        root = ProtexSDKAPIManager.getCodeTreeAPI().getCodeTree(projectId, ROOT, 0, Boolean.TRUE);
    } catch (SdkFault e) {
        log.warn("getCodeTree() failed: " + e.getMessage());
    }

    // Check for valid return
    if (root == null) {
        System.err.println("getCodeTree returned unexpected value '" + projectId + "'");
        return null;
    }

    List<CodeMatchType> precisionOnly = new ArrayList<CodeMatchType>(1);
    precisionOnly.add(CodeMatchType.PRECISION);

    List<CodeMatchDiscovery> discoveries = null;
    try {
        discoveries = ProtexSDKAPIManager.getDiscoveryAPI().getCodeMatchDiscoveries(projectId, root,
                precisionOnly);
    } catch (SdkFault e) {
        log.warn("getCodeMatchDiscoveries() failed: " + e.getMessage());
    }

    // Check for valid return
    if (discoveries == null) {
        System.err.println("Invalid return from getCodeMatchDiscoveries() '" + projectId + "'");
        return null;
    }

    if (discoveries.size() != 0) {
        for (CodeMatchDiscovery discovery : discoveries) {
            if ((discovery.getIdentificationStatus() == IdentificationStatus.PENDING_IDENTIFICATION)
                    && !pendingList.contains(discovery.getFilePath())) {
                log.debug(discovery.getFilePath());
                pendingList.add(discovery.getFilePath());
            }
        }
    }
    return pendingList;
}
From source file:com.brightcove.zartan.encoding.test.ZencodeTest.java
public void testZencode() throws Throwable {
    VerifiableSerializer vser = new VerifiableSerializer();
    ZencodeAPI zapi = new ZencodeAPI();
    User u = new User("storpey@brightcove.com", "Password!", true);
    List<Credentials> creds = new ArrayList<Credentials>();
    creds.add(new ZencoderCredentials("716a8085f2094a65d8688d06d2637364"));
    Account acc = new Account(u, 123l, creds);
    AccountHelper ah = new AccountHelper();
    ArrayList<VideoFileInfo> files = getFils();

    TencodeAPI tapi = new TencodeAPI();
    User tu = new User("storpey@brightcove.com", "Password!", true);
    Account tacc = new Account(u, 123l, null);

    TranscodeEnvironment env = new TranscodeEnvironment("zen-prod", "https://app.zencoder.com/api/v2/jobs");
    TranscodeEnvironment tenv = new TranscodeEnvironment("tencode",
            "http://golftrans02.qanet.local:23080/tencode/encode/submit");

    FTPHelperBuilder ftpHB = new FTPHelperBuilder("10.1.11.139", "qa", "Passw0rd!",
            FTPClient.PASSIVE_LOCAL_DATA_CONNECTION_MODE);
    FTPHelper ftpH = ftpHB.createFTPHelper();

    for (VideoFileInfo file : files) {
        TranscodeInfo transcode = new TranscodeInfo(
                new TranscodeOptionGroup(ah.getDefaultTranscodeOptions(), "Default Transcode Options"), file);
        Verifiable vt = zapi.submitTranscode(transcode, env, acc);
        vser.addVerifiable(vt);

        TranscodeInfo ttranscode = new TranscodeInfo(
                new TranscodeOptionGroup(ah.getDefaultTranscodeOptions(), "Default Transcode Options"), file);
        tapi.submitTranscode(ttranscode, tenv, tacc);
    }

    try {
        vser.serializeToFile("test.json");
    } catch (IOException e) {
        e.printStackTrace();
    }

    ArrayList<String> haveSeen = new ArrayList<String>();
    VerifiableSerializer v = VerifiableSerializer.deserializeFromFile("test.json");
    //TranscodeVerifierRunner tvr = new TranscodeVerifierRunner();
    for (Verifiable verf : v.getVerifiableList()) {
        if (verf instanceof VerifiableTranscode) {
            VerifiableTranscode tverf = (VerifiableTranscode) verf;
            TrancodeResponse response = new TrancodeResponse();
            for (TranscodedVideoFile tf : ((VerifiableTranscode) verf).getTranscodedVideoFiles()) {
                response.addTranscodedVideoFile(tf);
                String location = response.getFileLocation(tf);
                if (!haveSeen.contains(location)) {
                    String location2 = "/zencoder/output-" + tf.getMatchingRequest().getOptionId()
                            + tverf.getTranscodeInfo().getFile().getFileName() + ".mp4";
                    ftpH.connect();
                    ftpH.put("/", location2, new File(location));
                    ftpH.disconnect();
                    haveSeen.add(location);
                    System.out.println(location + " , " + location2);
                }
            }
            //tvr.runVerifiers(tverf, response);
        }
    }
    ftpH.disconnect();
}
From source file:com.concursive.connect.web.modules.members.portlets.InviteMembersPortlet.java
private void checkDuplicates(LinkedHashMap<String, String> members, String member, String userId) {
    Iterator<String> memIterator = members.keySet().iterator();
    while (memIterator.hasNext()) {
        String keyName = memIterator.next();
        String idValue = members.get(keyName);
        //check only previous values and not the entire list
        if (keyName.equals(member)) {
            return;
        }
        //check if valid ids
        if (NO_MATCH_FOUND.equals(idValue) || !StringUtils.hasText(idValue)) {
            continue;
        }
        //convert comma separated string to ArrayList and remove duplicates
        ArrayList<String> lstIds = new ArrayList<String>(Arrays.asList(idValue.split(",")));
        while (lstIds.contains(userId)) {
            lstIds.remove(userId);
        }
        //convert the id list to a comma separated string and assign it to the members list if ids remain
        if (!lstIds.isEmpty()) {
            String ids = lstIds.toString();
            ids = ids.replace("[", "");
            ids = ids.replace("]", "");
            ids = ids.replace(" ", "");
            members.put(keyName, ids);
        } else {
            memIterator.remove();
        }
    }
}
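The while (lstIds.contains(userId)) loop removes one occurrence per scan, rescanning the list each time. removeAll with a singleton collection strips every occurrence in a single pass; a hedged one-pass equivalent (values are illustrative):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;

    public class RemoveAllDemo {
        public static void main(String[] args) {
            ArrayList<String> lstIds = new ArrayList<String>(Arrays.asList("7", "12", "7"));
            lstIds.removeAll(Collections.singleton("7")); // drops every "7" in one pass
            System.out.println(lstIds); // [12]
        }
    }

On Java 8+, the later toString-and-replace step could likewise be written as String.join(",", lstIds).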
From source file:edu.umass.cs.reconfiguration.reconfigurationutils.ConsistentReconfigurableNodeConfig.java
/**
 * This method maps a set of addresses, newAddresses, to a set of nodes such
 * that there is maximal overlap with the specified set of nodes, oldNodes.
 * It is somewhat nontrivial only because there is a many-to-one mapping
 * from nodes to addresses, so a simple reverse lookup is not meaningful.
 *
 * @param newAddresses
 * @param oldNodes
 * @return Set of active replica IPs corresponding to {@code newAddresses}
 *         that have high overlap with the set of old active replica nodes
 *         {@code oldNodes}.
 */
public Set<NodeIDType> getIPToActiveReplicaIDs(ArrayList<InetAddress> newAddresses, Set<NodeIDType> oldNodes) {
    Set<NodeIDType> newNodes = new HashSet<NodeIDType>(); // return value
    ArrayList<InetAddress> unassigned = new ArrayList<InetAddress>();
    for (InetAddress address : newAddresses)
        unassigned.add(address);
    // assign old nodes first if they match any new address
    for (NodeIDType oldNode : oldNodes) {
        InetAddress oldAddress = this.nodeConfig.getNodeAddress(oldNode);
        if (unassigned.contains(oldAddress)) {
            newNodes.add(oldNode);
            unassigned.remove(oldAddress);
        }
    }
    // assign any node to unassigned addresses
    for (NodeIDType node : this.nodeConfig.getActiveReplicas()) {
        if (this.activesSlatedForRemoval.contains(node))
            continue;
        InetAddress address = this.nodeConfig.getNodeAddress(node);
        if (unassigned.contains(address)) {
            newNodes.add(node);
            unassigned.remove(address);
        }
    }
    return newNodes;
}
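The contains-then-remove pairing here effectively treats unassigned as a multiset: several nodes can map to one address, so the list may legitimately hold duplicates, and remove(Object) deletes only the first occurrence, meaning each assignment consumes exactly one slot. A short sketch of that behavior (the address is illustrative; getByName performs no DNS lookup for an IP literal):

    import java.net.InetAddress;
    import java.util.ArrayList;
    import java.util.List;

    public class MultisetRemoveDemo {
        public static void main(String[] args) throws Exception {
            InetAddress a = InetAddress.getByName("10.0.0.1");
            List<InetAddress> unassigned = new ArrayList<InetAddress>();
            unassigned.add(a); // two nodes may share one address,
            unassigned.add(a); // so the list holds duplicates
            unassigned.remove(a); // removes only the first occurrence
            System.out.println(unassigned.contains(a)); // true: one slot still open
        }
    }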