List of usage examples for java.util.HashSet contains
public boolean contains(Object o)
Returns true if this set contains the specified element; more formally, returns true if and only if the set holds an element e such that Objects.equals(o, e).
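Because HashSet hashes its elements, membership tests run in constant time on average. A minimal, self-contained example of the method before the real-world usages below:

    import java.util.HashSet;

    public class ContainsDemo {
        public static void main(String[] args) {
            HashSet<String> colors = new HashSet<>();
            colors.add("red");
            colors.add("green");

            System.out.println(colors.contains("red"));   // true
            System.out.println(colors.contains("blue"));  // false

            // HashSet permits a single null element, so contains(null) is a valid query
            System.out.println(colors.contains(null));    // false
        }
    }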
From source file:es.caib.seycon.ng.servei.XarxaServiceImpl.java
protected Collection<Maquina> handleFindMaquinaByFiltreSenseRestriccions(String nom, String sistemaOperatiu,
        String adreca, String dhcp, String correu, String ofimatica, String alias, String mac,
        String descripcio, String xarxa, String codiUsuari, Boolean filtra) throws Exception {
    // Normalize empty or wildcard-only filter values to null
    if (nom != null && (nom.trim().compareTo("") == 0 || nom.trim().compareTo("%") == 0))
        nom = null;
    if (sistemaOperatiu != null
            && (sistemaOperatiu.trim().compareTo("") == 0 || sistemaOperatiu.trim().compareTo("%") == 0))
        sistemaOperatiu = null;
    if (adreca != null && (adreca.trim().compareTo("") == 0 || adreca.trim().compareTo("%") == 0))
        adreca = null;
    if (dhcp != null && (dhcp.trim().compareTo("") == 0 || dhcp.trim().compareTo("%") == 0))
        dhcp = null;
    if (correu != null && (correu.trim().compareTo("") == 0 || correu.trim().compareTo("%") == 0))
        correu = null;
    if (ofimatica != null && (ofimatica.trim().compareTo("") == 0 || ofimatica.trim().compareTo("%") == 0))
        ofimatica = null;
    if (alias != null && (alias.trim().compareTo("") == 0 || alias.trim().compareTo("%") == 0))
        alias = null;
    if (mac != null && (mac.trim().compareTo("") == 0 || mac.trim().compareTo("%") == 0))
        mac = null;
    if (descripcio != null && (descripcio.trim().compareTo("") == 0 || descripcio.trim().compareTo("%") == 0))
        descripcio = null;
    if (xarxa != null && (xarxa.trim().compareTo("") == 0 || xarxa.trim().compareTo("%") == 0))
        xarxa = null;
    if (codiUsuari != null
            && (codiUsuari.trim().compareTo("") == 0 || codiUsuari.trim().compareTo("%") == 0))
        codiUsuari = null;

    Collection<MaquinaEntity> maquines = null;
    LinkedList<Parameter> params = new LinkedList<Parameter>();

    // Build the query (the alias filter is applied afterwards)
    String query = "select distinct maquina from "
            + " es.caib.seycon.ng.model.SessioEntity sessio "
            + " right outer join sessio.maquina as maquina "
            + " left outer join sessio.usuari as usuari"
            + " where maquina.deleted = false ";
    if (nom != null) {
        query = query + "and maquina.nom like :nom ";
        params.add(new Parameter("nom", nom));
    }
    if (sistemaOperatiu != null) {
        query = query + "and maquina.operatingSystem.name = :operatingSystem ";
        params.add(new Parameter("operatingSystem", sistemaOperatiu));
    }
    if (adreca != null) {
        query = query + "and maquina.adreca like :adreca ";
        params.add(new Parameter("adreca", adreca));
    }
    if (dhcp != null) {
        query = query + "and maquina.dhcp like :dhcp ";
        params.add(new Parameter("dhcp", dhcp));
    }
    if (correu != null) {
        query = query + "and maquina.correu like :correu ";
        params.add(new Parameter("correu", correu));
    }
    if (ofimatica != null) {
        query = query + "and maquina.ofimatica like :ofimatica ";
        params.add(new Parameter("ofimatica", ofimatica));
    }
    if (mac != null) {
        query = query + "and maquina.mac like :mac ";
        params.add(new Parameter("mac", mac));
    }
    if (descripcio != null) {
        query = query + "and maquina.descripcio like :descripcio ";
        params.add(new Parameter("descripcio", descripcio));
    }
    if (xarxa != null) {
        query = query + "and maquina.xarxa.codi like :xarxa ";
        params.add(new Parameter("xarxa", xarxa));
    }
    if (codiUsuari != null) {
        query = query + "and usuari.codi like :codiUsuari ";
        params.add(new Parameter("codiUsuari", codiUsuari));
    }
    query = query + "order by maquina.nom ";
    maquines = getMaquinaEntityDao().query(query, params.toArray(new Parameter[0]));

    // Filter by alias, if a value was given
    if (alias != null) {
        Collection<AliasMaquinaEntity> maquinesAlias = getAliasMaquinaEntityDao().findMaquinaByAlias(alias);
        // Collect the ids of all machines that carry the requested alias
        HashSet<Long> h_maquinesAlias = new HashSet<Long>(maquinesAlias.size());
        for (Iterator it = maquinesAlias.iterator(); it.hasNext();) {
            MaquinaEntity maqAlias = (MaquinaEntity) it.next();
            h_maquinesAlias.add(maqAlias.getId());
        }
        // Keep only the machines from the search whose id is in the alias set
        for (Iterator it = maquines.iterator(); it.hasNext();) {
            MaquinaEntity maq = (MaquinaEntity) it.next();
            if (!h_maquinesAlias.contains(maq.getId()))
                it.remove(); // does not have the requested alias
        }
    }
    return getMaquinaEntityDao().toMaquinaList(maquines);
}
From source file:com.ryan.ryanreader.fragments.CommentListingFragment.java
private void makeFirstRequest(final Context context) {

    final RedditAccount user = RedditAccountManager.getInstance(context).getDefaultAccount();
    final CacheManager cm = CacheManager.getInstance(context);

    // TODO parameterise limit
    request = new CacheRequest(url, user, session, Constants.Priority.API_COMMENT_LIST, 0, downloadType,
            Constants.FileType.COMMENT_LIST, true, true, false, context) {

        @Override
        protected void onDownloadNecessary() {
            new Handler(Looper.getMainLooper()).post(new Runnable() {
                public void run() {
                    listFooter.addView(loadingView);
                    adapter.notifyDataSetChanged();
                }
            });
        }

        @Override
        protected void onDownloadStarted() {
            loadingView.setIndeterminate(context.getString(R.string.download_connecting));
        }

        @Override
        protected void onCallbackException(final Throwable t) {
            request = null;
            BugReportActivity.handleGlobalError(context, t);
        }

        @Override
        protected void onFailure(final RequestFailureType type, final Throwable t, final StatusLine status,
                final String readableMessage) {
            request = null;
            if (!isAdded())
                return;
            if (loadingView != null)
                loadingView.setDoneNoAnim(R.string.download_failed);
            final RRError error = General.getGeneralErrorForFailure(context, type, t, status);
            new Handler(Looper.getMainLooper()).post(new Runnable() {
                public void run() {
                    notifications.addView(new ErrorView(getSupportActivity(), error));
                }
            });
        }

        @Override
        protected void onProgress(final long bytesRead, final long totalBytes) {
        }

        @Override
        protected void onSuccess(final CacheManager.ReadableCacheFile cacheFile, final long timestamp,
                final UUID session, final boolean fromCache, final String mimetype) {
            request = null;
        }

        @Override
        public void onJsonParseStarted(final JsonValue value, final long timestamp, final UUID session,
                final boolean fromCache) {

            if (isAdded() && loadingView != null)
                loadingView.setIndeterminate("Downloading...");

            // TODO pref (currently 10 mins)
            // TODO xml
            if (fromCache && RRTime.since(timestamp) > 10 * 60 * 1000) {
                new Handler(Looper.getMainLooper()).post(new Runnable() {
                    public void run() {
                        if (isDetached())
                            return;
                        final TextView cacheNotif = new TextView(context);
                        cacheNotif.setText(context.getString(R.string.listing_cached) + " "
                                + RRTime.formatDateTime(timestamp, context));
                        final int paddingPx = General.dpToPixels(context, 6);
                        final int sidePaddingPx = General.dpToPixels(context, 10);
                        cacheNotif.setPadding(sidePaddingPx, paddingPx, sidePaddingPx, paddingPx);
                        cacheNotif.setTextSize(13f);
                        listHeaderNotifications.addView(cacheNotif);
                        adapter.notifyDataSetChanged();
                    }
                });
            }

            ((SessionChangeListener) getSupportActivity()).onSessionChanged(session,
                    SessionChangeListener.SessionChangeType.COMMENTS, timestamp);

            // TODO {"error": 403} is received for unauthorized subreddits
            try {
                // Download main post
                if (value.getType() == JsonValue.Type.ARRAY) {
                    // lol, reddit api
                    final JsonBufferedArray root = value.asArray();
                    final JsonBufferedObject thing = root.get(0).asObject();
                    final JsonBufferedObject listing = thing.getObject("data");
                    final JsonBufferedArray postContainer = listing.getArray("children");
                    final RedditThing postThing = postContainer.getObject(0, RedditThing.class);
                    final RedditPost post = postThing.asPost();

                    // TODO show upvote/downvote/etc buttons
                    final RedditSubreddit parentSubreddit = new RedditSubreddit("/r/" + post.subreddit,
                            post.subreddit, false);

                    CommentListingFragment.this.post = new RedditPreparedPost(context, cm, 0, post,
                            timestamp, true, parentSubreddit, false, false, false, user);

                    final ViewGroup selfText;
                    if (post.is_self && post.selftext != null && post.selftext.trim().length() > 0) {
                        selfText = RedditCommentTextParser
                                .parse(StringEscapeUtils.unescapeHtml4(post.selftext))
                                .generate(context, 14f * commentFontScale, null,
                                        new ActiveTextView.OnLinkClickedListener() {
                                            public void onClickUrl(String url) {
                                                if (url != null)
                                                    LinkHandler.onLinkClicked(getSupportActivity(), url,
                                                            false);
                                            }

                                            public void onClickText(Object attachment) {
                                            }
                                        }, CommentListingFragment.this.post);
                    } else {
                        selfText = null;
                    }

                    new Handler(Looper.getMainLooper()).post(new Runnable() {
                        public void run() {
                            final RedditPostHeaderView postHeader = new RedditPostHeaderView(
                                    getSupportActivity(), CommentListingFragment.this.post,
                                    CommentListingFragment.this);
                            listHeaderPost.addView(postHeader);

                            if (selfText != null) {
                                selfText.setFocusable(false);
                                selfText.setDescendantFocusability(ViewGroup.FOCUS_BLOCK_DESCENDANTS);
                                final int paddingPx = General.dpToPixels(context, 10);
                                listHeaderSelftext.addView(selfText);
                                listHeaderSelftext.setPadding(paddingPx, paddingPx, paddingPx, paddingPx);
                                listHeaderNotifications.setBackgroundColor(Color.argb(35, 128, 128, 128));
                            }

                            if (!General.isTablet(context,
                                    PreferenceManager.getDefaultSharedPreferences(context))) {
                                getSupportActivity().getSupportActionBar().setTitle(post.title);
                            }
                        }
                    });
                }

                // Download comments
                final JsonBufferedObject thing;
                if (value.getType() == JsonValue.Type.ARRAY) {
                    thing = value.asArray().get(1).asObject();
                } else {
                    thing = value.asObject();
                }

                final JsonBufferedObject listing = thing.getObject("data");
                final JsonBufferedArray topLevelComments = listing.getArray("children");

                // Ids of comments whose state changed locally; buildComments() tests
                // membership with HashSet.contains()
                final HashSet<String> needsChanging = RedditChangeDataManager.getInstance(context)
                        .getChangedForParent(parentPostIdAndType, user);

                for (final JsonValue commentThingValue : topLevelComments) {
                    buildComments(commentThingValue, null, timestamp, needsChanging);
                }

                commentHandler.sendMessage(General.handlerMessage(0, buffer));

            } catch (Throwable t) {
                notifyFailure(RequestFailureType.PARSE, t, null, "Parse failure");
                return;
            }

            if (isAdded() && loadingView != null)
                loadingView.setDoneNoAnim(R.string.download_done);
        }

        private ArrayList<RedditPreparedComment> buffer = new ArrayList<RedditPreparedComment>();

        private void buildComments(final JsonValue value, final RedditPreparedComment parent,
                final long timestamp, final HashSet<String> needsChanging)
                throws IOException, InterruptedException, IllegalAccessException,
                java.lang.InstantiationException, NoSuchMethodException, InvocationTargetException {

            final RedditThing commentThing = value.asObject(RedditThing.class);
            if (commentThing.getKind() != RedditThing.Kind.COMMENT)
                return;

            final RedditComment comment = commentThing.asComment();

            final RedditPreparedComment preparedComment = new RedditPreparedComment(context, comment,
                    parent, timestamp, needsChanging.contains(comment.name), post, user, headerItems);
            after = preparedComment.idAndType;
            buffer.add(preparedComment);
            if (buffer.size() >= 40) {
                commentHandler.sendMessage(General.handlerMessage(0, buffer));
                buffer = new ArrayList<RedditPreparedComment>();
            }

            if (comment.replies.getType() == JsonValue.Type.OBJECT) {
                final JsonBufferedObject replies = comment.replies.asObject();
                final JsonBufferedArray children = replies.getObject("data").getArray("children");
                for (final JsonValue v : children) {
                    buildComments(v, preparedComment, timestamp, needsChanging);
                }
            }
        }
    };

    cm.makeRequest(request);
}
From source file:org.jahia.modules.external.modules.ModulesDataSource.java
private synchronized void saveNodeType(ExternalData data) throws RepositoryException {
    String path = data.getPath();
    String pathLowerCase = path.toLowerCase();
    String cndPath = getCndPath(path, pathLowerCase);
    String subPath = getSubPath(path, pathLowerCase);
    String nodeTypeName = StringUtils.substringBefore(subPath, "/");
    nodeTypeName = nodeTypeName.replace('-', '_');
    NodeTypeRegistry nodeTypeRegistry = loadRegistry(cndPath);
    ExtendedNodeType nodeType = null;
    try {
        nodeType = nodeTypeRegistry.getNodeType(nodeTypeName);
    } catch (NoSuchNodeTypeException e) {
        nodeType = new ExtendedNodeType(nodeTypeRegistry, module.getId());
        nodeType.setName(new Name(nodeTypeName, nodeTypeRegistry.getNamespaces()));
    }
    Map<String, String[]> properties = data.getProperties();
    List<String> declaredSupertypes = new ArrayList<String>();
    String[] values = properties.get("j:supertype");
    final HashSet<String> supertypes = Sets.newHashSet(nodeType.getDeclaredSupertypeNames());
    if (values != null && values.length > 0) {
        // Only flag a supertype change if the requested supertype is not already declared
        if (!supertypes.contains(values[0])) {
            checkCndItemUsage(path, "modulesDataSource.errors.changeSuperType");
        }
        declaredSupertypes.add(values[0]);
    }
    values = properties.get("j:mixins");
    if (values != null) {
        for (String mixin : values) {
            declaredSupertypes.add(mixin);
        }
    }
    nodeType.setDeclaredSupertypes(declaredSupertypes.toArray(new String[declaredSupertypes.size()]));
    values = properties.get("j:isAbstract");
    if (values != null && values.length > 0) {
        nodeType.setAbstract(Boolean.parseBoolean(values[0]));
    } else {
        nodeType.setAbstract(false);
    }
    values = properties.get("j:isQueryable");
    if (values != null && values.length > 0) {
        nodeType.setQueryable(Boolean.parseBoolean(values[0]));
    } else {
        nodeType.setQueryable(true);
    }
    values = properties.get("j:hasOrderableChildNodes");
    if (values != null && values.length > 0) {
        nodeType.setHasOrderableChildNodes(Boolean.parseBoolean(values[0]));
    } else {
        nodeType.setHasOrderableChildNodes(false);
    }
    values = properties.get("j:itemsType");
    if (values != null && values.length > 0) {
        nodeType.setItemsType(values[0]);
    } else {
        nodeType.setItemsType(null);
    }
    values = properties.get("j:mixinExtends");
    if (values != null) {
        nodeType.setMixinExtendNames(Lists.newArrayList(values));
    } else {
        nodeType.setMixinExtendNames(new ArrayList<String>());
    }
    values = properties.get("j:primaryItemName");
    if (values != null && values.length > 0) {
        nodeType.setPrimaryItemName(values[0]);
    } else {
        nodeType.setPrimaryItemName(null);
    }
    nodeType.setMixin(JNT_MIXIN_NODE_TYPE.equals(data.getType()));
    nodeTypeRegistry.addNodeType(nodeType.getNameObject(), nodeType);
    try {
        nodeType.validate();
    } catch (NoSuchNodeTypeException e) {
        logger.error("Failed to save child node definition", e);
        removeNodeTypeRegistry(cndPath);
        throw e;
    }
    writeDefinitionFile(nodeTypeRegistry, cndPath);
    saveCndResourceBundle(data, JCRContentUtils.replaceColon(nodeTypeName));
}
From source file:com.jaspersoft.jasperserver.api.engine.scheduling.hibernate.HibernateReportJobsPersistenceService.java
private void verifyOutputLocation(List persistentJobs, ReportJobModel jobModel, ExecutionContext context)
        throws DuplicateOutputLocationException {
    Boolean isSaveToRepository = null;
    if ((jobModel.getContentRepositoryDestinationModel() != null)
            && jobModel.getContentRepositoryDestinationModel().isSaveToRepositoryModified()) {
        isSaveToRepository = jobModel.getContentRepositoryDestinationModel().isSaveToRepository();
    }
    // nothing will get saved to the repository, so skip verification
    if ((isSaveToRepository != null) && (!isSaveToRepository))
        return;
    String baseOutputName = null;
    if ((jobModel.getContentRepositoryDestinationModel() != null)
            && jobModel.getContentRepositoryDestinationModel().isFolderURIModified()) {
        baseOutputName = jobModel.getBaseOutputFilename();
    }
    String folderURI = null;
    if ((jobModel.getContentRepositoryDestinationModel() != null)
            && jobModel.getContentRepositoryDestinationModel().isFolderURIModified()) {
        folderURI = jobModel.getContentRepositoryDestinationModel().getFolderURI();
    }
    Boolean isUsingDefaultFolderURI = null;
    if ((jobModel.getContentRepositoryDestinationModel() != null)
            && jobModel.getContentRepositoryDestinationModel()
                    .isUsingDefaultReportOutputFolderURIModified()) {
        isUsingDefaultFolderURI = jobModel.getContentRepositoryDestinationModel()
                .isUsingDefaultReportOutputFolderURI();
    }
    String defaultFolderURI = null;
    if ((jobModel.getContentRepositoryDestinationModel() != null)
            && jobModel.getContentRepositoryDestinationModel().isDefaultReportOutputFolderURIModified()) {
        defaultFolderURI = jobModel.getContentRepositoryDestinationModel()
                .getDefaultReportOutputFolderURI();
    }
    // return if no relevant fields are being updated
    if ((isSaveToRepository == null) && (folderURI == null) && (baseOutputName == null)
            && (isUsingDefaultFolderURI == null) && (defaultFolderURI == null))
        return;
    // track every output path seen so far; a repeated path is a duplicate location
    HashSet<String> pathList = new HashSet<String>();
    for (Object persistentJob : persistentJobs) {
        ReportJob job = toClient(((PersistentReportJob) persistentJob), context);
        if (job.getContentRepositoryDestination() == null)
            continue;
        boolean isSaveToRepository_TMP = (isSaveToRepository != null ? isSaveToRepository.booleanValue()
                : job.getContentRepositoryDestination().isSaveToRepository());
        // job does not save to the repository
        if (!isSaveToRepository_TMP)
            continue;
        String baseOutputName_TMP = (baseOutputName != null ? baseOutputName : job.getBaseOutputFilename());
        boolean isUsingDefaultFolderURI_TMP = (isUsingDefaultFolderURI != null
                ? isUsingDefaultFolderURI.booleanValue()
                : job.getContentRepositoryDestination().isUsingDefaultReportOutputFolderURI());
        String folderURI_TMP;
        if (isUsingDefaultFolderURI_TMP) {
            folderURI_TMP = (defaultFolderURI != null ? defaultFolderURI
                    : job.getContentRepositoryDestination().getDefaultReportOutputFolderURI());
        } else {
            folderURI_TMP = (folderURI != null ? folderURI
                    : job.getContentRepositoryDestination().getFolderURI());
        }
        String path = folderURI_TMP + "/" + baseOutputName_TMP;
        if (pathList.contains(path)) {
            throw new DuplicateOutputLocationException(((PersistentReportJob) persistentJob).getId(), path);
        } else {
            pathList.add(path);
        }
    }
}
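A side note on the contains-then-add sequence at the end: HashSet.add itself returns false when the element is already present, so the membership test and the insertion can be collapsed into a single call. A minimal sketch of the equivalent duplicate check; the class, method, and exception choice are illustrative, not from the JasperServer API:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;

    public class DuplicateOutputCheck {

        // add() returns false for an element already present,
        // replacing the separate contains()/add() pair above
        static void verifyUnique(List<String> paths) {
            HashSet<String> seen = new HashSet<String>();
            for (String path : paths) {
                if (!seen.add(path)) {
                    throw new IllegalStateException("Duplicate output location: " + path);
                }
            }
        }

        public static void main(String[] args) {
            verifyUnique(Arrays.asList("/reports/a", "/reports/b")); // passes
            verifyUnique(Arrays.asList("/reports/a", "/reports/a")); // throws
        }
    }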
From source file:com.milaboratory.core.tree.SequenceTreeMapTest.java
@Test
public void testRandomizedTest3() throws Exception {
    for (int f = 0; f < repeats * 6; ++f) {
        Alphabet alphabet = getAlphabetSequence(f);
        for (byte t = 0; t < 3; ++t) {
            final Sequence seqRight = randomSequence(alphabet, 50, 100),
                    seqLeft = randomSequence(alphabet, 50, 100),
                    spacer = randomSequence(alphabet, 200, 200),
                    goodSequence = concatenate(seqLeft, spacer, seqRight);
            SequenceTreeMap map = new SequenceTreeMap(alphabet);
            int[] mut = new int[3];
            mut[t] = 3;
            HashSet<Sequence> lErr = new HashSet<>(), rErr = new HashSet<>(), lrErr = new HashSet<>();
            Sequence seq1, seq2, mseq;
            for (int i = 0; i < 100; ++i) {
                // Left error
                seq1 = introduceErrors(seqLeft, mut);
                mseq = concatenate(seq1, spacer, seqRight);
                lErr.add(mseq);
                map.put(mseq, mseq);
                // Right error
                seq1 = introduceErrors(seqRight, mut);
                mseq = concatenate(seqLeft, spacer, seq1);
                rErr.add(mseq);
                map.put(mseq, mseq);
                // Left and right errors
                seq1 = introduceErrors(seqLeft, mut);
                seq2 = introduceErrors(seqRight, mut);
                mseq = concatenate(seq1, spacer, seq2);
                lrErr.add(mseq);
                map.put(mseq, mseq);
            }
            SequenceTreeMap.Node<Sequence> n;

            // Left run: only mutations left of the spacer midpoint are allowed
            NeighborhoodIterator neighborhoodIterator = map.getNeighborhoodIterator(goodSequence, 1.3,
                    new double[] { 0.1, 0.1, 0.1 }, mut, new MutationGuide() {
                        @Override
                        public boolean allowMutation(Sequence ref, int position, byte type, byte code) {
                            return position < seqLeft.size() + 100;
                        }
                    });
            HashSet<Sequence> acc = new HashSet<>(lErr);
            while ((n = neighborhoodIterator.nextNode()) != null) {
                assertTrue(lErr.contains(n.object));
                assertFalse(rErr.contains(n.object));
                assertFalse(lrErr.contains(n.object));
                acc.remove(n.object);
            }
            assertTrue(acc.isEmpty());

            // Right run: only mutations right of the spacer midpoint are allowed
            neighborhoodIterator = map.getNeighborhoodIterator(goodSequence, 1.3,
                    new double[] { 0.1, 0.1, 0.1 }, mut, new MutationGuide() {
                        @Override
                        public boolean allowMutation(Sequence ref, int position, byte type, byte code) {
                            return position > seqLeft.size() + 100;
                        }
                    });
            acc = new HashSet<>(rErr);
            while ((n = neighborhoodIterator.nextNode()) != null) {
                assertTrue(rErr.contains(n.object));
                assertFalse(lErr.contains(n.object));
                assertFalse(lrErr.contains(n.object));
                acc.remove(n.object);
            }
            assertTrue(acc.isEmpty());
        }
    }
}
From source file:com.ikanow.infinit.e.api.social.sharing.ShareHandler.java
private String getReferenceString(SharePojo share) {
    // FILE:
    if (null == share.getDocumentLocation().get_id()) { // local file based reference
        FileInputStream fin = null;
        Scanner s = null;
        try {
            File f = new File(share.getDocumentLocation().getCollection());
            fin = new FileInputStream(f);
            s = new Scanner(fin, "UTF-8");
            return (s.useDelimiter("\n").next());
        } catch (Exception e) {
            return null;
        } finally {
            try {
                if (null != fin)
                    fin.close();
                if (null != s)
                    s.close();
            } catch (Exception e) {
            } // (probably just never opened)
        }
    }
    // DB: carry on, this is a database object

    // Collect the ids of all communities the share belongs to
    HashSet<String> shareIdStrs = new HashSet<String>();
    for (ShareCommunityPojo commIds : share.getCommunities()) {
        shareIdStrs.add(commIds.get_id().toString());
    }
    String retVal = null;
    BasicDBObject query = new BasicDBObject(DocumentPojo._id_, share.getDocumentLocation().get_id()); // (same for all artifacts)
    String dbName = share.getDocumentLocation().getDatabase();
    String collectionName = share.getDocumentLocation().getCollection();
    BasicDBObject returnVal = (BasicDBObject) MongoDbManager.getCollection(dbName, collectionName)
            .findOne(query);
    try {
        BasicDBList communities = null;
        boolean bCustomJob = dbName.equals("custommr"); // (a bit different)
        boolean bFoundOverlap = false;
        if (!bCustomJob) {
            ObjectId communityId = (ObjectId) returnVal.get(DocumentPojo.communityId_); // (same for other artifacts)
            bFoundOverlap = shareIdStrs.contains(communityId.toString());
        } else {
            communities = (BasicDBList) returnVal.get("communityIds"); // (shared across multiple json types)
            for (Object commIdObj : communities) {
                ObjectId commId = (ObjectId) commIdObj;
                if (shareIdStrs.contains(commId.toString())) {
                    bFoundOverlap = true;
                    break;
                }
            }
        }
        if (!bFoundOverlap) {
            throw new RuntimeException(""); // (turned into the common message below)
        }
        if (!bCustomJob) { // everything but custom jobs
            Date modifiedTime = returnVal.getDate(DocumentPojo.modified_); // (same for other artifacts)
            if (null != modifiedTime) {
                share.setModified(modifiedTime);
            }
            retVal = returnVal.toString();
        } else { // custom jobs
            String database = returnVal.getString(CustomMapReduceJobPojo.outputDatabase_);
            if (null == database) {
                database = dbName;
            }
            Date modifiedTime = returnVal.getDate(CustomMapReduceJobPojo.lastCompletionTime_);
            if (null != modifiedTime) {
                share.setModified(modifiedTime);
            }
            String collection = returnVal.getString(CustomMapReduceJobPojo.outputCollection_);
            BasicDBObject returnVal2 = (BasicDBObject) MongoDbManager.getCollection(database, collection)
                    .findOne();
            retVal = returnVal2.toString();
        }
    } catch (Exception e) {
        throw new RuntimeException("Document not found or permission issue (no overlapping communities)");
    }
    return retVal;
}
From source file:diffhunter.Indexer.java
private HashMap<String, HashMap<String, IntervalTree<String>>> get_gene_coords_updated(
        Map<String, Gene> dic_genes, String file_name, Map<String, List<String>> dic_gene_transcripts,
        Map<String, AbstractMap.SimpleEntry<Integer, Integer>> dic_transcripts_locs,
        Map<String, Object> lock_dic_genes) throws FileNotFoundException, IOException {
    // Location-to-gene lookup, keyed first by strand ("+"/"-"), then by chromosome
    HashMap<String, HashMap<String, IntervalTree<String>>> dic_Loc_gene = new HashMap<>();
    dic_Loc_gene.put("+", new HashMap<>());
    dic_Loc_gene.put("-", new HashMap<>());

    BufferedReader br = new BufferedReader(new FileReader(file_name));
    String line = br.readLine(); // skip the header line
    HashSet<String> processed_Genes = new HashSet<>();
    while ((line = br.readLine()) != null) {
        List<String> toks = Arrays.asList(line.split("\t"));

        Gene temp = new Gene();
        temp.chromosome = toks.get(8).replace("chr", "");
        temp.symbol = toks.get(0) + "|" + toks.get(1);

        String gene_name = temp.symbol;
        String transcript = toks.get(2);
        if (!dic_gene_transcripts.containsKey(gene_name)) {
            dic_gene_transcripts.put(gene_name, new ArrayList<>());
        }
        if (!dic_transcripts_locs.containsKey(transcript)) {
            dic_transcripts_locs.put(transcript, new AbstractMap.SimpleEntry<>(
                    Integer.parseInt(toks.get(5)), Integer.parseInt(toks.get(6))));
        }
        dic_gene_transcripts.get(gene_name).add(transcript);

        // Process each gene symbol only once
        if (!processed_Genes.contains(temp.symbol)) {
            processed_Genes.add(temp.symbol);
        } else {
            continue;
        }

        temp.strand = "1".equals(toks.get(7)) ? "+" : "-";
        temp.start_loc = Integer.parseInt(toks.get(3));
        temp.end_loc = Integer.parseInt(toks.get(4));

        if (!dic_Loc_gene.get(temp.strand).containsKey(temp.chromosome)) {
            dic_Loc_gene.get(temp.strand).put(temp.chromosome, new IntervalTree<>());
        }
        dic_Loc_gene.get(temp.strand).get(temp.chromosome).addInterval(temp.start_loc, temp.end_loc,
                temp.symbol);
        dic_genes.put(temp.symbol, temp);
        lock_dic_genes.put(temp.symbol, true);
    }
    return dic_Loc_gene;
}
From source file:com.vmware.bdd.plugin.ambari.service.AmbariImpl.java
private boolean isBlueprintCreatedByBDE(final AmClusterDef clusterDef)
        throws SoftwareManagementPluginException {
    /*
     * For cluster resume/resize the blueprint already exists, so we need to check
     * whether it was created by BDE. For now, just check that all group names and
     * components present in the Ambari cluster are included in the given blueprint.
     */
    ApiBlueprint apiBlueprint = clusterDef.toApiBlueprint();
    String clusterName = clusterDef.getName();
    ApiBlueprint apiBlueprintFromAm = apiManager.getBlueprint(clusterName);

    Map<String, Set<String>> groupNamesWithComponents = new HashMap<String, Set<String>>();
    for (ApiHostGroup hostGroup : apiBlueprint.getApiHostGroups()) {
        HashSet<String> components = new HashSet<String>();
        groupNamesWithComponents.put(hostGroup.getName(), components);
    }

    for (ApiHostGroup apiHostGroup : apiBlueprintFromAm.getApiHostGroups()) {
        String groupName = apiHostGroup.getName();
        if (!groupNamesWithComponents.containsKey(groupName)) {
            throw AmException.BLUEPRINT_ALREADY_EXIST(clusterName);
        }
        Set<String> components = groupNamesWithComponents.get(groupName);
        if (components != null && !components.isEmpty()) {
            // Every component known to Ambari must appear in the blueprint's group
            for (ApiComponentInfo apiComponent : apiHostGroup.getApiComponents()) {
                if (!components.contains(apiComponent.getName())) {
                    throw AmException.BLUEPRINT_ALREADY_EXIST(clusterName);
                }
            }
        }
    }
    return true;
}
From source file:org.agnitas.backend.Data.java
/**
 * Set standard columns, if they are not already found in the database layout.
 *
 * @param use  already used column names
 * @param tags the currently active tags
 */
public void setUsedFieldsInLayout(HashSet<String> use, Hashtable<String, EMMTag> tags) {
    int sanity = 0;
    HashSet<String> predef;
    if (use != null) {
        predef = new HashSet<String>(use);
    } else {
        predef = new HashSet<String>();
    }
    if (targets != null) {
        for (Enumeration<Target> e = targets.elements(); e.hasMoreElements();) {
            Target t = e.nextElement();
            t.requestFields(this, predef);
        }
    }
    setStandardFields(predef, tags);
    for (int n = 0; n < lcount; ++n) {
        Column c = layout.elementAt(n);
        // Mark a column as in use exactly when its name was requested
        if (predef.contains(c.qname)) {
            if (!c.inuse) {
                c.inuse = true;
                ++lusecount;
            }
            ++sanity;
        } else {
            if (c.inuse) {
                c.inuse = false;
                --lusecount;
            }
        }
    }
    if (sanity != lusecount)
        logging(Log.ERROR, "layout", "Sanity check failed in setUsedFieldsInLayout");
}
From source file:de.burlov.amazon.s3.dirsync.DirSync.java
/**
 * Generates the list of changed files. Missing or new files are included as well.
 * The list is built with respect to the 'exclude' and/or 'include' patterns.
 *
 * @param baseDir the local base directory
 * @param folder the folder index to compare against
 * @return list of changed, new, or locally deleted files
 * @throws IOException
 */
@SuppressWarnings("unchecked")
private List<LocalFile> generateChangedFileList(File baseDir, Folder folder) throws Exception {
    HashSet<String> localFiles = new HashSet<String>();
    List<LocalFile> ret = new LinkedList<LocalFile>();
    for (File file : (Collection<File>) FileUtils.listFiles(baseDir, FileFilterUtils.trueFileFilter(),
            FileFilterUtils.trueFileFilter())) {
        if (!file.isFile()) {
            // skip directories
            continue;
        }
        String filename = computeRelativeName(baseDir, file);
        if (!shouldIncludeFile(filename)) {
            // the exclude/include rules say this file should be ignored
            continue;
        }
        localFiles.add(filename);
        FileInfo info = folder.getFileInfo(filename);
        if (info == null) {
            // The file is new and has not been uploaded yet; depending on the sync
            // direction ('up' or 'down') it is either uploaded or deleted
            ret.add(new LocalFile(file, filename));
        } else {
            if (isFileChanged(file, info)) {
                ret.add(new LocalFile(file, filename));
            }
        }
    }
    // Now add files that may have been removed locally
    for (String filename : folder.getIndexData().keySet()) {
        if (!shouldIncludeFile(filename)) {
            // the exclude/include rules say this file should be ignored
            continue;
        }
        if (!localFiles.contains(filename)) {
            // the file was deleted locally
            ret.add(new LocalFile(new File(baseDir, filename), filename));
        }
    }
    return ret;
}