List of usage examples for java.util.HashSet#contains(Object o)
public boolean contains(Object o)
From source file:edu.buffalo.cse.pigout.parser.PigOutMacro.java
void validate() throws IOException { if (rets.isEmpty()) { return;/* ww w . j a v a2 s . c o m*/ } HashSet<String> testSet = new HashSet<String>(); StreamTokenizer st = new StreamTokenizer(new StringReader(body)); st.wordChars('.', '.'); st.wordChars('0', '9'); st.wordChars('_', '_'); st.wordChars('$', '$'); st.lowerCaseMode(false); st.ordinaryChar('/'); st.slashStarComments(true); while (st.nextToken() != StreamTokenizer.TT_EOF) { if (matchWord(st, "define", false) && matchDollarAlias(st, true)) { testSet.add(st.sval.substring(1)); } else if (matchDollarAlias(st, false)) { String prevWord = st.sval; if (matchWord(st, "if", true) || matchWord(st, "otherwise", true)) { testSet.add(prevWord.substring(1)); } else if (matchChar(st, '=', true) && !matchChar(st, '=', true)) { testSet.add(prevWord.substring(1)); } else if (matchChar(st, ',', true)) { // possible mult-alias inlining of a macro ArrayList<String> mlist = new ArrayList<String>(); mlist.add(prevWord); if (isMultiValueReturn(st, mlist, true)) { for (String s : mlist) { testSet.add(s.substring(1)); } } } } else if (matchChar(st, '-', false) && matchChar(st, '-', true)) { skipSingleLineComment(st); } } for (String s : rets) { if (!testSet.contains(s)) { throw new IOException("Macro '" + name + "' missing return alias: " + s); } } }
From source file:gedi.riboseq.inference.orf.OrfFinder.java
/** * Coordinates are in codonsRegion space! * @param index//from ww w. j a va 2s . com * @param sequence * @param sg * @param codonsRegion * @return */ public ArrayList<OrfWithCodons> findOrfs(int index, String sequence, SpliceGraph sg, ImmutableReferenceGenomicRegion<IntervalTreeSet<Codon>> codonsRegion) { SimpleDirectedGraph<Codon> fg = new SimpleDirectedGraph<Codon>("Codongraph"); // if (!codonsRegion.getReference().toString().equals("chr4+") || !codonsRegion.getRegion().contains(140_283_087)) // return 0; LeftMostInFrameAndClearList buff = new LeftMostInFrameAndClearList(); IntervalTreeSet<Codon> codons = codonsRegion.getData(); codons.removeIf(c -> c.getTotalActivity() < minCodonActivity); if (codons.size() == 0) return new ArrayList<OrfWithCodons>(); // add stop codons for easy orf inference HashSet<Codon> stopCodons = new HashSet<Codon>(); Trie<String> stop = new Trie<String>(); stop.put("TAG", "TAG"); stop.put("TGA", "TGA"); stop.put("TAA", "TAA"); stop.iterateAhoCorasick(sequence) .map(r -> new Codon(new ArrayGenomicRegion(r.getStart(), r.getEnd()), r.getValue())) .toCollection(stopCodons); for (Intron intr : sg.iterateIntrons().loop()) { ArrayGenomicRegion reg = new ArrayGenomicRegion(intr.getStart() - 2, intr.getStart(), intr.getEnd(), intr.getEnd() + 1); String cod = stop.get(SequenceUtils.extractSequence(reg, sequence)); if (cod != null) stopCodons.add(new Codon(reg, cod)); reg = new ArrayGenomicRegion(intr.getStart() - 1, intr.getStart(), intr.getEnd(), intr.getEnd() + 2); cod = stop.get(SequenceUtils.extractSequence(reg, sequence)); if (cod != null) stopCodons.add(new Codon(reg, cod)); } stopCodons.removeAll(codons); codons.addAll(stopCodons); ArrayList<OrfWithCodons> re = new ArrayList<OrfWithCodons>(); HashSet<Codon> usedForAnno = new HashSet<Codon>(); if (assembleAnnotationFirst) { // new: first use annotated transcripts in a greedy fashion ArrayList<ImmutableReferenceGenomicRegion<Transcript>> transcripts = annotation.ei(codonsRegion) .filter(t 
-> t.getData().isCoding()).map(t -> codonsRegion.induce(t, "T")).list(); int acount = 0; LinkedList<OrfWithCodons> orfs = new LinkedList<OrfWithCodons>(); GenomicRegion best; HashSet<Codon> aremoved = new HashSet<Codon>(); do { best = null; double bestSum = 0; for (ImmutableReferenceGenomicRegion<Transcript> tr : transcripts) { double[] a = new double[tr.getRegion().getTotalLength()]; for (Codon c : codons) { if (tr.getRegion().containsUnspliced(c)) { int p = tr.induce(c.getStart()); assert a[p] == 0; if (!aremoved.contains(c)) a[p] = c.totalActivity; if (c.isStop()) a[p] = -1; } } for (int f = 0; f < 3; f++) { int s = -1; double sum = 0; for (int p = f; p < a.length; p += 3) { if (a[p] == -1) {//stop if (sum > bestSum) { bestSum = sum; best = tr.getRegion().map(new ArrayGenomicRegion(s, p + 3)); } s = -1; sum = 0; } else sum += a[p]; if (a[p] > 0 && s == -1) s = p; } } } if (best != null) { ArrayList<Codon> cods = new ArrayList<>(); int uniqueCodons = 0; double uniqueActivity = 0; double totalActivity = 0; for (Codon c : codons) { if (best.containsUnspliced(c) && best.induce(c.getStart()) % 3 == 0) { if (aremoved.add(c)) { uniqueActivity += c.totalActivity; uniqueCodons++; } totalActivity += c.totalActivity; if (c.totalActivity > 0) cods.add(c); } } // System.out.println(codonsRegion.map(best)); if ((uniqueCodons >= minUniqueCodons || uniqueCodons == cods.size()) && uniqueActivity > minUniqueActivity && totalActivity > minOrfTotalActivity) { Collections.sort(cods); usedForAnno.addAll(cods); OrfWithCodons orf = new OrfWithCodons(index, 0, acount++, best.toArrayGenomicRegion(), cods, true); orfs.add(orf); } } } while (best != null); if (orfs.size() > 1) { // they are not necessarily connected! 
LinkedList<OrfWithCodons>[] connected = findConnectedOrfs(orfs); orfs.clear(); for (LinkedList<OrfWithCodons> corfs : connected) { for (boolean changed = true; changed && corfs.size() > 1;) { changed = false; if (useEM) inferOverlappingOrfActivitiesEM(corfs); else overlapUniqueCoverage(corfs); Iterator<OrfWithCodons> it = corfs.iterator(); while (it.hasNext()) { OrfWithCodons orf = it.next(); if (orf.getEstimatedTotalActivity() < minOrfTotalActivity) { it.remove(); changed = true; } } } if (corfs.size() > 1) distributeCodons(corfs); orfs.addAll(corfs); } } re.addAll(orfs); } // as edges only are represented in the splice graph, singleton codons are discarded (which does make sense anyway) for (Codon c : codons) { if (!c.isStop()) { // find unspliced successors (can be more than one, when the successor codon itself is spliced! all of them have the same start!) int max = c.getEnd() + maxAminoDist * 3; for (Codon n : codons .getIntervalsIntersecting(c.getEnd(), c.getEnd() + maxAminoDist * 3, buff.startAndClear(c)) .get()) { if (!containsInframeStop(sequence.substring(c.getEnd(), n.getStart()))) fg.addInteraction(c, n); max = n.getStart() + 2; } // find all spliced successors for each splice junction that comes before n or maxAminoDist sg.forEachIntronStartingBetween(c.getEnd(), max + 1, intron -> { for (Codon n : codons.getIntervalsIntersecting(intron.getEnd(), intron.getEnd() + maxAminoDist * 3 - (intron.getStart() - c.getEnd()), buff.startAndClear(c, intron)).get()) if (!containsInframeStop(SequenceUtils.extractSequence(new ArrayGenomicRegion(c.getStart(), intron.getStart(), intron.getEnd(), n.getStart()), sequence))) fg.addInteraction(c, n, intron); }); } } int cc = 1; for (SimpleDirectedGraph<Codon> g : fg.getWeaklyConnectedComponents()) { if (EI.wrap(g.getSources()).mapToDouble(c -> c.getTotalActivity()).sum() == 0) continue; // iterate longest paths in g LinkedList<Codon> topo = g.getTopologicalOrder(); HashSet<Codon> remInTopo = new HashSet<Codon>(topo); 
remInTopo.removeIf(c -> !stopCodons.contains(c) && !usedForAnno.contains(c)); HashSet<Codon> removed = new HashSet<Codon>(remInTopo); // double maxPathScore = 0; LinkedList<OrfWithCodons> orfs = new LinkedList<OrfWithCodons>(); int count = 0; while (removed.size() < topo.size()) { HashMap<Codon, MutablePair<GenomicRegion, Double>> longestPrefixes = new HashMap<Codon, MutablePair<GenomicRegion, Double>>(); for (Codon c : topo) longestPrefixes.put(c, new MutablePair<GenomicRegion, Double>(c, removed.contains(c) ? 0 : (c.getTotalActivity()))); Codon longestEnd = null; HashMap<Codon, Codon> backtracking = new HashMap<Codon, Codon>(); for (Codon c : topo) { // if (codonsRegion.map(c).getStart()==100_466_118) // System.out.println(c); // // if (codonsRegion.map(c).getStart()==100_465_842) // System.out.println(c); double len = longestPrefixes.get(c).Item2; for (AdjacencyNode<Codon> n = g.getTargets(c); n != null; n = n.next) { MutablePair<GenomicRegion, Double> pref = longestPrefixes.get(n.node); double nnact = removed.contains(n.node) ? 
0 : (n.node.getTotalActivity()); if (pref.Item2 <= len + nnact) { pref.set(extendFullPath(longestPrefixes.get(c).Item1, c, n.node, n.getLabel()), len + nnact); backtracking.put(n.node, c); } } if (longestEnd == null || longestPrefixes.get(longestEnd).Item2 <= len) longestEnd = c; } // determine longest path by backtracking and mark all codons on the path as removed ArrayList<Codon> orfCodons = new ArrayList<Codon>(); double totalActivity = 0; double uniqueActivity = 0; int uniqueCodons = 0; for (Codon c = longestEnd; c != null; c = backtracking.get(c)) { if (removed.add(c) && c.getTotalActivity() > 0) { uniqueCodons++; uniqueActivity += c.getTotalActivity(); } if (c.getTotalActivity() > 0) // to remove dummy stop codons orfCodons.add(c); totalActivity += c.getTotalActivity(); } // System.out.println(codonsRegion.map(longestPrefixes.get(longestEnd).Item1)); if ((uniqueCodons >= minUniqueCodons || uniqueCodons == orfCodons.size()) && uniqueActivity > minUniqueActivity && totalActivity > minOrfTotalActivity) { Collections.reverse(orfCodons); MutablePair<GenomicRegion, Double> triple = longestPrefixes.get(longestEnd); ArrayGenomicRegion region = triple.Item1.toArrayGenomicRegion(); String lastCodon = SequenceUtils.extractSequence( region.map( new ArrayGenomicRegion(region.getTotalLength() - 3, region.getTotalLength())), sequence); OrfWithCodons orf = new OrfWithCodons(index, cc, count++, region, orfCodons, stop.containsKey(lastCodon)); orfs.add(orf); } // maxPathScore = Math.max(maxPathScore,totalActivity); } if (orfs.size() > 1) { // they are not necessarily connected! 
LinkedList<OrfWithCodons>[] connected = findConnectedOrfs(orfs); orfs.clear(); for (LinkedList<OrfWithCodons> corfs : connected) { for (boolean changed = true; changed && corfs.size() > 1;) { changed = false; if (useEM) inferOverlappingOrfActivitiesEM(corfs); else overlapUniqueCoverage(corfs); Iterator<OrfWithCodons> it = corfs.iterator(); while (it.hasNext()) { OrfWithCodons orf = it.next(); if (orf.getEstimatedTotalActivity() < minOrfTotalActivity) { it.remove(); changed = true; } } } if (corfs.size() > 1) distributeCodons(corfs); orfs.addAll(corfs); } } re.addAll(orfs); cc++; } return re; }
From source file:eu.europa.ec.fisheries.uvms.rules.service.business.AbstractFact.java
public boolean salesPartiesValueDoesNotContainAny(List<SalesPartyFact> salesPartyTypes, String... valuesToMatch) { List<CodeType> codeTypes = new ArrayList<>(); HashSet<String> valuesToBeFound = new HashSet<>(Arrays.asList(valuesToMatch)); for (SalesPartyFact salesPartyFact : salesPartyTypes) { codeTypes.addAll(salesPartyFact.getRoleCodes()); }/*from ww w . j a va 2 s.c o m*/ if (valuesToMatch == null || valuesToMatch.length == 0 || CollectionUtils.isEmpty(codeTypes)) { return true; } for (CodeType codeType : codeTypes) { String value = codeType.getValue(); if (valuesToBeFound.contains(value)) { return false; } } return true; }
From source file:com.tremolosecurity.unison.openstack.KeystoneProvisioningTarget.java
@Override public void syncUser(User user, boolean addOnly, Set<String> attributes, Map<String, Object> request) throws ProvisioningException { int approvalID = 0; if (request.containsKey("APPROVAL_ID")) { approvalID = (Integer) request.get("APPROVAL_ID"); }/* ww w . j a v a 2 s .c om*/ Workflow workflow = (Workflow) request.get("WORKFLOW"); HttpCon con = null; Gson gson = new Gson(); try { con = this.createClient(); KSToken token = this.getToken(con); UserAndID fromKS = this.lookupUser(user.getUserID(), attributes, request, token, con); if (fromKS == null) { this.createUser(user, attributes, request); } else { //check attributes HashMap<String, String> attrsUpdate = new HashMap<String, String>(); KSUser toPatch = new KSUser(); if (!rolesOnly) { if (attributes.contains("email")) { String fromKSVal = null; String newVal = null; if (fromKS.getUser().getAttribs().get("email") != null) { fromKSVal = fromKS.getUser().getAttribs().get("email").getValues().get(0); } if (user.getAttribs().get("email") != null) { newVal = user.getAttribs().get("email").getValues().get(0); } if (newVal != null && (fromKSVal == null || !fromKSVal.equalsIgnoreCase(newVal))) { toPatch.setEmail(newVal); attrsUpdate.put("email", newVal); } else if (!addOnly && newVal == null && fromKSVal != null) { toPatch.setEmail(""); attrsUpdate.put("email", ""); } } if (attributes.contains("enabled")) { String fromKSVal = null; String newVal = null; if (fromKS.getUser().getAttribs().get("enabled") != null) { fromKSVal = fromKS.getUser().getAttribs().get("enabled").getValues().get(0); } if (user.getAttribs().get("enabled") != null) { newVal = user.getAttribs().get("enabled").getValues().get(0); } if (newVal != null && (fromKSVal == null || !fromKSVal.equalsIgnoreCase(newVal))) { toPatch.setName(newVal); attrsUpdate.put("enabled", newVal); } else if (!addOnly && newVal == null && fromKSVal != null) { toPatch.setEnabled(false); attrsUpdate.put("enabled", ""); } } if (attributes.contains("description")) { String 
fromKSVal = null; String newVal = null; if (fromKS.getUser().getAttribs().get("description") != null) { fromKSVal = fromKS.getUser().getAttribs().get("description").getValues().get(0); } if (user.getAttribs().get("description") != null) { newVal = user.getAttribs().get("description").getValues().get(0); } if (newVal != null && (fromKSVal == null || !fromKSVal.equalsIgnoreCase(newVal))) { toPatch.setDescription(newVal); attrsUpdate.put("description", newVal); } else if (!addOnly && newVal == null && fromKSVal != null) { toPatch.setDescription(""); attrsUpdate.put("description", ""); } } if (!attrsUpdate.isEmpty()) { UserHolder holder = new UserHolder(); holder.setUser(toPatch); String json = gson.toJson(holder); StringBuffer b = new StringBuffer(); b.append(this.url).append("/users/").append(fromKS.getId()); json = this.callWSPotch(token.getAuthToken(), con, b.toString(), json); for (String attr : attrsUpdate.keySet()) { String val = attrsUpdate.get(attr); this.cfgMgr.getProvisioningEngine().logAction(user.getUserID(), false, ActionType.Replace, approvalID, workflow, attr, val); } } for (String group : user.getGroups()) { if (!fromKS.getUser().getGroups().contains(group)) { String groupID = this.getGroupID(token.getAuthToken(), con, group); StringBuffer b = new StringBuffer(); b.append(this.url).append("/groups/").append(groupID).append("/users/") .append(fromKS.getId()); if (this.callWSPutNoData(token.getAuthToken(), con, b.toString())) { this.cfgMgr.getProvisioningEngine().logAction(user.getUserID(), false, ActionType.Add, approvalID, workflow, "group", group); } else { throw new ProvisioningException("Could not add group " + group); } } } if (!addOnly) { for (String group : fromKS.getUser().getGroups()) { if (!user.getGroups().contains(group)) { String groupID = this.getGroupID(token.getAuthToken(), con, group); StringBuffer b = new StringBuffer(); b.append(this.url).append("/groups/").append(groupID).append("/users/") .append(fromKS.getId()); 
this.callWSDelete(token.getAuthToken(), con, b.toString()); this.cfgMgr.getProvisioningEngine().logAction(user.getUserID(), false, ActionType.Delete, approvalID, workflow, "group", group); } } } } if (attributes.contains("roles")) { HashSet<Role> currentRoles = new HashSet<Role>(); if (fromKS.getUser().getAttribs().get("roles") != null) { Attribute attr = fromKS.getUser().getAttribs().get("roles"); for (String jsonRole : attr.getValues()) { currentRoles.add(gson.fromJson(jsonRole, Role.class)); } } if (user.getAttribs().containsKey("roles")) { StringBuffer b = new StringBuffer(); Attribute attr = user.getAttribs().get("roles"); for (String jsonRole : attr.getValues()) { Role role = gson.fromJson(jsonRole, Role.class); if (!currentRoles.contains(role)) { if (role.getScope().equalsIgnoreCase("project")) { String projectid = this.getProjectID(token.getAuthToken(), con, role.getProject()); if (projectid == null) { throw new ProvisioningException( "Project " + role.getDomain() + " does not exist"); } String roleid = this.getRoleID(token.getAuthToken(), con, role.getName()); if (roleid == null) { throw new ProvisioningException( "Role " + role.getName() + " does not exist"); } b.setLength(0); b.append(this.url).append("/projects/").append(projectid).append("/users/") .append(fromKS.getId()).append("/roles/").append(roleid); if (this.callWSPutNoData(token.getAuthToken(), con, b.toString())) { this.cfgMgr.getProvisioningEngine().logAction(user.getUserID(), false, ActionType.Add, approvalID, workflow, "role", jsonRole); } else { throw new ProvisioningException("Could not add role " + jsonRole); } } else { String domainid = this.getDomainID(token.getAuthToken(), con, role.getDomain()); if (domainid == null) { throw new ProvisioningException( "Domain " + role.getDomain() + " does not exist"); } String roleid = this.getRoleID(token.getAuthToken(), con, role.getName()); if (roleid == null) { throw new ProvisioningException( "Role " + role.getName() + " does not exist"); } 
b.setLength(0); b.append(this.url).append("/domains/").append(domainid).append("/users/") .append(fromKS.getId()).append("/roles/").append(roleid); if (this.callWSPutNoData(token.getAuthToken(), con, b.toString())) { this.cfgMgr.getProvisioningEngine().logAction(user.getUserID(), false, ActionType.Add, approvalID, workflow, "role", jsonRole); } else { throw new ProvisioningException("Could not add role " + jsonRole); } } } } } } if (!addOnly) { if (attributes.contains("roles")) { HashSet<Role> currentRoles = new HashSet<Role>(); if (user.getAttribs().get("roles") != null) { Attribute attr = user.getAttribs().get("roles"); for (String jsonRole : attr.getValues()) { currentRoles.add(gson.fromJson(jsonRole, Role.class)); } } if (fromKS.getUser().getAttribs().containsKey("roles")) { StringBuffer b = new StringBuffer(); Attribute attr = fromKS.getUser().getAttribs().get("roles"); for (String jsonRole : attr.getValues()) { Role role = gson.fromJson(jsonRole, Role.class); if (!currentRoles.contains(role)) { if (role.getScope().equalsIgnoreCase("project")) { String projectid = this.getProjectID(token.getAuthToken(), con, role.getProject()); if (projectid == null) { throw new ProvisioningException( "Project " + role.getDomain() + " does not exist"); } String roleid = this.getRoleID(token.getAuthToken(), con, role.getName()); if (roleid == null) { throw new ProvisioningException( "Role " + role.getName() + " does not exist"); } b.setLength(0); b.append(this.url).append("/projects/").append(projectid).append("/users/") .append(fromKS.getId()).append("/roles/").append(roleid); this.callWSDelete(token.getAuthToken(), con, b.toString()); this.cfgMgr.getProvisioningEngine().logAction(user.getUserID(), false, ActionType.Delete, approvalID, workflow, "role", jsonRole); } else { String domainid = this.getDomainID(token.getAuthToken(), con, role.getDomain()); if (domainid == null) { throw new ProvisioningException( "Domain " + role.getDomain() + " does not exist"); } String roleid = 
this.getRoleID(token.getAuthToken(), con, role.getName()); if (roleid == null) { throw new ProvisioningException( "Role " + role.getName() + " does not exist"); } b.setLength(0); b.append(this.url).append("/domains/").append(domainid).append("/users/") .append(fromKS.getId()).append("/roles/").append(roleid); this.callWSDelete(token.getAuthToken(), con, b.toString()); this.cfgMgr.getProvisioningEngine().logAction(user.getUserID(), false, ActionType.Delete, approvalID, workflow, "role", jsonRole); } } } } } } } } catch (Exception e) { throw new ProvisioningException("Could not work with keystone", e); } finally { if (con != null) { con.getBcm().shutdown(); } } }
From source file:com.termmed.statistics.Processor.java
private void printInternPriorityList(IReportDetail file, HashSet<Long> conceptList, File outputFold) throws IOException { File exclFile = new File(I_Constants.EXCLUYENT_OUTPUT_FOLDER + "/" + file.getFile() + (file.getFile().toLowerCase().endsWith(".csv") ? "" : ".csv")); File completeDetailFile = new File(I_Constants.STATS_OUTPUT_FOLDER + "/" + file.getFile() + (file.getFile().toLowerCase().endsWith(".csv") ? "" : ".csv")); TreeSet<Long> order = getOrder(file, completeDetailFile); BufferedWriter bw = FileHelper.getWriter(exclFile); Integer sctIdIndex = file.getSctIdIndex(); if (sctIdIndex == null) { sctIdIndex = 1;/*from w w w . ja v a 2 s . c om*/ } Integer priorityIndex = file.getPriorityListColumnIndex(); if (priorityIndex == null) { priorityIndex = 5; } boolean first = true; String line; String[] spl; for (Long ord : order) { BufferedReader br = FileHelper.getReader(completeDetailFile); if (first) { bw.append(br.readLine()); bw.append("\r\n"); first = false; } else { br.readLine(); } while ((line = br.readLine()) != null) { spl = line.split(",", -1); Long prior = Long.parseLong(spl[priorityIndex]); if (!prior.equals(ord)) { continue; } Long cid = Long.parseLong(spl[sctIdIndex]); if (conceptList.contains(cid)) { continue; } bw.append(line); bw.append("\r\n"); conceptList.add(cid); } br.close(); } bw.close(); }
From source file:com.vgi.mafscaling.LogView.java
private void view3dPlots() { if (xAxisColumn.getSelectedItem() == null || xAxisColumn.getSelectedItem().toString().isEmpty() || yAxisColumn.getSelectedItem() == null || yAxisColumn.getSelectedItem().toString().isEmpty() || plotsColumn.getSelectedItems() == null) return;//from w ww . ja v a 2s . c om plot3d.removeAllPlots(); String val; String xAxisColName = (String) xAxisColumn.getSelectedItem(); String yAxisColName = (String) yAxisColumn.getSelectedItem(); List<String> dataColNames = plotsColumn.getSelectedItems(); if (dataColNames.size() > 5) { JOptionPane.showMessageDialog(null, "Sorry, only 5 plots are supported. More plots will make the graph too slow.", "Too many parameters", JOptionPane.ERROR_MESSAGE); return; } int xColIdx = logDataTable.getColumnByHeaderName(xAxisColName).getModelIndex() - 1; xColIdx = logDataTable.getCurrentIndexForOriginalColumn(xColIdx); int yColIdx = logDataTable.getColumnByHeaderName(yAxisColName).getModelIndex() - 1; yColIdx = logDataTable.getCurrentIndexForOriginalColumn(yColIdx); ArrayList<Color> colorsArray = new ArrayList<Color>(); colorsArray.add(Color.BLUE); colorsArray.add(Color.RED); colorsArray.add(Color.GREEN); colorsArray.add(Color.ORANGE); colorsArray.add(Color.GRAY); double x, y, z; XYZ xyz; for (int j = 0; j < dataColNames.size(); ++j) { HashSet<XYZ> uniqueXYZ = new HashSet<XYZ>(); int zColIdx = logDataTable.getColumnByHeaderName(dataColNames.get(j)).getModelIndex() - 1; zColIdx = logDataTable.getCurrentIndexForOriginalColumn(zColIdx); int count = 0; double[][] xyzArrayTemp = new double[logDataTable.getRowCount()][3]; for (int i = 0; i < logDataTable.getRowCount(); ++i) { val = (String) logDataTable.getValueAt(i, xColIdx); x = Double.valueOf(val); val = (String) logDataTable.getValueAt(i, yColIdx); y = Double.valueOf(val); val = (String) logDataTable.getValueAt(i, zColIdx); z = Double.valueOf(val); xyz = new XYZ(x, y, z); if (uniqueXYZ.contains(xyz)) continue; uniqueXYZ.add(xyz); xyzArrayTemp[count][0] = x; 
xyzArrayTemp[count][1] = y; xyzArrayTemp[count][2] = z; count += 1; } double[][] xyzArray = new double[uniqueXYZ.size()][3]; for (int k = 0; k < xyzArray.length; ++k) System.arraycopy(xyzArrayTemp[k], 0, xyzArray[k], 0, 3); plot3d.addScatterPlot(dataColNames.get(j), colorsArray.get(j), xyzArray); } plot3d.setAxisLabel(0, xAxisColumn.getSelectedItem().toString()); plot3d.setAxisLabel(1, yAxisColumn.getSelectedItem().toString()); plot3d.setAxisLabel(2, plotsColumn.getSelectedItemsString()); }
From source file:com.stimulus.archiva.extraction.MessageExtraction.java
private String prepareHTMLMessage(String baseURL, Hashtable<String, String> inl, Hashtable<String, String> imgs, Hashtable<String, String> nonImgs, Hashtable<String, String> ready, ArrayList<String> mimeTypes) { String str = (String) inl.get("text/html"); boolean alternative = false; for (int i = 0; i < mimeTypes.size(); i++) { if (((String) mimeTypes.get(i)).toLowerCase(Locale.ENGLISH).indexOf("multipart/alternative") > -1) { alternative = true;//from w w w. j a v a 2s. c om break; } } if (!alternative && inl.containsKey("text/plain")) { String plain = activateURLs((String) inl.get("text/plain")).replaceAll("\r", "").replaceAll("\n", "<br>" + System.getProperty("line.separator")) + "<br><br>" + System.getProperty("line.separator") + "<hr><br>"; int bestStart = 0; int next = str.toLowerCase(Locale.ENGLISH).indexOf("<body"); if (next > 0) next = str.indexOf(">", next) + 1; if (next > 0 && next < str.length()) bestStart = next; if (bestStart > 0) str = str.substring(0, bestStart) + plain + str.substring(bestStart); else str = plain + str; } HashSet<String> alreadyUsed = new HashSet<String>(); Enumeration enuma = imgs.keys(); while (enuma.hasMoreElements()) { String repl = (String) enuma.nextElement(); String cidTag = (String) imgs.get(repl); if (cidTag.startsWith("<") && cidTag.endsWith(">")) { cidTag = cidTag.substring(1, cidTag.length() - 1); } if (str.indexOf("cid:" + cidTag) > -1) { alreadyUsed.add(repl); } String st = (String) ready.get(repl); str = Pattern.compile("cid:" + cidTag, Pattern.CASE_INSENSITIVE).matcher(str) .replaceAll(ready.get(repl)); } enuma = nonImgs.keys(); while (enuma.hasMoreElements()) { String repl = (String) enuma.nextElement(); String cidTag = (String) nonImgs.get(repl); if (cidTag.startsWith("<") && cidTag.endsWith(">")) cidTag = cidTag.substring(1, cidTag.length() - 1); if (str.indexOf("cid:" + cidTag) > -1) alreadyUsed.add(repl); String st = (String) ready.get(repl); str = Pattern.compile("cid:" + cidTag, 
Pattern.CASE_INSENSITIVE).matcher(str) .replaceAll(ready.get(repl)); } StringBuffer buff = new StringBuffer(); enuma = imgs.keys(); while (enuma.hasMoreElements()) { String fl = (String) enuma.nextElement(); if (!alreadyUsed.contains(fl)) { fl = (String) ready.get(fl); if (fl.endsWith(".tif") || fl.endsWith(".tiff")) { buff.append(System.getProperty("line.separator") + "<BR><BR><EMBED SRC=\"" + baseURL.replaceAll("\\\\", "/") + "/temp/" + fl + "\" TYPE=\"image/tiff\">"); } else { buff.append(System.getProperty("line.separator") + "<BR><BR><IMG SRC=\"" + baseURL.replaceAll("\\\\", "/") + "/temp/" + fl + "\">"); } } } String output = ""; int bestStart = 0; int next = str.toLowerCase(Locale.ENGLISH).indexOf("</body>"); if (next > 0 && next < str.length()) bestStart = next; if (bestStart > 0) output = str.substring(0, bestStart) + buff.toString() + str.substring(bestStart); else output = str + buff.toString(); if (output.indexOf("charset=") < 0) { next = output.toLowerCase(Locale.ENGLISH).indexOf("</head>"); if (next > 0) output = output.substring(0, next) + "<META http-equiv=Content-Type content=\"text/html; charset=" + serverEncoding + "\">" + output.substring(next); } else output = output.replaceFirst("charset=.*\"", "charset=" + serverEncoding + "\""); output = output.replaceAll("FONT SIZE=\\d", "FONT"); output = output.replaceAll("font size=\\d", "font"); return writeTempMessage(output, ".html"); }
From source file:com.panet.imeta.job.entries.zipfile.JobEntryZipFile.java
/**
 * Zips the file(s) under {@code realTargetdirectory} into
 * {@code realZipfilename}, honoring the configured behavior when the zip
 * already exists ({@code ifzipfileexists}: 0 = create uniquely-named zip,
 * 1 = append to existing zip, 2 = do nothing, 3 = fail), optional
 * include/exclude wildcards, the configured compression rate, and the
 * post-zip action ({@code afterzip}: 1 = delete originals, 2 = move
 * originals to {@code realMovetodirectory}).
 *
 * @param parentJob            owning job; checked for stop requests and used
 *                             for result-file bookkeeping
 * @param result               job result the produced zip may be registered in
 * @param realZipfilename      destination zip file name (already
 *                             variable-resolved)
 * @param realWildcard         include regex, applied only when the target is a
 *                             directory; may be empty
 * @param realWildcardExclude  exclude regex, applied only when the target is a
 *                             directory; may be empty
 * @param realTargetdirectory  file or directory to zip
 * @param realMovetodirectory  destination folder for afterzip == 2; may be null
 * @param createparentfolder   whether to create the zip file's parent folder
 * @return true on success, false on any failure
 */
public boolean processRowFile(Job parentJob, Result result, String realZipfilename, String realWildcard,
        String realWildcardExclude, String realTargetdirectory, String realMovetodirectory,
        boolean createparentfolder) {
    LogWriter log = LogWriter.getInstance();
    boolean Fileexists = false;      // destination zip already exists
    File tempFile = null;            // temp file the existing zip is renamed to in append mode
    File fileZip = null;
    boolean resultat = false;        // overall success flag returned to the caller
    boolean renameOk = false;        // true only when append mode successfully renamed the old zip
    boolean orginexist = false;      // the target file/folder exists

    // Check if the target file/folder exists.
    FileObject OriginFile = null;
    ZipInputStream zin = null;
    byte[] buffer = null;
    FileOutputStream dest = null;
    BufferedOutputStream buff = null;
    org.apache.tools.zip.ZipOutputStream out = null;
    org.apache.tools.zip.ZipEntry entry = null;
    try {
        OriginFile = KettleVFS.getFileObject(realTargetdirectory);
        orginexist = OriginFile.exists();
    } catch (Exception e) {
        // Intentionally ignored: a failed lookup is treated as "target does not
        // exist" and reported below.
    } finally {
        if (OriginFile != null) {
            try {
                OriginFile.close();
            } catch (IOException ex) {
            }
            ;
        }
    }

    if (realZipfilename != null && orginexist) {
        FileObject fileObject = null;
        try {
            fileObject = KettleVFS.getFileObject(realZipfilename);
            // Check whether the zip file already exists.
            if (fileObject.exists()) {
                Fileexists = true;
                if (log.isDebug())
                    log.logDebug(toString(), Messages.getString("JobZipFiles.Zip_FileExists1.Label")
                            + realZipfilename + Messages.getString("JobZipFiles.Zip_FileExists2.Label"));
            }
            // Create the parent folder of the destination zip if requested.
            if (createparentfolder) {
                createParentFolder(realZipfilename);
            }
            // Start the process.
            if (ifzipfileexists == 3 && Fileexists) {
                // The zip file exists and the user wants the job entry to fail.
                resultat = false;
            } else if (ifzipfileexists == 2 && Fileexists) {
                // The zip file exists and the user wants to do nothing.
                if (addfiletoresult) {
                    // Register the (untouched) zip in the job result files.
                    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
                            KettleVFS.getFileObject(realZipfilename), parentJob.getJobname(), toString());
                    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                }
                resultat = true;
            } else if (afterzip == 2 && realMovetodirectory == null) {
                // "Move files after zip" was chosen but no destination folder given.
                resultat = false;
                log.logError(toString(),
                        Messages.getString("JobZipFiles.AfterZip_No_DestinationFolder_Defined.Label"));
            } else
            // Normal case: actually build the zip.
            {
                // Determine whether the target is a file or a folder.
                String[] filelist = null;
                File f = new File(realTargetdirectory);
                if (f.isDirectory()) {
                    // Target is a directory: get all the files in the directory.
                    filelist = f.list();
                } else {
                    // Target is a single file.
                    filelist = new String[1];
                    filelist[0] = f.getName();
                }
                if (filelist.length == 0) {
                    resultat = false;
                    log.logError(toString(),
                            Messages.getString("JobZipFiles.Log.FolderIsEmpty", realTargetdirectory));
                } else if (!checkContainsFile(realTargetdirectory, filelist)) {
                    resultat = false;
                    log.logError(toString(),
                            Messages.getString("JobZipFiles.Log.NoFilesInFolder", realTargetdirectory));
                } else {
                    if (ifzipfileexists == 0 && Fileexists) {
                        // The zip exists and the user wants a new, uniquely named
                        // one: append a formatted timestamp to the name.
                        // Is there already a .zip extension?
                        if (realZipfilename.toLowerCase().endsWith(".zip")) {
                            // Strip it off before appending the timestamp.
                            realZipfilename = realZipfilename.substring(0, realZipfilename.length() - 4);
                        }
                        realZipfilename = realZipfilename + "_" + StringUtil.getFormattedDateTimeNow(true)
                                + ".zip";
                        if (log.isDebug())
                            log.logDebug(toString(),
                                    Messages.getString("JobZipFiles.Zip_FileNameChange1.Label") + realZipfilename
                                            + Messages.getString("JobZipFiles.Zip_FileNameChange1.Label"));
                    } else if (ifzipfileexists == 1 && Fileexists) {
                        // The zip exists and the user wants to append: rename the
                        // existing zip to a temp file, then copy its entries plus
                        // the new files into a fresh zip under the original name.
                        fileZip = new File(realZipfilename);
                        tempFile = File.createTempFile(fileZip.getName(), null);
                        // Delete the fresh temp file, otherwise the rename of the
                        // existing zip onto it would fail.
                        tempFile.delete();
                        renameOk = fileZip.renameTo(tempFile);
                        if (!renameOk) {
                            log.logError(toString(),
                                    Messages.getString("JobZipFiles.Cant_Rename_Temp1.Label")
                                            + fileZip.getAbsolutePath()
                                            + Messages.getString("JobZipFiles.Cant_Rename_Temp2.Label")
                                            + tempFile.getAbsolutePath()
                                            + Messages.getString("JobZipFiles.Cant_Rename_Temp3.Label"));
                        }
                        if (log.isDebug())
                            log.logDebug(toString(), Messages.getString("JobZipFiles.Zip_FileAppend1.Label")
                                    + realZipfilename + Messages.getString("JobZipFiles.Zip_FileAppend2.Label"));
                    }
                    if (log.isDetailed())
                        log.logDetailed(toString(),
                                Messages.getString("JobZipFiles.Files_Found1.Label") + filelist.length
                                        + Messages.getString("JobZipFiles.Files_Found2.Label")
                                        + realTargetdirectory
                                        + Messages.getString("JobZipFiles.Files_Found3.Label"));
                    Pattern pattern = null;
                    Pattern patternexclude = null;
                    // Prepare the include/exclude patterns — only when the target
                    // is a folder.
                    if (f.isDirectory()) {
                        if (!Const.isEmpty(realWildcard)) {
                            pattern = Pattern.compile(realWildcard);
                        }
                        if (!Const.isEmpty(realWildcardExclude)) {
                            patternexclude = Pattern.compile(realWildcardExclude);
                        }
                    }
                    // Prepare the zip output stream.
                    buffer = new byte[18024];
                    dest = new FileOutputStream(realZipfilename);
                    buff = new BufferedOutputStream(dest);
                    out = new org.apache.tools.zip.ZipOutputStream(buff);
                    // Names already written to the zip — used both to dedupe old
                    // entries and to let new files shadow same-named old entries.
                    HashSet<String> fileSet = new HashSet<String>();
                    if (renameOk) {
                        // Append mode: the existing zip was renamed to a temp file;
                        // copy all of its entries into the new zip, excluding any
                        // entry whose name matches one of the new files.
                        // NOTE(review): the cast mixes java.util.zip and
                        // org.apache.tools.zip entry types — verify the imports of
                        // the full file before refactoring.
                        zin = new ZipInputStream(new FileInputStream(tempFile));
                        entry = (ZipEntry) zin.getNextEntry();
                        while (entry != null) {
                            String name = entry.getName();
                            if (!fileSet.contains(name)) {
                                // Copy the old entry into the output stream.
                                out.putNextEntry(new ZipEntry(name));
                                int len;
                                while ((len = zin.read(buffer)) > 0) {
                                    out.write(buffer, 0, len);
                                }
                                fileSet.add(name);
                            }
                            entry = (ZipEntry) zin.getNextEntry();
                        }
                        // Close the input zip.
                        zin.close();
                    }
                    // Set the method.
                    out.setMethod(org.apache.tools.zip.ZipOutputStream.DEFLATED);
                    // Set the compression level.
                    // NOTE(review): the last two checks are plain "if", not
                    // "else if" — behavior-preserving here, but inconsistent.
                    if (compressionrate == 0) {
                        out.setLevel(Deflater.NO_COMPRESSION);
                    } else if (compressionrate == 1) {
                        out.setLevel(Deflater.DEFAULT_COMPRESSION);
                    }
                    if (compressionrate == 2) {
                        out.setLevel(Deflater.BEST_COMPRESSION);
                    }
                    if (compressionrate == 3) {
                        out.setLevel(Deflater.BEST_SPEED);
                    }
                    // Record which files were actually zipped so they can be moved
                    // or deleted afterwards.
                    String[] ZippedFiles = new String[filelist.length];
                    int FileNum = 0;
                    // Walk the candidate files; abort when the parent job stops.
                    for (int i = 0; i < filelist.length && !parentJob.isStopped(); i++) {
                        boolean getIt = true;
                        boolean getItexclude = false;
                        // Apply the include/exclude regexes — only when the target
                        // is a folder.
                        if (f.isDirectory()) {
                            if (pattern != null) {
                                Matcher matcher = pattern.matcher(filelist[i]);
                                getIt = matcher.matches();
                            }
                            if (patternexclude != null) {
                                Matcher matcherexclude = patternexclude.matcher(filelist[i]);
                                getItexclude = matcherexclude.matches();
                            }
                        }
                        // Resolve the absolute path of the file being processed.
                        String targetFilename = realTargetdirectory + Const.FILE_SEPARATOR + filelist[i];
                        if (f.isFile())
                            targetFilename = realTargetdirectory;
                        File file = new File(targetFilename);
                        if (getIt && !getItexclude && !file.isDirectory() && !fileSet.contains(filelist[i])) {
                            // The file qualifies: add it to the zip archive.
                            if (log.isDebug())
                                log.logDebug(toString(),
                                        Messages.getString("JobZipFiles.Add_FilesToZip1.Label") + filelist[i]
                                                + Messages.getString("JobZipFiles.Add_FilesToZip2.Label")
                                                + realTargetdirectory
                                                + Messages.getString("JobZipFiles.Add_FilesToZip3.Label"));
                            // Open an input stream for the current file.
                            FileInputStream in = new FileInputStream(targetFilename);
                            // Add the zip entry and copy the file's bytes.
                            out.putNextEntry(new ZipEntry(filelist[i]));
                            int len;
                            while ((len = in.read(buffer)) > 0) {
                                out.write(buffer, 0, len);
                            }
                            out.flush();
                            out.closeEntry();
                            // Close the current file input stream.
                            in.close();
                            // Remember the zipped file for the after-zip action.
                            ZippedFiles[FileNum] = filelist[i];
                            FileNum = FileNum + 1;
                        }
                    }
                    // Close the zip output stream chain.
                    out.close();
                    buff.close();
                    dest.close();
                    if (log.isBasic())
                        log.logBasic(toString(), Messages.getString("JobZipFiles.Log.TotalZippedFiles",
                                "" + ZippedFiles.length));
                    // Delete the temp file left over from append mode.
                    if (tempFile != null) {
                        tempFile.delete();
                    }
                    // After-zip action: move (2) or delete (1) the zipped files.
                    if (afterzip == 1 || afterzip == 2) {
                        // Iterate over the array of zipped files.
                        for (int i = 0; i < ZippedFiles.length; i++) {
                            if (ZippedFiles[i] != null) {
                                FileObject fileObjectd = KettleVFS.getFileObject(
                                        realTargetdirectory + Const.FILE_SEPARATOR + ZippedFiles[i]);
                                if (f.isFile())
                                    fileObjectd = KettleVFS.getFileObject(realTargetdirectory);
                                // gc() is explicitly called because e.g. when
                                // "create file" is used in the same job for the
                                // same file, the file object is not properly
                                // garbage collected and the file cannot be
                                // deleted anymore — a known JVM problem.
                                System.gc();
                                if (afterzip == 1) {
                                    // Delete the original file.
                                    boolean deleted = fileObjectd.delete();
                                    if (!deleted) {
                                        resultat = false;
                                        log.logError(toString(),
                                                Messages.getString("JobZipFiles.Cant_Delete_File1.Label")
                                                        + realTargetdirectory + Const.FILE_SEPARATOR
                                                        + ZippedFiles[i]
                                                        + Messages
                                                                .getString("JobZipFiles.Cant_Delete_File2.Label"));
                                    }
                                    // File deleted.
                                    if (log.isDebug())
                                        log.logDebug(toString(),
                                                Messages.getString("JobZipFiles.File_Deleted1.Label")
                                                        + realTargetdirectory + Const.FILE_SEPARATOR
                                                        + ZippedFiles[i]
                                                        + Messages.getString("JobZipFiles.File_Deleted2.Label"));
                                } else if (afterzip == 2) {
                                    // Move the original file to the destination folder.
                                    try {
                                        FileObject fileObjectm = KettleVFS.getFileObject(realMovetodirectory
                                                + Const.FILE_SEPARATOR + ZippedFiles[i]);
                                        fileObjectd.moveTo(fileObjectm);
                                    } catch (IOException e) {
                                        log.logError(toString(),
                                                Messages.getString("JobZipFiles.Cant_Move_File1.Label")
                                                        + ZippedFiles[i]
                                                        + Messages.getString("JobZipFiles.Cant_Move_File2.Label")
                                                        + e.getMessage());
                                        resultat = false;
                                    }
                                    // File moved.
                                    if (log.isDebug())
                                        log.logDebug(toString(),
                                                Messages.getString("JobZipFiles.File_Moved1.Label")
                                                        + ZippedFiles[i]
                                                        + Messages.getString("JobZipFiles.File_Moved2.Label"));
                                }
                            }
                        }
                    }
                    if (addfiletoresult) {
                        // Register the produced zip in the job result files.
                        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
                                KettleVFS.getFileObject(realZipfilename), parentJob.getJobname(), toString());
                        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                    }
                    resultat = true;
                }
            }
        } catch (Exception e) {
            log.logError(toString(),
                    Messages.getString("JobZipFiles.Cant_CreateZipFile1.Label") + realZipfilename
                            + Messages.getString("JobZipFiles.Cant_CreateZipFile2.Label") + e.getMessage());
            resultat = false;
        } finally {
            if (fileObject != null) {
                try {
                    fileObject.close();
                } catch (IOException ex) {
                }
                ;
            }
            // Close the remaining streams; errors on close are ignored.
            try {
                if (out != null)
                    out.close();
                if (buff != null)
                    buff.close();
                if (dest != null)
                    dest.close();
                if (zin != null)
                    zin.close();
                // NOTE(review): nulling the entry reference has no effect here.
                if (entry != null)
                    entry = null;
            } catch (IOException ex) {
            }
            ;
        }
    } else {
        // Nothing to do; still report what was missing.
        resultat = true;
        if (realZipfilename == null)
            log.logError(toString(), Messages.getString("JobZipFiles.No_ZipFile_Defined.Label"));
        if (!orginexist)
            log.logError(toString(),
                    Messages.getString("JobZipFiles.No_FolderCible_Defined.Label", realTargetdirectory));
    }
    // Return the success flag to the caller.
    return resultat;
}
From source file:edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexUnnestToProductRule.java
/**
 * Rewrites a chain starting at a DATASOURCESCAN or UNNEST into a cross
 * product (an inner join on TRUE) of two independent sub-plans, so that
 * unnests with no data dependency on each other can be joined instead of
 * nested. Selects that straddle the two partitions are pulled above the
 * created join. Returns false (no change) when the operator chain cannot be
 * partitioned into independent inner/outer parts.
 *
 * @param opRef   reference to the candidate root operator
 * @param context optimization context used to recompute type environments
 * @return true iff the plan was rewritten
 */
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    // Only fire on a data-source scan or an unnest.
    if (op.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN
            && op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
        return false;
    }
    // Stop rewriting if the operators originate from a nested tuple source.
    if (insideSubplan(opRef)) {
        return false;
    }
    // We may pull selects above the join we create in order to eliminate
    // possible dependencies between the outer and inner input plans of the join.
    List<ILogicalOperator> topSelects = new ArrayList<ILogicalOperator>();
    // Keep track of the operators and used variables participating in the
    // inner input plan (seeded with op itself), and likewise for the outer plan.
    HashSet<LogicalVariable> innerUsedVars = new HashSet<LogicalVariable>();
    List<ILogicalOperator> innerOps = new ArrayList<ILogicalOperator>();
    HashSet<LogicalVariable> outerUsedVars = new HashSet<LogicalVariable>();
    List<ILogicalOperator> outerOps = new ArrayList<ILogicalOperator>();
    innerOps.add(op);
    VariableUtilities.getUsedVariables(op, innerUsedVars);
    Mutable<ILogicalOperator> opRef2 = op.getInputs().get(0);
    AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
    // Find an unnest or join below and partition the plan between the first
    // unnest and that operator into independent parts.
    if (!findPlanPartition(op2, innerUsedVars, outerUsedVars, innerOps, outerOps, topSelects, false)) {
        // We could not find an unnest or join.
        return false;
    }
    // The last operator collected must be an unnest or join.
    AbstractLogicalOperator unnestOrJoin = (AbstractLogicalOperator) outerOps.get(outerOps.size() - 1);
    ILogicalOperator outerRoot = null;
    ILogicalOperator innerRoot = null;
    EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
    // If we found a join, simply use it as the outer root; otherwise (second
    // unnest) keep partitioning below it.
    if (unnestOrJoin.getOperatorTag() != LogicalOperatorTag.INNERJOIN
            && unnestOrJoin.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
        // We've found a second unnest. First, sanity-check that this unnest does
        // not produce any vars that are used by the plan above (until the first
        // unnest) — that would be a real dependency, so no cross product.
        List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
        VariableUtilities.getProducedVariables(unnestOrJoin, producedVars);
        for (LogicalVariable producedVar : producedVars) {
            if (innerUsedVars.contains(producedVar)) {
                return false;
            }
        }
        // Continue finding a partitioning of the plan such that the inner and
        // outer partitions are independent, in order to feed a join.
        // Now, we look below the second unnest or join.
        VariableUtilities.getUsedVariables(unnestOrJoin, outerUsedVars);
        AbstractLogicalOperator unnestChild = (AbstractLogicalOperator) unnestOrJoin.getInputs().get(0)
                .getValue();
        if (!findPlanPartition(unnestChild, innerUsedVars, outerUsedVars, innerOps, outerOps, topSelects,
                true)) {
            // We could not find a suitable partitioning.
            return false;
        }
    }
    // Materialize both partitions: the inner plan is rooted on a fresh empty
    // tuple source; the outer plan keeps its original source (null sentinel).
    innerRoot = buildOperatorChain(innerOps, ets, context);
    context.computeAndSetTypeEnvironmentForOperator(innerRoot);
    outerRoot = buildOperatorChain(outerOps, null, context);
    context.computeAndSetTypeEnvironmentForOperator(outerRoot);
    // Cross product = inner join with constant TRUE condition.
    InnerJoinOperator product = new InnerJoinOperator(
            new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
    // Outer branch.
    product.getInputs().add(new MutableObject<ILogicalOperator>(outerRoot));
    // Inner branch.
    product.getInputs().add(new MutableObject<ILogicalOperator>(innerRoot));
    context.computeAndSetTypeEnvironmentForOperator(product);
    // Put the pulled-up selects on top of the join.
    ILogicalOperator topOp = product;
    if (!topSelects.isEmpty()) {
        topOp = buildOperatorChain(topSelects, product, context);
    }
    // Plug the selects + product into the plan.
    opRef.setValue(topOp);
    context.computeAndSetTypeEnvironmentForOperator(topOp);
    return true;
}
From source file:com.beyondj.gateway.handlers.detecting.DetectingGateway.java
public void route(final SocketWrapper socket, ConnectionParameters params, final Buffer received) { NetClient client = null;//from ww w.j a va2s. c o m if (params.protocolVirtualHost == null) { params.protocolVirtualHost = defaultVirtualHost; } HashSet<String> schemes = new HashSet<String>(Arrays.asList(params.protocolSchemes)); if (params.protocolVirtualHost != null) { List<ServiceDetails> services = serviceMap.getServices(params.protocolVirtualHost); // Lets try again with the defaultVirtualHost if (services.isEmpty() && !params.protocolVirtualHost.equals(defaultVirtualHost)) { params.protocolVirtualHost = defaultVirtualHost; services = serviceMap.getServices(params.protocolVirtualHost); } if (LOG.isDebugEnabled()) LOG.debug(String.format("%d services match the virtual host", services.size())); if (!services.isEmpty()) { ClientRequestFacade clientRequestFacade = clientRequestFacadeFactory.create(socket, params); ServiceDetails serviceDetails = serviceLoadBalancer.choose(services, clientRequestFacade); if (serviceDetails != null) { List<String> urlStrings = serviceDetails.getServices(); if (LOG.isDebugEnabled()) LOG.debug("Selected service exposes the following URLS: {}", urlStrings); for (String urlString : urlStrings) { if (StringUtils.isNotEmpty(urlString)) { // lets create a client for this request... 
try { URI uri = new URI(urlString); //URL url = new URL(urlString); String urlProtocol = uri.getScheme(); if (schemes.contains(urlProtocol)) { if (!socket.remoteAddress().toString() .equals(clientRequestFacade.getClientRequestKey())) { LOG.info(String.format( "Connecting client from '%s' (with key '%s') requesting virtual host '%s' to '%s:%d' using the %s protocol", socket.remoteAddress(), clientRequestFacade.getClientRequestKey(), params.protocolVirtualHost, uri.getHost(), uri.getPort(), params.protocol)); } else { LOG.info(String.format( "Connecting client from '%s' requesting virtual host '%s' to '%s:%d' using the %s protocol", socket.remoteAddress(), params.protocolVirtualHost, uri.getHost(), uri.getPort(), params.protocol)); } client = createClient(params, socket, uri, received); break; } } catch (URISyntaxException e) { LOG.warn("Failed to parse URI: " + urlString + ". " + e, e); } } } } } } if (client == null) { // failed to route handleConnectFailure(socket, String.format("No endpoint available for virtual host '%s' and protocol %s", params.protocolVirtualHost, params.protocol)); } }