List of usage examples for java.util.HashSet.size()
public int size()
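Before the project examples below, here is a minimal, self-contained sketch (the class name HashSetSizeDemo is made up for illustration) of what size() reports: the number of distinct elements currently in the set, so duplicate adds do not grow it and removals shrink it immediately.

import java.util.HashSet;

public class HashSetSizeDemo {
    public static void main(String[] args) {
        HashSet<String> tags = new HashSet<String>();
        tags.add("a");
        tags.add("b");
        tags.add("a");                      // duplicate, ignored by the set
        System.out.println(tags.size());    // 2
        tags.remove("b");
        System.out.println(tags.size());    // 1
        System.out.println(tags.isEmpty()); // false, one element remains
    }
}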
From source file:org.apache.hadoop.hive.ql.MultiDriver.java
private void doAuthorization(BaseSemanticAnalyzer sem) throws HiveException, AuthorizationException {
    HashSet<ReadEntity> inputs = sem.getInputs();
    HashSet<WriteEntity> outputs = sem.getOutputs();
    SessionState ss = SessionState.get();
    HiveOperation op = ss.getHiveOperation();
    Hive db = sem.getDb();
    if (op != null) {
        if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) {
            ss.getAuthorizer().authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null,
                    HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
        } else {
            if (op.equals(HiveOperation.IMPORT)) {
                ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
                if (!isa.existsTable()) {
                    ss.getAuthorizer().authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null,
                            HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
                }
            }
        }
        if (outputs != null && outputs.size() > 0) {
            for (WriteEntity write : outputs) {
                if (write.getType() == WriteEntity.Type.PARTITION) {
                    Partition part = db.getPartition(write.getTable(), write.getPartition().getSpec(), false);
                    if (part != null) {
                        ss.getAuthorizer().authorize(write.getPartition(), null, op.getOutputRequiredPrivileges());
                        continue;
                    }
                }
                if (write.getTable() != null) {
                    ss.getAuthorizer().authorize(write.getTable(), null, op.getOutputRequiredPrivileges());
                }
            }
        }
    }
    if (inputs != null && inputs.size() > 0) {
        Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
        Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();
        Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
        for (ReadEntity read : inputs) {
            Table tbl = read.getTable();
            if ((read.getPartition() != null) || (tbl.isPartitioned())) {
                String tblName = tbl.getTableName();
                if (tableUsePartLevelAuth.get(tblName) == null) {
                    boolean usePartLevelPriv = (tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null
                            && ("TRUE".equalsIgnoreCase(tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))));
                    if (usePartLevelPriv) {
                        tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
                    } else {
                        tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
                    }
                }
            }
        }
        if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.QUERY)) {
            SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
            ParseContext parseCtx = querySem.getParseContext();
            Map<TableScanOperator, Table> tsoTopMap = parseCtx.getTopToTable();
            for (Map.Entry<String, Operator<? extends OperatorDesc>> topOpMap : querySem.getParseContext()
                    .getTopOps().entrySet()) {
                Operator<? extends OperatorDesc> topOp = topOpMap.getValue();
                if (topOp instanceof TableScanOperator && tsoTopMap.containsKey(topOp)) {
                    TableScanOperator tableScanOp = (TableScanOperator) topOp;
                    Table tbl = tsoTopMap.get(tableScanOp);
                    List<Integer> neededColumnIds = tableScanOp.getNeededColumnIDs();
                    List<FieldSchema> columns = tbl.getCols();
                    List<String> cols = new ArrayList<String>();
                    if (neededColumnIds != null && neededColumnIds.size() > 0) {
                        for (int i = 0; i < neededColumnIds.size(); i++) {
                            cols.add(columns.get(neededColumnIds.get(i)).getName());
                        }
                    } else {
                        for (int i = 0; i < columns.size(); i++) {
                            cols.add(columns.get(i).getName());
                        }
                    }
                    // map may not contain all sources, since input list may have been optimized out
                    // or non-existent tho such sources may still be referenced by the TableScanOperator
                    // if it's null then the partition probably doesn't exist so let's use table permission
                    if (tbl.isPartitioned() && tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
                        String alias_id = topOpMap.getKey();
                        PrunedPartitionList partsList = PartitionPruner.prune(tableScanOp, parseCtx, alias_id);
                        Set<Partition> parts = partsList.getPartitions();
                        for (Partition part : parts) {
                            List<String> existingCols = part2Cols.get(part);
                            if (existingCols == null) {
                                existingCols = new ArrayList<String>();
                            }
                            existingCols.addAll(cols);
                            part2Cols.put(part, existingCols);
                        }
                    } else {
                        List<String> existingCols = tab2Cols.get(tbl);
                        if (existingCols == null) {
                            existingCols = new ArrayList<String>();
                        }
                        existingCols.addAll(cols);
                        tab2Cols.put(tbl, existingCols);
                    }
                }
            }
        }
        // cache the results for table authorization
        Set<String> tableAuthChecked = new HashSet<String>();
        for (ReadEntity read : inputs) {
            Table tbl = read.getTable();
            if (read.getPartition() != null) {
                Partition partition = read.getPartition();
                tbl = partition.getTable();
                // use partition level authorization
                if (tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
                    List<String> cols = part2Cols.get(partition);
                    if (cols != null && cols.size() > 0) {
                        ss.getAuthorizer().authorize(partition.getTable(), partition, cols,
                                op.getInputRequiredPrivileges(), null);
                    } else {
                        ss.getAuthorizer().authorize(partition, op.getInputRequiredPrivileges(), null);
                    }
                    continue;
                }
            }
            // if we reach here, it means it needs to do a table authorization
            // check, and the table authorization may already happened because of other
            // partitions
            if (tbl != null && !tableAuthChecked.contains(tbl.getTableName())
                    && !(tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE)) {
                List<String> cols = tab2Cols.get(tbl);
                if (cols != null && cols.size() > 0) {
                    ss.getAuthorizer().authorize(tbl, null, cols, op.getInputRequiredPrivileges(), null);
                } else {
                    ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(), null);
                }
                tableAuthChecked.add(tbl.getTableName());
            }
        }
    }
}
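The example above repeatedly guards iteration with checks such as inputs != null && inputs.size() > 0 and cols != null && cols.size() > 0 before authorizing anything. A stripped-down sketch of that guard pattern, with hypothetical names (authorizeAll, outputs) standing in for the Hive entities and authorizer:

import java.util.HashSet;

public class SizeGuardSketch {
    // Only iterate when the set is present and non-empty.
    public static void authorizeAll(HashSet<String> outputs) {
        if (outputs != null && outputs.size() > 0) { // same effect as !outputs.isEmpty()
            for (String entity : outputs) {
                System.out.println("authorizing " + entity);
            }
        }
    }

    public static void main(String[] args) {
        HashSet<String> outputs = new HashSet<String>();
        outputs.add("table_a");
        authorizeAll(outputs);  // prints one line
        authorizeAll(null);     // guard prevents a NullPointerException
    }
}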
From source file:admixture.parameter.Parameter.java
public void commandListenor(String[] args) { CommandLine cl = null;/*from ww w .ja v a2s .c om*/ try { cl = parser.parse(ops, args); } catch (ParseException E) { System.err.println(E.getMessage()); System.exit(0); } if (cl.hasOption(cmd_help)) { help = true; } if (cl.hasOption(cmd_trans)) { transFlag = true; } if (cl.hasOption(cmd_out)) { out = cl.getOptionValue(cmd_out); } if (cl.hasOption(cmd_collapse)) { collapseFlag = true; } if (cl.hasOption(cmd_cc)) { ccFlag = true; piFlag = false; piiFlag = false; uiFlag = false; uiiFlag = false; } if (cl.hasOption(cmd_pi)) { piFlag = true; ccFlag = false; piiFlag = false; uiFlag = false; uiiFlag = false; } if (cl.hasOption(cmd_pii)) { piiFlag = true; ccFlag = false; piFlag = false; uiFlag = false; uiiFlag = false; } if (cl.hasOption(cmd_ui)) { uiFlag = true; ccFlag = false; piFlag = false; piiFlag = false; uiiFlag = false; } if (cl.hasOption(cmd_uii)) { uiiFlag = true; ccFlag = false; piFlag = false; piiFlag = false; uiFlag = false; } // file if (cl.hasOption(cmd_file)) { StringBuffer sb1 = new StringBuffer(); StringBuffer sb2 = new StringBuffer(); sb1.append(cl.getOptionValue(cmd_file)); sb1.append(".ped"); sb2.append(cl.getOptionValue(cmd_file)); sb2.append(".map"); ped = sb1.toString(); map = sb2.toString(); } // if (cl.hasOption(cmd_ped)) { // ped = cl.getOptionValue(cmd_ped); // } // if (cl.hasOption(cmd_map)) { // map = cl.getOptionValue(cmd_map); // } if (ped != null && map != null) { File fped = new File(ped); if (!fped.exists()) { System.err.println("could not open " + ped + "."); Test.LOG.append("could not open " + ped + ".\n"); Test.printLog(); System.exit(0); } File fmap = new File(map); if (!fmap.exists()) { System.err.println("could not open " + map + "."); Test.LOG.append("could not open " + map + ".\n"); Test.printLog(); System.exit(0); } fileFlag = true; } // bfile if (cl.hasOption(cmd_bfile)) { StringBuffer sb1 = new StringBuffer(); StringBuffer sb2 = new StringBuffer(); StringBuffer sb3 = new StringBuffer(); sb1.append(cl.getOptionValue(cmd_bfile)); sb1.append(".bed"); sb2.append(cl.getOptionValue(cmd_bfile)); sb2.append(".bim"); sb3.append(cl.getOptionValue(cmd_bfile)); sb3.append(".fam"); bed = sb1.toString(); bim = sb2.toString(); fam = sb3.toString(); } // if (cl.hasOption(cmd_bed)) { // bed = cl.getOptionValue(cmd_bed); // } // if (cl.hasOption(cmd_bim)) { // bim = cl.getOptionValue(cmd_bim); // } // if (cl.hasOption(cmd_fam)) { // fam = cl.getOptionValue(cmd_fam); // } if (bed != null && bim != null && fam != null) { File fbed = new File(bed); if (!fbed.exists()) { System.err.println("could not open " + bed + "."); Test.LOG.append("could not open " + bed + ".\n"); Test.printLog(); System.exit(0); } File fbim = new File(bim); if (!fbim.exists()) { System.err.println("could not open " + bim + "."); Test.LOG.append("could not open " + bim + ".\n"); Test.printLog(); System.exit(0); } File ffam = new File(fam); if (!ffam.exists()) { System.err.println("could not open " + fam + "."); Test.LOG.append("could not open " + fam + ".\n"); Test.printLog(); System.exit(0); } bfileFlag = true; } if (cl.hasOption(cmd_covar)) { pheno = cl.getOptionValue(cmd_covar); File fpheno = new File(pheno); if (!fpheno.exists()) { System.err.println("could not open " + fpheno + "."); Test.LOG.append("could not open " + fpheno + ".\n"); Test.printLog(); System.exit(0); } } if (cl.hasOption(cmd_covar_header)) { covar_header_flag = true; } if (cl.hasOption(cmd_header)) { header = true; } if (cl.hasOption(cmd_linear)) { linkfunction = 0; } if 
(cl.hasOption(cmd_logistic)) { linkfunction = 1; } if (cl.hasOption(cmd_pheno_number)) { response = Integer.parseInt(cl.getOptionValue(cmd_pheno_number)) - 1; if (response < -1) { System.err.println("bad parameter for --" + cmd_pheno_number_long + ": " + (response + 1) + "."); Test.LOG.append("bad parameter for --" + cmd_pheno_number_long + ": " + (response + 1) + ".\n"); Test.printLog(); System.exit(0); } } if (cl.hasOption(cmd_covar_number)) { String[] p = cl.getOptionValues(cmd_covar_number); HashSet<Integer> idx = NewIt.newHashSet(); for (int i = 0, len = p.length; i < len; i++) { if (p[i].contains("-")) { String[] pp = p[i].split("-"); if (pp.length != 2) { System.err .println("bad parameter for option --" + cmd_covar_number_long + ": " + p[i] + "."); Test.LOG.append( "bad parameter for option --" + cmd_covar_number_long + ": " + p[i] + ".\n"); Test.printLog(); System.exit(0); } for (int j = Integer.parseInt(pp[0]); j <= Integer.parseInt(pp[1]); j++) { idx.add(new Integer(j)); } } else { idx.add(new Integer(Integer.parseInt(p[i]))); } } predictor = new int[idx.size()]; int c = 0; for (Iterator<Integer> e = idx.iterator(); e.hasNext();) { predictor[c] = e.next().intValue() - 1; if (predictor[c] < 0) { System.err.println( "bad parameter for option --" + cmd_covar_number_long + ": " + predictor[c] + "."); Test.LOG.append( "bad parameter for option --" + cmd_covar_number_long + ": " + predictor[c] + ".\n"); Test.printLog(); System.exit(0); } c++; } } // if (cl.hasOption(cmd_covar_name)) { // HashSet<String> cn = NewIt.newHashSet(); // String[] p = cl.getOptionValues(cmd_covar_name); // for (int i = 0; i < p.length; i++) { // if (p[i].contains("-")) { // String[] pp = predictor_name[i].split("-"); // if (pp.length != 2) { // System.err.println("bad parameter for option --" + cmd_covar_name_long + ": " + p[i] + "."); // Test.LOG.append("bad parameter for option --" + cmd_covar_name_long + ": " + p[i] + ".\n"); // Test.printLog(); // System.exit(0); // } // for (int j = 0; j < pp.length; j++) { // cn.add(pp[j]); // } // } else { // cn.add(p[i]); // } // } // predictor_name = (String[]) cn.toArray(new String[0]); // } if (cl.hasOption(cmd_bgsnp)) { String[] bg = cl.getOptionValues(cmd_bgsnp); HashSet<String> bgSet = NewIt.newHashSet(); for (int i = 0; i < bg.length; i++) { bgSet.add(bg[i]); } if (bgSet.size() != bg.length) { System.err.println("bad parameter for --" + cmd_bgsnp + "."); Test.LOG.append("bad parameter for --" + cmd_bgsnp + ".\n"); Test.printLog(); System.exit(0); } bgsnp = cl.getOptionValues(cmd_bgsnp); bgsnpFlag = true; } if (cl.hasOption(cmd_hg18)) { hg18Flag = true; hg19Flag = false; hgFile = "/gene36.txt"; } if (cl.hasOption(cmd_hg19)) { hg19Flag = true; hg18Flag = false; hgFile = "/gene37.txt"; } if (cl.hasOption(cmd_hg)) { hg19Flag = false; hg18Flag = false; hgFile = cl.getOptionValue(cmd_hg); } if (cl.hasOption(cmd_snp2genelist)) { snp2genefileFlag = true; } if (cl.hasOption(cmd_snp2genemlist)) { snp2genefilesFlag = true; } if (cl.hasOption(cmd_region)) { String[] r = cl.getOptionValues(cmd_region); ArrayList<String> chr = NewIt.newArrayList(); ArrayList<String> b = NewIt.newArrayList(); ArrayList<String> e = NewIt.newArrayList(); for (int i = 0; i < r.length; i++) { String[] s = r[i].split(","); if (s.length != 3) { System.err.println("bad parameter for --" + cmd_region + ": " + r[i] + "."); Test.LOG.append("bad parameter for --" + cmd_region + ": " + r[i] + ".\n"); Test.printLog(); System.exit(0); } chr.add(s[0]); b.add(s[1]); e.add(s[2]); } chr_reg = (String[]) 
chr.toArray(new String[0]); begin = new double[b.size()]; end = new double[e.size()]; for (int i = 0; i < r.length; i++) { begin[i] = Double.parseDouble(b.get(i)); end[i] = Double.parseDouble(e.get(i)); } regionFlag = true; } if (cl.hasOption(cmd_gene_list)) { String gl = cl.getOptionValue(cmd_gene_list); File f = new File(gl); if (!f.exists()) { System.err.println("could not find file for --option " + cmd_gene_list_long + ": " + gl + "."); Test.LOG.append("could not find file for --option " + cmd_gene_list_long + ": " + gl + ".\n"); Test.printLog(); System.exit(0); } BufferedReader reader0 = null; try { reader0 = new BufferedReader(new FileReader(f)); } catch (IOException E) { System.err.println("could not open gene list " + gl + "."); Test.LOG.append("could not open gene list " + gl + ".\n"); Test.printLog(); System.exit(0); } String line0 = null; HashSet<String> gSet = NewIt.newHashSet(); try { while ((line0 = reader0.readLine()) != null) { String[] gn = line0.split(delim); gSet.add(gn[0]); } } catch (IOException e1) { e1.printStackTrace(); } String[] g = (String[]) gSet.toArray(new String[0]); boolean[] gflag = new boolean[gSet.size()]; Arrays.fill(gflag, false); ArrayList<String> ge = NewIt.newArrayList(); ArrayList<String> g_chr = NewIt.newArrayList(); ArrayList<String> g_begin = NewIt.newArrayList(); ArrayList<String> g_end = NewIt.newArrayList(); BufferedReader reader = null; if (hg18Flag || hg19Flag) { InputStream is = getClass().getResourceAsStream(hgFile); DataInputStream in = new DataInputStream(is); reader = new BufferedReader(new InputStreamReader(in)); } else { File fhg = new File(hgFile); if (!fhg.exists()) { System.err.println("could not find file for --option " + cmd_hg + ": " + hgFile + "."); Test.LOG.append("could not find file for --option " + cmd_hg + ": " + hgFile + ".\n"); Test.printLog(); System.exit(0); } try { reader = new BufferedReader(new FileReader(fhg)); } catch (IOException E) { System.err.println("could not open gene list " + hgFile + "."); Test.LOG.append("could not open gene list " + hgFile + ".\n"); Test.printLog(); System.exit(0); } } String line = null; try { while ((line = reader.readLine()) != null) { String[] s = line.split("\\s+"); // System.err.println(line); if (s.length != 4) { continue; } for (int i = 0; i < g.length; i++) { if (s[0].compareTo(g[i]) == 0) { ge.add(s[0]); g_chr.add(s[1]); g_begin.add(s[2]); g_end.add(s[3]); gflag[i] = true; } } } reader.close(); } catch (IOException e) { e.printStackTrace(); } boolean flag = true; int count = 0; for (int i = 0; i < gflag.length; i++) { if (!gflag[i]) { System.err.println("could not fine gene " + g[i] + "."); Test.LOG.append("could not find gene " + g[i] + ".\n"); flag = false; count++; } } System.err.println("of " + gflag.length + " genes " + (gflag.length - count) + " was found."); Test.LOG.append("of " + gflag.length + " genes " + (gflag.length - count) + " was found.\n"); if (!snp2genefileFlag && !snp2genefilesFlag) { if (!flag) { Test.printLog(); System.exit(0); } } gene = (String[]) ge.toArray(new String[0]); gene_chr = (String[]) g_chr.toArray(new String[0]); gene_begin = new double[gene_chr.length]; gene_end = new double[gene_chr.length]; for (int i = 0; i < gene_chr.length; i++) { gene_begin[i] = Double.parseDouble(g_begin.get(i)) / 1000; gene_end[i] = Double.parseDouble(g_end.get(i)) / 1000; System.err.println( gene[i] + ": chr" + gene_chr[i] + " " + gene_begin[i] + "k ~ " + gene_end[i] + "k."); Test.LOG.append( gene[i] + ": chr" + gene_chr[i] + " " + gene_begin[i] + "k ~ " + 
gene_end[i] + "k.\n"); } geneFlag = true; } if (cl.hasOption(cmd_gene_window)) { double gw = Double.parseDouble(cl.getOptionValue(cmd_gene_window)); if (gw < 0) { System.err.println("bad parameter for option --" + cmd_gene_window_long + ": " + gw + "."); Test.LOG.append("bad parameter for option --" + cmd_gene_window_long + ": " + gw + ".\n"); Test.printLog(); System.exit(0); } genewindow = gw; } // if (cl.hasOption(cmd_gene)) { // String[] g = cl.getOptionValues(cmd_gene); // boolean[] gflag = new boolean[g.length]; // Arrays.fill(gflag, false); // ArrayList<String> ge = NewIt.newArrayList(); // ArrayList<String> g_chr = NewIt.newArrayList(); // ArrayList<String> g_begin = NewIt.newArrayList(); // ArrayList<String> g_end = NewIt.newArrayList(); // // BufferedReader reader = null; // if(hg18Flag || hg19Flag) { // InputStream is = getClass().getResourceAsStream(hgFile); // DataInputStream in = new DataInputStream(is); // reader = new BufferedReader(new InputStreamReader(in)); // } else { // File fhg = new File(hgFile); // if (!fhg.exists()) { // System.err.println("could not find file for --option " + cmd_hg + ": " + hgFile +"."); // Test.LOG.append("could not find file for --option " + cmd_hg + ": " + hgFile + ".\n"); // Test.printLog(); // System.exit(0); // } // try { // reader = new BufferedReader(new FileReader(fhg)); // } catch (IOException E) { // System.err.println("could not open gene list " + hgFile + "."); // Test.LOG.append("could not open gene list " + hgFile + ".\n"); // Test.printLog(); // System.exit(0); // } // // } // // String line = null; // try { // while ((line = reader.readLine()) != null) { // // String[] s = line.split("\\s+"); //// System.err.println(line); // if (s.length != 4) { // continue; // } // // for (int i = 0; i < g.length; i++) { // if (s[0].compareTo(g[i]) == 0) { // ge.add(s[0]); // g_chr.add(s[1]); // g_begin.add(s[2]); // g_end.add(s[3]); // gflag[i] = true; // } // } // // } // reader.close(); // } catch (IOException e) { // e.printStackTrace(); // } // boolean flag = true; // int count = 0; // for(int i = 0; i < gflag.length; i++) { // if(!gflag[i]) { // System.err.println("could not find gene " + g[i] + "."); // Test.LOG.append("could not find gene " + g[i] + ".\n"); // flag = false; // count++; // } // } // System.err.println("of " + gflag.length + " genes " + (gflag.length - count) + " was found."); // Test.LOG.append("of " + gflag.length + " genes " + (gflag.length - count) + " was found.\n"); // if (!snp2genefileFlag && !snp2genefilesFlag) { // if(!flag) { // Test.printLog(); // System.exit(0); // } // } // // gene = (String[]) ge.toArray(new String[0]); // gene_chr = (String[]) g_chr.toArray(new String[0]); // gene_begin = new double[gene_chr.length]; // gene_end = new double[gene_chr.length]; // // for (int i = 0; i < gene_chr.length; i++) { // gene_begin[i] = Double.parseDouble(g_begin.get(i)) / 1000; // gene_end[i] = Double.parseDouble(g_end.get(i)) / 1000; // System.err.println(gene[i] + ": chr" + gene_chr[i] + " " +gene_begin[i] + "k ~ " + gene_end[i] + "k"); // Test.LOG.append(gene[i] + ": chr" + gene_chr[i] + " " +gene_begin[i] + "k ~ " + gene_end[i] + "k.\n"); // } // geneFlag = true; // } if (cl.hasOption(cmd_extract)) { if (!transFlag) { includesnpFile = cl.getOptionValues(cmd_extract); ArrayList<String> includesnpList = NewIt.newArrayList(); for (int h = 0; h < includesnpFile.length; h++) { File f = new File(includesnpFile[h]); if (!f.exists()) { System.err.println("could not find --" + cmd_extract + ": " + includesnpFile[h] + 
"."); Test.LOG.append("could not fine --" + cmd_extract + ": " + includesnpFile[h] + ".\n"); Test.printLog(); System.exit(0); } BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(f)); } catch (IOException E) { System.err.println("could not read --" + cmd_extract + ": " + includesnpFile[h] + "."); Test.LOG.append("could not read --" + cmd_extract + ": " + includesnpFile[h] + ".\n"); Test.printLog(); System.exit(0); } ArrayList<String> snp = NewIt.newArrayList(); String line = null; try { while ((line = reader.readLine()) != null) { String[] s = line.split(delim); snp.add(s[0]); } reader.close(); } catch (IOException E) { System.err.println("bad lines in " + includesnpFile[h] + "."); Test.LOG.append("bad lines in " + includesnpFile[h] + ".\n"); Test.printLog(); System.exit(0); } if (snp.size() > 0) { ArrayList<String> insnp = NewIt.newArrayList(); for (int i = 0; i < snp.size(); i++) { String subSNP = snp.get(i); insnp.add(subSNP); } if (insnp.size() > 0) { includesnpList.addAll(insnp); snpFlag = true; } } if (includesnpList.size() > 0) { includesnp = (String[]) includesnpList.toArray(new String[0]); } } } else { includesnpFile = cl.getOptionValues(cmd_extract); xincludesnp = new String[includesnpFile.length][]; for (int h = 0; h < includesnpFile.length; h++) { File f = new File(includesnpFile[h]); if (!f.exists()) { System.err.println("could not find " + includesnpFile[h] + "."); Test.LOG.append("could not find " + includesnpFile[h] + ".\n"); Test.printLog(); System.exit(0); } BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(f)); } catch (IOException E) { System.err.println("could not read " + includesnpFile[h] + "."); Test.LOG.append("could not read " + includesnpFile[h] + ".\n"); Test.printLog(); System.exit(0); } ArrayList<String> snp = NewIt.newArrayList(); String line = null; try { while ((line = reader.readLine()) != null) { String[] s = line.split(delim); snp.add(s[0]); } reader.close(); } catch (IOException E) { System.err.println("bad lines in " + includesnpFile[h] + "."); Test.LOG.append("bad lines in " + includesnpFile[h] + ".\n"); Test.printLog(); System.exit(0); } if (snp.size() > 0) { ArrayList<String> insnp = NewIt.newArrayList(); for (int i = 0; i < snp.size(); i++) { String subSNP = snp.get(i); insnp.add(subSNP); } if (insnp.size() > 0) { xincludesnp[h] = (String[]) insnp.toArray(new String[0]); snpFlag = true; } } } } } if (cl.hasOption(cmd_exclude)) { if (!transFlag) { String snps_file = cl.getOptionValue(cmd_exclude); ArrayList<String> excludesnpList = NewIt.newArrayList(); for (int h = 0; h < 1; h++) { File f = new File(snps_file); if (!f.exists()) { System.err.println("could not find --" + cmd_extract + ": " + snps_file + "."); Test.LOG.append("could not fine --" + cmd_extract + ": " + snps_file + ".\n"); Test.printLog(); System.exit(0); } BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(f)); } catch (IOException E) { System.err.println("could not read --" + cmd_extract + ": " + snps_file + "."); Test.LOG.append("could not read --" + cmd_extract + ": " + snps_file + ".\n"); Test.printLog(); System.exit(0); } ArrayList<String> snp = NewIt.newArrayList(); String line = null; try { while ((line = reader.readLine()) != null) { String[] s = line.split(delim); snp.add(s[0]); } reader.close(); } catch (IOException E) { System.err.println("bad lines in " + snps_file + "."); Test.LOG.append("bad lines in " + snps_file + ".\n"); Test.printLog(); System.exit(0); } if (snp.size() > 0) { 
ArrayList<String> exsnp = NewIt.newArrayList(); for (int i = 0; i < snp.size(); i++) { String subSNP = snp.get(i); exsnp.add(subSNP); } if (exsnp.size() > 0) { excludesnpList.addAll(exsnp); snpFlag = true; } } if (excludesnpList.size() > 0) { excludesnp = (String[]) excludesnpList.toArray(new String[0]); } } } } if (cl.hasOption(cmd_keep_male)) { keep_maleFlag = true; } if (cl.hasOption(cmd_keep_female)) { keep_femaleFlag = true; } if (cl.hasOption(cmd_ex_nosex)) { ex_nosexFlag = true; } if (cl.hasOption(cmd_remove)) { String file = cl.getOptionValue(cmd_remove); File f = new File(file); if (!f.exists()) { System.err.println("could not open " + file + "."); Test.LOG.append("could not open " + file + ".\n"); Test.printLog(); System.exit(0); } BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(new File(file))); } catch (IOException E) { System.err.println("could not read " + file + "."); Test.LOG.append("coudl not read " + file + ".\n"); Test.printLog(); System.exit(0); } ArrayList<String> famList = NewIt.newArrayList(); ArrayList<String> indList = NewIt.newArrayList(); String line = null; try { while ((line = reader.readLine()) != null) { String[] l = line.split(MDRConstant.delim); if (l.length < 2) continue; famList.add(l[0]); indList.add(l[1]); } } catch (IOException e) { e.printStackTrace(System.err); System.exit(0); } ex_family = new String[2][]; ex_family[0] = (String[]) famList.toArray(new String[0]); ex_family[1] = (String[]) indList.toArray(new String[0]); removeFlag = true; } if (cl.hasOption(cmd_keep)) { String file = cl.getOptionValue(cmd_keep); File f = new File(file); if (!f.exists()) { System.err.println("could not open " + file + "."); Test.LOG.append("could not open " + file + ".\n"); Test.printLog(); System.exit(0); } BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(new File(file))); } catch (IOException E) { System.err.println("could not read " + file + "."); Test.LOG.append("coudl not read " + file + ".\n"); Test.printLog(); System.exit(0); } ArrayList<String> famList = NewIt.newArrayList(); ArrayList<String> indList = NewIt.newArrayList(); String line = null; try { while ((line = reader.readLine()) != null) { String[] l = line.split(MDRConstant.delim); if (l.length < 2) continue; famList.add(l[0]); indList.add(l[1]); } } catch (IOException e) { e.printStackTrace(System.err); System.exit(0); } indKeep = new String[2][]; indKeep[0] = (String[]) famList.toArray(new String[0]); indKeep[1] = (String[]) indList.toArray(new String[0]); keepFlag = true; } if (cl.hasOption(cmd_chr)) { String[] chr = cl.getOptionValues(cmd_chr); HashSet<String> chrSet = NewIt.newHashSet(); HashSet<String> exSet = NewIt.newHashSet(); for (int i = 0; i < chr.length; i++) { if (chr[i].startsWith("-")) { exSet.add(chr[i].substring(1, chr[i].length())); } else { chrSet.add(chr[i]); } } if (chr.length != chrSet.size() + exSet.size()) { System.err.println("bad parameter for optin --" + cmd_chr + "."); Test.LOG.append("bad parameter for option --" + cmd_chr + ".\n"); Test.printLog(); System.exit(0); } if (chrSet.size() > 0) { in_chr = (String[]) chrSet.toArray(new String[0]); inchrFlag = true; } if (exSet.size() > 0) { ex_chr = (String[]) exSet.toArray(new String[0]); exchrFlag = true; } } if (cl.hasOption(cmd_snpwindow)) { String[] s = cl.getOptionValues(cmd_snpwindow); snpwindow = new String[s.length]; snp_window = new double[s.length][2]; for (int i = 0; i < s.length; i++) { String[] ss = s[i].split(incommand_separator); if (ss.length != 3) { 
System.err.println("bad parameter for optin --" + cmd_snpwindow_long + " " + s[i] + "."); Test.LOG.append("bad parameter for option --" + cmd_snpwindow_long + " " + s[i] + ".\n"); Test.printLog(); System.exit(0); } snpwindow[i] = ss[0]; snp_window[i][0] = Double.parseDouble(ss[1]) * -1000; if (Double.parseDouble(ss[2]) > 0) { snp_window[i][1] = Double.parseDouble(ss[2]) * 1000; } else { snp_window[i][1] = Double.MAX_VALUE; } } snpwindowFlag = true; } if (cl.hasOption(cmd_maf)) { maf = Double.parseDouble(cl.getOptionValue(cmd_maf)); if (maf < 0) { System.err.println("bad parameter for optin --" + cmd_maf + " " + maf + "."); Test.LOG.append("bad parameter for option --" + cmd_maf + " " + maf + ".\n"); Test.printLog(); System.exit(0); } mafFlag = true; } if (cl.hasOption(cmd_max_maf)) { max_maf = Double.parseDouble(cl.getOptionValue(cmd_max_maf)); if (max_maf < 0) { System.err.println("bad parameter for optin --" + cmd_max_maf_long + " " + max_maf + "."); Test.LOG.append("bad parameter for option --" + cmd_max_maf_long + " " + max_maf + ".\n"); Test.printLog(); System.exit(0); } maxmafFlag = true; } if (cl.hasOption(cmd_geno)) { geno = Double.parseDouble(cl.getOptionValue(cmd_geno)); if (geno < 0) { System.err.println("bad parameter for optin --" + cmd_geno + " " + geno + "."); Test.LOG.append("bad parameter for option --" + cmd_geno + " " + geno + ".\n"); Test.printLog(); System.exit(0); } genoFlag = true; } // // if (cl.hasOption(cmd_hwe)) { // hwe = Double.parseDouble(cl.getOptionValue(cmd_hwe)); // if (hwe < 0) { // throw new IllegalArgumentException("bad parameter for --hwe: " + // hwe); // } // hweFlag = true; // } // if (cl.hasOption(cmd_reg)) { // linkfunction = Integer.parseInt(cl.getOptionValue(cmd_reg)); // } if (cl.hasOption(cmd_cv)) { cv = Integer.parseInt(cl.getOptionValue(cmd_cv)); if (cv < 2) { System.err.println("bad parameter for optin --" + cmd_cv + " " + cv + "."); Test.LOG.append("bad parameter for option --" + cmd_cv + " " + cv + ".\n"); Test.printLog(); System.exit(0); } cvFlag = true; } // if (cl.hasOption(cmd_trgroup)) { // trgroup = Double.parseDouble(cl.getOptionValue(cmd_trgroup)); // trgroupFlag = true; // } // if (cl.hasOption(cmd_trsex)) { // trsex = Integer.parseInt(cl.getOptionValue(cmd_trsex)); // if (trsex != 1 && trsex != 2) { // throw new // IllegalArgumentException("unknown value for option --trsex."); // } // trsexFlag = true; // } // if (cl.hasOption(cmd_ttfile)) { // String tf = cl.getOptionValue(cmd_ttfile); // File ttfile = new File(tf); // if (!ttfile.exists()) { // throw new IllegalArgumentException("could not open ttfile " + tf); // } // // ArrayList<String> Farray = NewIt.newArrayList(); // ArrayList<String> Iarray = NewIt.newArrayList(); // BufferedReader reader = null; // try { // reader = new BufferedReader(new FileReader(new File(tf))); // } catch (IOException E) { // throw new IllegalArgumentException("failed in reading " + tf); // } // String line = null; // try { // while ((line = reader.readLine()) != null) { // String[] l = line.split(delim); // Farray.add(l[0]); // Iarray.add(l[1]); // } // } catch (IOException e) { // e.printStackTrace(); // } // // ttArray = new String[2][Farray.size()]; // ttArray[0] = (String[]) Farray.toArray(new String[0]); // ttArray[1] = (String[]) Iarray.toArray(new String[0]); // ttfileFlag = true; // } // if (cl.hasOption(cmd_border)) { // String[] h = cl.getOptionValues(cmd_border); // if (h.length != 2) { // throw new // IllegalArgumentException("bad parameter for option --border."); // } // boolean rflag = 
false; // if (h[0].startsWith("-")) { // border_fid = h[0].substring(1, h[0].length()); // rflag = true; // } else { // border_fid = h[0]; // } // if (h[1].startsWith("-")) { // border_iid = h[1].substring(1, h[1].length()); // rflag = true; // } else { // border_iid = h[1]; // } // borderFlag = true; // reverseborderFlag = rflag; // } if (cl.hasOption(cmd_seed)) { seed = Integer.parseInt(cl.getOptionValue(cmd_seed)); } if (cl.hasOption(cmd_tie)) { String t = cl.getOptionValue(cmd_tie); if (t.compareTo("h") == 0) { tie = 1; } else if (t.compareTo("l") == 0) { tie = 0; } else { tie = -1; } } // if (cl.hasOption(cmd_simu)) { // simu = Integer.parseInt(cl.getOptionValue(cmd_simu)); // } if (cl.hasOption(cmd_perm)) { perm = Integer.parseInt(cl.getOptionValue(cmd_perm)); permFlag = true; } /* if (cl.hasOption(cmd_ep)) { ep = Double.parseDouble(cl.getOptionValue(cmd_ep)); if (ep >= 1 || ep < 0) { System.err.println("bad parameter for optin --" + ep + " " + ep + "."); Test.LOG.append("bad parameter for option --" + ep + " " + ep + ".\n"); Test.printLog(); System.exit(0); } epFlag = true; } */ // if (cl.hasOption(cmd_perm_scheme)) { // permu_scheme = true; // } // if (cl.hasOption(cmd_unrelated_only)) { // unrelated_only = true; // } if (cl.hasOption(cmd_order)) { order = Integer.parseInt(cl.getOptionValue(cmd_order)); } if (cl.hasOption(cmd_thin)) { thin = Double.parseDouble(cl.getOptionValue(cmd_thin)); if (thin < 0) { System.err.println("bad parameter for optin --" + cmd_thin + " " + thin + "."); Test.LOG.append("bad parameter for option --" + cmd_thin + " " + thin + ".\n"); Test.printLog(); System.exit(0); } } if (cl.hasOption(cmd_slice)) { String[] s = cl.getOptionValue(cmd_slice).split("/"); slice = Integer.parseInt(s[0]); sliceN = Integer.parseInt(s[1]); if (slice <= 0 || sliceN <= 0 || slice > sliceN) { System.err.println("bad parameter for optin --" + cmd_slice + " " + slice + "."); Test.LOG.append("bad parameter for option --" + cmd_slice + " " + slice + ".\n"); Test.printLog(); System.exit(0); } sliceFlag = true; } if (cl.hasOption(cmd_missing_phenotype)) { String[] s = cl.getOptionValues(cmd_missing_phenotype); na = s; // missing_phenotype = cl.getOptionValue(cmd_missing_phenotype); } // if (cl.hasOption(cmd_missing_genotype)) { // missing_genotype = cl.getOptionValue(cmd_missing_genotype); // } if (cl.hasOption(cmd_missing_allele)) { missing_allele = cl.getOptionValue(cmd_missing_allele); } if (cl.hasOption(cmd_status_shift)) { status_shift = 0; status_shiftFlag = true; } /* if (cl.hasOption(cmd_Vc)) { vc = Double.parseDouble(cl.getOptionValue(cmd_Vc)); vcFlag = true; } if (cl.hasOption(cmd_training)) { threshold_training = Double.parseDouble(cl .getOptionValue(cmd_training)); trainingFlag = true; } if (cl.hasOption(cmd_testing)) { threshold_testing = Double.parseDouble(cl .getOptionValue(cmd_testing)); testingFlag = true; } */ if (cl.hasOption(cmd_version)) { System.err.println(); Test.printLog(); System.exit(1); } if (cl.hasOption(cmd_testdrive)) { testdrive = true; } if (cl.hasOption(cmd_node)) { node = Integer.parseInt(cl.getOptionValue(cmd_node)); nodeFlag = true; clusterFlag = true; } if (cl.hasOption(cmd_email)) { email = cl.getOptionValue(cmd_email); emailFlag = true; clusterFlag = true; } if (cl.hasOption(cmd_memory)) { memory = cl.getOptionValue(cmd_memory); memoryFlag = true; clusterFlag = true; } if (cl.hasOption(cmd_walltime)) { walltime = Integer.parseInt(cl.getOptionValue(cmd_walltime)); walltimeFlag = true; clusterFlag = true; } if (cl.hasOption(cmd_submit)) { submit = 
true; clusterFlag = true; } if (help) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("UGMDR", ops); System.exit(1); } }
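Among the many options parsed above, two size() idioms recur: sizing a primitive array from a de-duplicated set (predictor = new int[idx.size()]) and detecting repeated arguments by comparing the set's size to the raw array length (bgSet.size() != bg.length). A simplified sketch of both, using hypothetical names (toIndexArray, hasDuplicates) rather than the parser's own fields:

import java.util.HashSet;

public class CovarIndexSketch {
    // Copy the distinct indices into a primitive array sized by the set.
    public static int[] toIndexArray(HashSet<Integer> idx) {
        int[] predictor = new int[idx.size()];
        int c = 0;
        for (Integer i : idx) {
            predictor[c++] = i - 1; // convert 1-based option value to 0-based index
        }
        return predictor;
    }

    // If the set ends up smaller than the input array, at least one value was repeated.
    public static boolean hasDuplicates(String[] values) {
        HashSet<String> seen = new HashSet<String>();
        for (String v : values) {
            seen.add(v);
        }
        return seen.size() != values.length;
    }

    public static void main(String[] args) {
        HashSet<Integer> idx = new HashSet<Integer>();
        idx.add(2);
        idx.add(3);
        idx.add(3); // duplicate collapses into one element
        System.out.println(toIndexArray(idx).length);                   // 2
        System.out.println(hasDuplicates(new String[] { "rs1", "rs1" })); // true
    }
}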
From source file:com.emc.storageos.volumecontroller.impl.plugins.IsilonCommunicationInterface.java
private HashMap<String, HashSet<Integer>> discoverAllExports(StorageSystem storageSystem,
        final List<IsilonAccessZone> isilonAccessZones) throws IsilonCollectionException {
    // Discover All FileSystem
    HashMap<String, HashSet<Integer>> allExports = new HashMap<String, HashSet<Integer>>();
    URI storageSystemId = storageSystem.getId();
    String resumeToken = null;
    try {
        _log.info("discoverAllExports for storage system {} - start", storageSystemId);
        IsilonApi isilonApi = getIsilonDevice(storageSystem);
        for (IsilonAccessZone isilonAccessZone : isilonAccessZones) {
            do {
                IsilonApi.IsilonList<IsilonExport> isilonExports = isilonApi.listExports(resumeToken,
                        isilonAccessZone.getName());
                List<IsilonExport> exports = isilonExports.getList();
                for (IsilonExport exp : exports) {
                    _log.info("Discovered fS export {}", exp.toString());
                    HashSet<Integer> exportIds = new HashSet<Integer>();
                    for (String path : exp.getPaths()) {
                        exportIds = allExports.get(path);
                        if (exportIds == null) {
                            exportIds = new HashSet<Integer>();
                        }
                        exportIds.add(exp.getId());
                        allExports.put(path, exportIds);
                        _log.debug("Discovered fS put export Path {} Export id {}", path,
                                exportIds.size() + ":" + exportIds);
                    }
                }
                resumeToken = isilonExports.getToken();
            } while (resumeToken != null);
            _log.info("discoverd All NFS Exports for access zone {} ", isilonAccessZone.getName());
            resumeToken = null;
        }
        return allExports;
    } catch (IsilonException ie) {
        _log.error("discoverAllExports failed. Storage system: {}", storageSystemId, ie);
        IsilonCollectionException ice = new IsilonCollectionException(
                "discoverAllExports failed. Storage system: " + storageSystemId);
        ice.initCause(ie);
        throw ice;
    } catch (Exception e) {
        _log.error("discoverAllExports failed. Storage system: {}", storageSystemId, e);
        IsilonCollectionException ice = new IsilonCollectionException(
                "discoverAllExports failed. Storage system: " + storageSystemId);
        ice.initCause(e);
        throw ice;
    }
}
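The discovery loop above accumulates export ids per path in a HashMap<String, HashSet<Integer>> and logs exportIds.size() as each id is added. A small, self-contained sketch of that accumulation pattern, with hypothetical in-memory data standing in for the Isilon API:

import java.util.HashMap;
import java.util.HashSet;

public class ExportIndexSketch {
    public static void main(String[] args) {
        HashMap<String, HashSet<Integer>> allExports = new HashMap<String, HashSet<Integer>>();

        // Hypothetical discovered exports as (id, path) pairs.
        int[] ids = { 1, 2, 3 };
        String[] paths = { "/ifs/data", "/ifs/data", "/ifs/home" };

        for (int i = 0; i < ids.length; i++) {
            HashSet<Integer> exportIds = allExports.get(paths[i]);
            if (exportIds == null) {
                exportIds = new HashSet<Integer>();
            }
            exportIds.add(ids[i]);
            allExports.put(paths[i], exportIds);
            // size() reports how many distinct export ids the path has collected so far.
            System.out.println("path " + paths[i] + " -> " + exportIds.size() + ": " + exportIds);
        }
    }
}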
From source file:org.kuali.kfs.module.cab.service.impl.CapitalAssetBuilderModuleServiceImpl.java
/**
 * Capital Asset validation: An item cannot have among its associated accounting lines both object codes that
 * indicate it is a Capital Asset, and object codes that indicate that the item is not a Capital Asset. Whether an
 * object code indicates that the item is a Capital Asset is determined by whether its level is among a specific
 * set of levels that are deemed acceptable for such items.
 *
 * @param capitalOrExpenseSet A HashSet containing the distinct values of either "Capital" or "Expense" that have
 *        been added to it.
 * @param warn A boolean which should be set to true if warnings are to be set on the calling document
 * @param itemIdentifier A String identifying the item for error display
 * @param objectCode An ObjectCode, for error display
 * @return True if the given HashSet contains at most one of either "Capital" or "Expense"
 */
protected boolean validateAccountingLinesNotCapitalAndExpense(HashSet<String> capitalOrExpenseSet,
        String itemIdentifier, ObjectCode objectCode) {
    boolean valid = true;
    // If the set contains more than one distinct string, fail.
    if (capitalOrExpenseSet.size() > 1) {
        GlobalVariables.getMessageMap().putError(KFSConstants.FINANCIAL_OBJECT_LEVEL_CODE_PROPERTY_NAME,
                CabKeyConstants.ERROR_ITEM_CAPITAL_AND_EXPENSE, itemIdentifier,
                objectCode.getFinancialObjectCodeName());
        valid &= false;
    }
    return valid;
}
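This validation reduces to a single size() comparison: the set holds the distinct classifications seen on an item, and more than one distinct value means the item mixed Capital and Expense lines. A minimal sketch of the same check, with hypothetical names and System.out in place of the KFS error map:

import java.util.HashSet;

public class CapitalExpenseCheckSketch {
    // At most one distinct classification is allowed per item.
    public static boolean isConsistent(HashSet<String> capitalOrExpenseSet) {
        return capitalOrExpenseSet.size() <= 1;
    }

    public static void main(String[] args) {
        HashSet<String> classifications = new HashSet<String>();
        classifications.add("Capital");
        System.out.println(isConsistent(classifications)); // true: one distinct value
        classifications.add("Expense");
        System.out.println(isConsistent(classifications)); // false: mixed Capital and Expense
    }
}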
From source file:com.ibm.bi.dml.parser.ParForStatementBlock.java
@Override public VariableSet validate(DMLProgram dmlProg, VariableSet ids, HashMap<String, ConstIdentifier> constVars, boolean conditional) throws LanguageException, ParseException, IOException { LOG.trace("PARFOR(" + _ID + "): validating ParForStatementBlock."); //create parent variable set via cloning _vsParent = new VariableSet(ids); if (LOG.isTraceEnabled()) //note: A is matrix, and A[i,1] is scalar for (DataIdentifier di : _vsParent.getVariables().values()) LOG.trace("PARFOR: non-local " + di._name + ": " + di.getDataType().toString() + " with rowDim = " + di.getDim1());/*www.j a va2 s .co m*/ //normal validate via ForStatement (sequential) //NOTES: // * validate/dependency checking of nested parfor-loops happens at this point // * validate includes also constant propagation for from, to, incr expressions // * this includes also function inlining VariableSet vs = super.validate(dmlProg, ids, constVars, conditional); //check of correctness of specified parfor parameter names and //set default parameter values for all not specified parameters ParForStatement pfs = (ParForStatement) _statements.get(0); IterablePredicate predicate = pfs.getIterablePredicate(); HashMap<String, String> params = predicate.getParForParams(); if (params != null) //if parameter specified { //check for valid parameter types for (String key : params.keySet()) if (!_paramNames.contains(key)) { //always unconditional raiseValidateError( "PARFOR: The specified parameter '" + key + "' is no valid parfor parameter.", false); } //set defaults for all non-specified values //(except if CONSTRAINT optimizer, in order to distinguish specified parameters) boolean constrained = (params.containsKey(OPT_MODE) && params.get(OPT_MODE).equals(POptMode.CONSTRAINED.toString())); for (String key : _paramNames) if (!params.containsKey(key)) { if (constrained) { params.put(key, _paramDefaults2.get(key)); } //special treatment for degree of parallelism else if (key.equals(PAR) && params.containsKey(EXEC_MODE) && params.get(EXEC_MODE).equals(PExecMode.REMOTE_MR.toString())) { int maxPMap = InfrastructureAnalyzer.getRemoteParallelMapTasks(); //correction max number of reducers on yarn clusters if (InfrastructureAnalyzer.isYarnEnabled()) maxPMap = (int) Math.max(maxPMap, YarnClusterAnalyzer.getNumCores()); params.put(key, String.valueOf(maxPMap)); } else if (key.equals(PAR) && params.containsKey(EXEC_MODE) && params.get(EXEC_MODE).equals(PExecMode.REMOTE_MR_DP.toString())) { int maxPRed = InfrastructureAnalyzer.getRemoteParallelReduceTasks(); //correction max number of reducers on yarn clusters if (InfrastructureAnalyzer.isYarnEnabled()) maxPRed = (int) Math.max(maxPRed, YarnClusterAnalyzer.getNumCores() / 2); params.put(key, String.valueOf(maxPRed)); } else //default case params.put(key, _paramDefaults.get(key)); } //check for disabled parameters values if (params.containsKey(OPT_MODE)) { String optStr = params.get(OPT_MODE); if (optStr.equals(POptMode.HEURISTIC.toString()) || optStr.equals(POptMode.GREEDY.toString()) || optStr.equals(POptMode.FULL_DP.toString())) { //always unconditional raiseValidateError( "Sorry, parfor optimization mode '" + optStr + "' is disabled for external usage.", false); } } } else { //set all defaults params = new HashMap<String, String>(); params.putAll(_paramDefaults); predicate.setParForParams(params); } //start time measurement for normalization and dependency analysis Timing time = new Timing(true); // LOOP DEPENDENCY ANALYSIS (test for dependency existence) // no false negative guaranteed, but possibly 
false positives /* Basic intuition: WRITES to NON-local variables are only permitted iff * - no data dep (no read other than own iteration w i < r j) * - no anti dep (no read other than own iteration w i > r j) * - no output dep (no write other than own iteration) * * ALGORITHM: * 1) Determine candidates C (writes to non-local variables) * 2) Prune all c from C where no dependencies --> C' * 3) Raise an exception/warning if C' not the empty set * * RESTRICTIONS: * - array subscripts of non-local variables must be linear functions of the form * a0+ a1*i + ... + a2*j, where i and j are for or parfor indexes. * - for and parfor increments must be integer values * - only static (integer lower, upper bounds) range indexing * - only input variables considered as potential candidates for checking * * (TODO: in order to remove the last restriction, dependencies must be checked again after * live variable analysis against LIVEOUT) * * NOTE: validity is only checked during compilation, i.e., for dynamic from, to, incr MIN MAX values assumed. */ LOG.trace("PARFOR: running loop dependency analysis ..."); //### Step 1 ###: determine candidate set C HashSet<Candidate> C = new HashSet<Candidate>(); HashSet<Candidate> C2 = new HashSet<Candidate>(); Integer sCount = 0; //object for call by ref rDetermineCandidates(pfs.getBody(), C, sCount); boolean check = (Integer.parseInt(params.get(CHECK)) == 1); if (check) { //### Step 2 ###: prune c without dependencies _bounds = new Bounds(); for (FunctionStatementBlock fsb : dmlProg.getFunctionStatementBlocks()) rDetermineBounds(fsb, false); //writes to _bounds rDetermineBounds(dmlProg.getStatementBlocks(), false); //writes to _bounds for (Candidate c : C) { DataType cdt = _vsParent.getVariables().get(c._var).getDataType(); //might be different in DataIdentifier //assume no dependency sCount = 0; boolean[] dep = new boolean[] { false, false, false }; //output, data, anti rCheckCandidates(c, cdt, pfs.getBody(), sCount, dep); if (LOG.isTraceEnabled()) { if (dep[0]) LOG.trace("PARFOR: output dependency detected for var '" + c._var + "'."); if (dep[1]) LOG.trace("PARFOR: data dependency detected for var '" + c._var + "'."); if (dep[2]) LOG.trace("PARFOR: anti dependency detected for var '" + c._var + "'."); } if (dep[0] || dep[1] || dep[2]) { C2.add(c); if (ABORT_ON_FIRST_DEPENDENCY) break; } } //### Step 3 ###: raise an exception / warning if (C2.size() > 0) { LOG.trace("PARFOR: loop dependencies detected."); StringBuilder depVars = new StringBuilder(); for (Candidate c : C2) { if (depVars.length() > 0) depVars.append(", "); depVars.append(c._var); } //always unconditional (to ensure we always raise dependency issues) raiseValidateError( "PARFOR loop dependency analysis: " + "inter-iteration (loop-carried) dependencies detected for variable(s): " + depVars.toString() + ". 
\n " + "Please, ensure independence of iterations.", false); } else { LOG.trace("PARFOR: no loop dependencies detected."); } } else { LOG.debug("INFO: PARFOR(" + _ID + "): loop dependency analysis skipped."); } //if successful, prepare result variables (all distinct vars in all candidates) //a) add own candidates for (Candidate var : C) if (check || var._dat.getDataType() != DataType.SCALAR) addToResultVariablesNoDup(var._var); //b) get and add child result vars (if required) ArrayList<String> tmp = new ArrayList<String>(); rConsolidateResultVars(pfs.getBody(), tmp); for (String var : tmp) if (_vsParent.containsVariable(var)) addToResultVariablesNoDup(var); if (LDEBUG) for (String rvar : _resultVars) LOG.debug("INFO: PARFOR final result variable: " + rvar); //cleanup function cache in order to prevent side effects between parfor statements if (USE_FN_CACHE) _fncache.clear(); LOG.debug("INFO: PARFOR(" + _ID + "): validate successful (no dependencies) in " + time.stop() + "ms."); return vs; }
From source file:net.countercraft.movecraft.async.translation.TranslationTask.java
@Override public void excecute() { MovecraftLocation[] blocksList = data.getBlockList(); final int[] fallThroughBlocks = new int[] { 0, 8, 9, 10, 11, 31, 37, 38, 39, 40, 50, 51, 55, 59, 63, 65, 68, 69, 70, 72, 75, 76, 77, 78, 83, 85, 93, 94, 111, 141, 142, 143, 171 }; // blockedByWater=false means an ocean-going vessel boolean waterCraft = !getCraft().getType().blockedByWater(); boolean hoverCraft = getCraft().getType().getCanHover(); boolean airCraft = getCraft().getType().blockedByWater(); int hoverLimit = getCraft().getType().getHoverLimit(); Player craftPilot = CraftManager.getInstance().getPlayerFromCraft(getCraft()); int[][][] hb = getCraft().getHitBox(); if (hb == null) return;// w w w . jav a2s . com // start by finding the crafts borders int minY = 65535; int maxY = -65535; for (int[][] i1 : hb) { for (int[] i2 : i1) { if (i2 != null) { if (i2[0] < minY) { minY = i2[0]; } if (i2[1] > maxY) { maxY = i2[1]; } } } } int maxX = getCraft().getMinX() + hb.length; int maxZ = getCraft().getMinZ() + hb[0].length; // safe because if the first x array doesn't have a z array, then it wouldn't be the first x array int minX = getCraft().getMinX(); int minZ = getCraft().getMinZ(); // treat sinking crafts specially if (getCraft().getSinking()) { waterCraft = true; hoverCraft = false; } if (getCraft().getDisabled() && (!getCraft().getSinking())) { fail(String.format(I18nSupport.getInternationalisedString("Craft is disabled!"))); } // check the maxheightaboveground limitation, move 1 down if that limit is exceeded if (getCraft().getType().getMaxHeightAboveGround() > 0 && data.getDy() >= 0) { int x = getCraft().getMaxX() + getCraft().getMinX(); x = x >> 1; int y = getCraft().getMaxY(); int z = getCraft().getMaxZ() + getCraft().getMinZ(); z = z >> 1; int cy = getCraft().getMinY(); boolean done = false; while (!done) { cy = cy - 1; if (getCraft().getW().getBlockTypeIdAt(x, cy, z) != 0) done = true; if (cy <= 1) done = true; } if (y - cy > getCraft().getType().getMaxHeightAboveGround()) { data.setDy(-1); } } // Find the waterline from the surrounding terrain or from the static level in the craft type int waterLine = 0; if (waterCraft) { if (getCraft().getType().getStaticWaterLevel() != 0) { if (waterLine <= maxY + 1) { waterLine = getCraft().getType().getStaticWaterLevel(); } } else { // figure out the water level by examining blocks next to the outer boundaries of the craft for (int posY = maxY + 1; (posY >= minY - 1) && (waterLine == 0); posY--) { int numWater = 0; int numAir = 0; int posX; int posZ; posZ = minZ - 1; for (posX = minX - 1; (posX <= maxX + 1) && (waterLine == 0); posX++) { int typeID = getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId(); if (typeID == 9) numWater++; if (typeID == 0) numAir++; } posZ = maxZ + 1; for (posX = minX - 1; (posX <= maxX + 1) && (waterLine == 0); posX++) { int typeID = getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId(); if (typeID == 9) numWater++; if (typeID == 0) numAir++; } posX = minX - 1; for (posZ = minZ; (posZ <= maxZ) && (waterLine == 0); posZ++) { int typeID = getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId(); if (typeID == 9) numWater++; if (typeID == 0) numAir++; } posX = maxX + 1; for (posZ = minZ; (posZ <= maxZ) && (waterLine == 0); posZ++) { int typeID = getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId(); if (typeID == 9) numWater++; if (typeID == 0) numAir++; } if (numWater > numAir) { waterLine = posY; } } } // now add all the air blocks found within the craft's hitbox immediately above the waterline and below to 
the craft blocks so they will be translated HashSet<MovecraftLocation> newHSBlockList = new HashSet<MovecraftLocation>(Arrays.asList(blocksList)); int posY = waterLine + 1; for (int posX = minX; posX < maxX; posX++) { for (int posZ = minZ; posZ < maxZ; posZ++) { if (hb[posX - minX] != null) { if (hb[posX - minX][posZ - minZ] != null) { if (getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 0 && posY > hb[posX - minX][posZ - minZ][0] && posY < hb[posX - minX][posZ - minZ][1]) { MovecraftLocation l = new MovecraftLocation(posX, posY, posZ); newHSBlockList.add(l); } } } } } // dont check the hitbox for the underwater portion. Otherwise open-hulled ships would flood. for (posY = waterLine; posY >= minY; posY--) { for (int posX = minX; posX < maxX; posX++) { for (int posZ = minZ; posZ < maxZ; posZ++) { if (getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 0) { MovecraftLocation l = new MovecraftLocation(posX, posY, posZ); newHSBlockList.add(l); } } } } blocksList = newHSBlockList.toArray(new MovecraftLocation[newHSBlockList.size()]); } // check for fuel, burn some from a furnace if needed. Blocks of coal are supported, in addition to coal and charcoal double fuelBurnRate = getCraft().getType().getFuelBurnRate(); // going down doesn't require fuel if (data.getDy() == -1 && data.getDx() == 0 && data.getDz() == 0) fuelBurnRate = 0.0; if (fuelBurnRate != 0.0 && getCraft().getSinking() == false) { if (getCraft().getBurningFuel() < fuelBurnRate) { Block fuelHolder = null; for (MovecraftLocation bTest : blocksList) { Block b = getCraft().getW().getBlockAt(bTest.getX(), bTest.getY(), bTest.getZ()); if (b.getTypeId() == 61) { InventoryHolder inventoryHolder = (InventoryHolder) b.getState(); if (inventoryHolder.getInventory().contains(263) || inventoryHolder.getInventory().contains(173)) { fuelHolder = b; } } } if (fuelHolder == null) { fail(String.format( I18nSupport.getInternationalisedString("Translation - Failed Craft out of fuel"))); } else { InventoryHolder inventoryHolder = (InventoryHolder) fuelHolder.getState(); if (inventoryHolder.getInventory().contains(263)) { ItemStack iStack = inventoryHolder.getInventory() .getItem(inventoryHolder.getInventory().first(263)); int amount = iStack.getAmount(); if (amount == 1) { inventoryHolder.getInventory().remove(iStack); } else { iStack.setAmount(amount - 1); } getCraft().setBurningFuel(getCraft().getBurningFuel() + 7.0); } else { ItemStack iStack = inventoryHolder.getInventory() .getItem(inventoryHolder.getInventory().first(173)); int amount = iStack.getAmount(); if (amount == 1) { inventoryHolder.getInventory().remove(iStack); } else { iStack.setAmount(amount - 1); } getCraft().setBurningFuel(getCraft().getBurningFuel() + 79.0); } } } else { getCraft().setBurningFuel(getCraft().getBurningFuel() - fuelBurnRate); } } List<MovecraftLocation> tempBlockList = new ArrayList<MovecraftLocation>(); HashSet<MovecraftLocation> existingBlockSet = new HashSet<MovecraftLocation>(Arrays.asList(blocksList)); HashSet<EntityUpdateCommand> entityUpdateSet = new HashSet<EntityUpdateCommand>(); Set<MapUpdateCommand> updateSet = new HashSet<MapUpdateCommand>(); data.setCollisionExplosion(false); Set<MapUpdateCommand> explosionSet = new HashSet<MapUpdateCommand>(); List<Material> harvestBlocks = getCraft().getType().getHarvestBlocks(); List<MovecraftLocation> harvestedBlocks = new ArrayList<MovecraftLocation>(); List<MovecraftLocation> destroyedBlocks = new ArrayList<MovecraftLocation>(); List<Material> harvesterBladeBlocks = 
getCraft().getType().getHarvesterBladeBlocks(); int hoverOver = data.getDy(); int craftMinY = 0; int craftMaxY = 0; boolean clearNewData = false; boolean hoverUseGravity = getCraft().getType().getUseGravity(); boolean checkHover = (data.getDx() != 0 || data.getDz() != 0);// we want to check only horizontal moves boolean canHoverOverWater = getCraft().getType().getCanHoverOverWater(); boolean townyEnabled = Movecraft.getInstance().getTownyPlugin() != null; boolean explosionBlockedByTowny = false; boolean moveBlockedByTowny = false; boolean validateTownyExplosion = false; String townName = ""; Set<TownBlock> townBlockSet = new HashSet<TownBlock>(); TownyWorld townyWorld = null; TownyWorldHeightLimits townyWorldHeightLimits = null; if (townyEnabled && Settings.TownyBlockMoveOnSwitchPerm) { townyWorld = TownyUtils.getTownyWorld(getCraft().getW()); if (townyWorld != null) { townyEnabled = townyWorld.isUsingTowny(); if (townyEnabled) { townyWorldHeightLimits = TownyUtils.getWorldLimits(getCraft().getW()); if (getCraft().getType().getCollisionExplosion() != 0.0F) { validateTownyExplosion = true; } } } } else { townyEnabled = false; } for (int i = 0; i < blocksList.length; i++) { MovecraftLocation oldLoc = blocksList[i]; MovecraftLocation newLoc = oldLoc.translate(data.getDx(), data.getDy(), data.getDz()); if (newLoc.getY() > data.getMaxHeight() && newLoc.getY() > oldLoc.getY()) { fail(String.format( I18nSupport.getInternationalisedString("Translation - Failed Craft hit height limit"))); break; } else if (newLoc.getY() < data.getMinHeight() && newLoc.getY() < oldLoc.getY() && getCraft().getSinking() == false) { fail(String.format(I18nSupport .getInternationalisedString("Translation - Failed Craft hit minimum height limit"))); break; } boolean blockObstructed = false; boolean harvestBlock = false; boolean bladeOK = true; Material testMaterial; Location plugLoc = new Location(getCraft().getW(), newLoc.getX(), newLoc.getY(), newLoc.getZ()); if (craftPilot != null) { // See if they are permitted to build in the area, if WorldGuard integration is turned on if (Movecraft.getInstance().getWorldGuardPlugin() != null && Settings.WorldGuardBlockMoveOnBuildPerm) { if (Movecraft.getInstance().getWorldGuardPlugin().canBuild(craftPilot, plugLoc) == false) { fail(String.format(I18nSupport.getInternationalisedString( "Translation - Failed Player is not permitted to build in this WorldGuard region") + " @ %d,%d,%d", oldLoc.getX(), oldLoc.getY(), oldLoc.getZ())); break; } } } Player p; if (craftPilot == null) { p = getCraft().getNotificationPlayer(); } else { p = craftPilot; } if (p != null) { if (Movecraft.getInstance().getWorldGuardPlugin() != null && Movecraft.getInstance().getWGCustomFlagsPlugin() != null && Settings.WGCustomFlagsUsePilotFlag) { LocalPlayer lp = Movecraft.getInstance().getWorldGuardPlugin().wrapPlayer(p); WGCustomFlagsUtils WGCFU = new WGCustomFlagsUtils(); if (!WGCFU.validateFlag(plugLoc, Movecraft.FLAG_MOVE, lp)) { fail(String .format(I18nSupport.getInternationalisedString("WGCustomFlags - Translation Failed") + " @ %d,%d,%d", oldLoc.getX(), oldLoc.getY(), oldLoc.getZ())); break; } } if (townyEnabled) { TownBlock townBlock = TownyUtils.getTownBlock(plugLoc); if (townBlock != null && !townBlockSet.contains(townBlock)) { if (validateTownyExplosion) { if (!explosionBlockedByTowny) { if (!TownyUtils.validateExplosion(townBlock)) { explosionBlockedByTowny = true; } } } if (TownyUtils.validateCraftMoveEvent(p, plugLoc, townyWorld)) { townBlockSet.add(townBlock); } else { int y = 
plugLoc.getBlockY(); boolean oChange = false; if (craftMinY > y) { craftMinY = y; oChange = true; } if (craftMaxY < y) { craftMaxY = y; oChange = true; } if (oChange) { boolean failed = false; Town town = TownyUtils.getTown(townBlock); if (town != null) { Location locSpawn = TownyUtils.getTownSpawn(townBlock); if (locSpawn != null) { if (!townyWorldHeightLimits.validate(y, locSpawn.getBlockY())) { failed = true; } } else { failed = true; } if (failed) { if (Movecraft.getInstance().getWorldGuardPlugin() != null && Movecraft.getInstance().getWGCustomFlagsPlugin() != null && Settings.WGCustomFlagsUsePilotFlag) { LocalPlayer lp = Movecraft.getInstance().getWorldGuardPlugin() .wrapPlayer(p); ApplicableRegionSet regions = Movecraft.getInstance() .getWorldGuardPlugin().getRegionManager(plugLoc.getWorld()) .getApplicableRegions(plugLoc); if (regions.size() != 0) { WGCustomFlagsUtils WGCFU = new WGCustomFlagsUtils(); if (WGCFU.validateFlag(plugLoc, Movecraft.FLAG_MOVE, lp)) { failed = false; } } } } if (failed) { townName = town.getName(); moveBlockedByTowny = true; } } } } } } } //check for chests around testMaterial = getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()).getType(); if (testMaterial.equals(Material.CHEST) || testMaterial.equals(Material.TRAPPED_CHEST)) { if (!checkChests(testMaterial, newLoc, existingBlockSet)) { //prevent chests collision fail(String.format( I18nSupport.getInternationalisedString("Translation - Failed Craft is obstructed") + " @ %d,%d,%d,%s", newLoc.getX(), newLoc.getY(), newLoc.getZ(), getCraft().getW() .getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()).getType().toString())); break; } } if (getCraft().getSinking()) { int testID = getCraft().getW().getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()).getTypeId(); blockObstructed = !(Arrays.binarySearch(fallThroughBlocks, testID) >= 0) && !existingBlockSet.contains(newLoc); } else if (!waterCraft) { // New block is not air or a piston head and is not part of the existing ship testMaterial = getCraft().getW().getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()).getType(); blockObstructed = (!testMaterial.equals(Material.AIR)) && !existingBlockSet.contains(newLoc); } else { // New block is not air or water or a piston head and is not part of the existing ship testMaterial = getCraft().getW().getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()).getType(); blockObstructed = (!testMaterial.equals(Material.AIR) && !testMaterial.equals(Material.STATIONARY_WATER) && !testMaterial.equals(Material.WATER)) && !existingBlockSet.contains(newLoc); } boolean ignoreBlock = false; // air never obstructs anything (changed 4/18/2017 to prevent drilling machines) if (getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()).getType() .equals(Material.AIR) && blockObstructed) { ignoreBlock = true; // blockObstructed=false; } testMaterial = getCraft().getW().getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()).getType(); if (blockObstructed) { if (hoverCraft || harvestBlocks.size() > 0) { // New block is not harvested block if (harvestBlocks.contains(testMaterial) && !existingBlockSet.contains(newLoc)) { Material tmpType = getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()) .getType(); if (harvesterBladeBlocks.size() > 0) { if (!harvesterBladeBlocks.contains(tmpType)) { bladeOK = false; } } if (bladeOK) { blockObstructed = false; harvestBlock = true; tryPutToDestroyBox(testMaterial, newLoc, harvestedBlocks, destroyedBlocks); harvestedBlocks.add(newLoc); } } } 
} if (blockObstructed || moveBlockedByTowny) { if (hoverCraft && checkHover) { //we check one up ever, if it is hovercraft and one down if it's using gravity if (hoverOver == 0 && newLoc.getY() + 1 <= data.getMaxHeight()) { //first was checked actual level, now check if we can go up hoverOver = 1; data.setDy(1); clearNewData = true; } else if (hoverOver >= 1) { //check other options to go up if (hoverOver < hoverLimit + 1 && newLoc.getY() + 1 <= data.getMaxHeight()) { data.setDy(hoverOver + 1); hoverOver += 1; clearNewData = true; } else { if (hoverUseGravity && newLoc.getY() - hoverOver - 1 >= data.getMinHeight()) { //we are on the maximum of top //if we can't go up so we test bottom side data.setDy(-1); hoverOver = -1; } else { // no way - back to original dY, turn off hovercraft for this move // and get original data again for all explosions data.setDy(0); hoverOver = 0; hoverCraft = false; hoverUseGravity = false; } clearNewData = true; } } else if (hoverOver <= -1) { //we cant go down for 1 block, check more to hoverLimit if (hoverOver > -hoverLimit - 1 && newLoc.getY() - 1 >= data.getMinHeight()) { data.setDy(hoverOver - 1); hoverOver -= 1; clearNewData = true; } else { // no way - back to original dY, turn off hovercraft for this move // and get original data again for all explosions data.setDy(0); hoverOver = 0; hoverUseGravity = false; clearNewData = true; hoverCraft = false; } } else { // no way - reached MaxHeight during looking new way upstairss if (hoverUseGravity && newLoc.getY() - 1 >= data.getMinHeight()) { //we are on the maximum of top //if we can't go up so we test bottom side data.setDy(-1); hoverOver = -1; } else { // - back to original dY, turn off hovercraft for this move // and get original data again for all explosions data.setDy(0); hoverOver = 0; hoverUseGravity = false; hoverCraft = false; } clearNewData = true; } // End hovercraft stuff } else { // handle sinking ship collisions if (getCraft().getSinking()) { if (getCraft().getType().getExplodeOnCrash() != 0.0F && !explosionBlockedByTowny) { int explosionKey = (int) (0 - (getCraft().getType().getExplodeOnCrash() * 100)); if (System.currentTimeMillis() - getCraft().getOrigPilotTime() > 1000) if (!getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()) .getType().equals(Material.AIR)) { explosionSet .add(new MapUpdateCommand(oldLoc, explosionKey, (byte) 0, getCraft())); data.setCollisionExplosion(true); } } else { // use the explosion code to clean up the craft, but not with enough force to do anything int explosionKey = 0 - 1; if (!getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()).getType() .equals(Material.AIR)) { explosionSet.add(new MapUpdateCommand(oldLoc, explosionKey, (byte) 0, getCraft())); data.setCollisionExplosion(true); } } } else { // Explode if the craft is set to have a CollisionExplosion. 
Also keep moving for spectacular ramming collisions if (getCraft().getType().getCollisionExplosion() == 0.0F) { if (moveBlockedByTowny) { fail(String.format( I18nSupport.getInternationalisedString("Towny - Translation Failed") + " %s @ %d,%d,%d", townName, oldLoc.getX(), oldLoc.getY(), oldLoc.getZ())); } else { fail(String.format( I18nSupport.getInternationalisedString( "Translation - Failed Craft is obstructed") + " @ %d,%d,%d,%s", oldLoc.getX(), oldLoc.getY(), oldLoc.getZ(), getCraft().getW().getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()) .getType().toString())); if (getCraft().getNotificationPlayer() != null) { Location location = getCraft().getNotificationPlayer().getLocation(); } } break; } else if (explosionBlockedByTowny) { int explosionKey = 0 - 1; if (!getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()).getType() .equals(Material.AIR)) { explosionSet.add(new MapUpdateCommand(oldLoc, explosionKey, (byte) 0, getCraft())); data.setCollisionExplosion(true); } } else if (System.currentTimeMillis() - getCraft().getOrigPilotTime() > 1000) { int explosionKey; float explosionForce = getCraft().getType().getCollisionExplosion(); if (getCraft().getType().getFocusedExplosion() == true) { explosionForce = explosionForce * getCraft().getBlockList().length; } if (oldLoc.getY() < waterLine) { // underwater explosions require more force to do anything explosionForce += 25; } explosionKey = (int) (0 - (explosionForce * 100)); if (!getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()).getType() .equals(Material.AIR)) { explosionSet.add(new MapUpdateCommand(oldLoc, explosionKey, (byte) 0, getCraft())); data.setCollisionExplosion(true); } if (getCraft().getType().getFocusedExplosion() == true) { // don't handle any further collisions if it is set to focusedexplosion break; } } } } } else { //block not obstructed int oldID = getCraft().getW().getBlockTypeIdAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()); byte oldData = getCraft().getW().getBlockAt(oldLoc.getX(), oldLoc.getY(), oldLoc.getZ()).getData(); int currentID = getCraft().getW().getBlockTypeIdAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()); byte currentData = getCraft().getW().getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()) .getData(); // remove water from sinking crafts if (getCraft().getSinking()) { if ((oldID == 8 || oldID == 9) && oldLoc.getY() > waterLine) oldID = 0; } if (!ignoreBlock) { updateSet.add(new MapUpdateCommand(oldLoc, currentID, currentData, newLoc, oldID, oldData, getCraft())); tempBlockList.add(newLoc); } if (i == blocksList.length - 1) { if ((hoverCraft && hoverUseGravity) || (hoverUseGravity && newLoc.getY() > data.getMaxHeight() && hoverOver == 0)) { //hovecraft using gravity or something else using gravity and flying over its limit int iFreeSpace = 0; //canHoverOverWater adds 1 to dY for better check water under craft // best way should be expand selected region to each first blocks under craft if (hoverOver == 0) { //we go directly forward so we check if we can go down for (int ii = -1; ii > -hoverLimit - 2 - (canHoverOverWater ? 0 : 1); ii--) { if (!isFreeSpace(data.getDx(), hoverOver + ii, data.getDz(), blocksList, existingBlockSet, waterCraft, hoverCraft, harvestBlocks, canHoverOverWater, checkHover)) { break; } iFreeSpace++; } if (data.failed()) { break; } if (iFreeSpace > hoverLimit - (canHoverOverWater ? 
0 : 1)) { data.setDy(-1); hoverOver = -1; clearNewData = true; } } else if (hoverOver == 1 && !airCraft) { //prevent fly heigher than hoverLimit for (int ii = -1; ii > -hoverLimit - 2; ii--) { if (!isFreeSpace(data.getDx(), hoverOver + ii, data.getDz(), blocksList, existingBlockSet, waterCraft, hoverCraft, harvestBlocks, canHoverOverWater, checkHover)) { break; } iFreeSpace++; } if (data.failed()) { break; } if (iFreeSpace > hoverLimit) { if (bladeOK) { fail(String.format(I18nSupport.getInternationalisedString( "Translation - Failed Craft hit height limit"))); } else { fail(String.format( I18nSupport.getInternationalisedString( "Translation - Failed Craft is obstructed") + " @ %d,%d,%d,%s", oldLoc.getX(), oldLoc.getY(), oldLoc.getZ(), getCraft().getW() .getBlockAt(newLoc.getX(), newLoc.getY(), newLoc.getZ()) .getType().toString())); } break; } } else if (hoverOver > 1) { //prevent jump thru block for (int ii = 1; ii < hoverOver - 1; ii++) { if (!isFreeSpace(0, ii, 0, blocksList, existingBlockSet, waterCraft, hoverCraft, harvestBlocks, canHoverOverWater, checkHover)) { break; } iFreeSpace++; } if (data.failed()) { break; } if (iFreeSpace + 2 < hoverOver) { data.setDy(-1); hoverOver = -1; clearNewData = true; } } else if (hoverOver < -1) { //prevent jump thru block for (int ii = -1; ii > hoverOver + 1; ii--) { if (!isFreeSpace(0, ii, 0, blocksList, existingBlockSet, waterCraft, hoverCraft, harvestBlocks, canHoverOverWater, checkHover)) { break; } iFreeSpace++; } if (data.failed()) { break; } if (iFreeSpace + 2 < -hoverOver) { data.setDy(0); hoverOver = 0; hoverCraft = false; clearNewData = true; } } if (!canHoverOverWater) { if (hoverOver >= 1) { //others hoverOver values we have checked jet for (int ii = hoverOver - 1; ii > hoverOver - hoverLimit - 2; ii--) { if (!isFreeSpace(0, ii, 0, blocksList, existingBlockSet, waterCraft, hoverCraft, harvestBlocks, canHoverOverWater, checkHover)) { break; } iFreeSpace++; } if (data.failed()) { break; } } } } } } //END OF: if (blockObstructed) if (clearNewData) { i = -1; tempBlockList.clear(); updateSet.clear(); harvestedBlocks.clear(); data.setCollisionExplosion(false); explosionSet.clear(); clearNewData = false; townBlockSet.clear(); craftMinY = 0; craftMaxY = 0; } } //END OF: for ( int i = 0; i < blocksList.length; i++ ) { // now move the scheduled block changes along with the ship HashMap<MapUpdateCommand, Long> newScheduledBlockChanges = new HashMap<MapUpdateCommand, Long>(); HashMap<MapUpdateCommand, Long> oldScheduledBlockChanges = getCraft().getScheduledBlockChanges(); if (oldScheduledBlockChanges != null) { for (MapUpdateCommand muc : oldScheduledBlockChanges.keySet()) { MovecraftLocation newLoc = muc.getNewBlockLocation().translate(data.getDx(), data.getDy(), data.getDz()); // Long newTime=oldScheduledBlockChanges.get(muc); Long newTime = System.currentTimeMillis() + 5000; MapUpdateCommand newMuc = new MapUpdateCommand(newLoc, muc.getTypeID(), muc.getDataID(), getCraft()); newScheduledBlockChanges.put(newMuc, newTime); } data.setScheduledBlockChanges(newScheduledBlockChanges); } if (data.collisionExplosion()) { // mark the craft to check for sinking, remove the exploding blocks from the blocklist, and submit the explosions for map update for (MapUpdateCommand m : explosionSet) { if (existingBlockSet.contains(m.getNewBlockLocation())) { existingBlockSet.remove(m.getNewBlockLocation()); if (Settings.FadeWrecksAfter > 0) { int typeID = getCraft().getW().getBlockAt(m.getNewBlockLocation().getX(), m.getNewBlockLocation().getY(), 
m.getNewBlockLocation().getZ()).getTypeId(); if (typeID != 0 && typeID != 9) { Movecraft.getInstance().blockFadeTimeMap.put(m.getNewBlockLocation(), System.currentTimeMillis()); Movecraft.getInstance().blockFadeTypeMap.put(m.getNewBlockLocation(), typeID); if (m.getNewBlockLocation().getY() <= waterLine) { Movecraft.getInstance().blockFadeWaterMap.put(m.getNewBlockLocation(), true); } else { Movecraft.getInstance().blockFadeWaterMap.put(m.getNewBlockLocation(), false); } Movecraft.getInstance().blockFadeWorldMap.put(m.getNewBlockLocation(), getCraft().getW()); } } } // if the craft is sinking, remove all solid blocks above the one that hit the ground from the craft for smoothing sinking if (getCraft().getSinking() == true && (getCraft().getType().getExplodeOnCrash() == 0.0 || explosionBlockedByTowny)) { int posy = m.getNewBlockLocation().getY() + 1; int testID = getCraft().getW() .getBlockAt(m.getNewBlockLocation().getX(), posy, m.getNewBlockLocation().getZ()) .getTypeId(); while (posy <= maxY && !(Arrays.binarySearch(fallThroughBlocks, testID) >= 0)) { MovecraftLocation testLoc = new MovecraftLocation(m.getNewBlockLocation().getX(), posy, m.getNewBlockLocation().getZ()); if (existingBlockSet.contains(testLoc)) { existingBlockSet.remove(testLoc); if (Settings.FadeWrecksAfter > 0) { int typeID = getCraft().getW() .getBlockAt(testLoc.getX(), testLoc.getY(), testLoc.getZ()).getTypeId(); if (typeID != 0 && typeID != 9) { Movecraft.getInstance().blockFadeTimeMap.put(testLoc, System.currentTimeMillis()); Movecraft.getInstance().blockFadeTypeMap.put(testLoc, typeID); if (testLoc.getY() <= waterLine) { Movecraft.getInstance().blockFadeWaterMap.put(testLoc, true); } else { Movecraft.getInstance().blockFadeWaterMap.put(testLoc, false); } Movecraft.getInstance().blockFadeWorldMap.put(testLoc, getCraft().getW()); } } } posy = posy + 1; testID = getCraft().getW() .getBlockAt(m.getNewBlockLocation().getX(), posy, m.getNewBlockLocation().getZ()) .getTypeId(); } } } MovecraftLocation[] newBlockList = (MovecraftLocation[]) existingBlockSet .toArray(new MovecraftLocation[0]); data.setBlockList(newBlockList); data.setUpdates(explosionSet.toArray(new MapUpdateCommand[1])); fail(String.format(I18nSupport.getInternationalisedString("Translation - Failed Craft is obstructed"))); if (getCraft().getSinking() == false) { // FROG changed from ==true, think that was a typo if (getCraft().getType().getSinkPercent() != 0.0) { getCraft().setLastBlockCheck(0); } getCraft().setLastCruisUpdate(System.currentTimeMillis() - 30000); } } if (!data.failed()) { MovecraftLocation[] newBlockList = (MovecraftLocation[]) tempBlockList .toArray(new MovecraftLocation[0]); data.setBlockList(newBlockList); //prevents torpedo and rocket pilots :) if (getCraft().getType().getMoveEntities() && getCraft().getSinking() == false) { // Move entities within the craft List<Entity> eList = null; int numTries = 0; while ((eList == null) && (numTries < 100)) { try { eList = getCraft().getW().getEntities(); } catch (java.util.ConcurrentModificationException e) { numTries++; } } Iterator<Entity> i = eList.iterator(); while (i.hasNext()) { Entity pTest = i.next(); // if ( MathUtils.playerIsWithinBoundingPolygon( getCraft().getHitBox(), getCraft().getMinX(), getCraft().getMinZ(), MathUtils.bukkit2MovecraftLoc( pTest.getLocation() ) ) ) { if (MathUtils.locIsNearCraftFast(getCraft(), MathUtils.bukkit2MovecraftLoc(pTest.getLocation()))) { if (pTest.getType() == org.bukkit.entity.EntityType.PLAYER) { Player player = (Player) pTest; 
getCraft().getMovedPlayers().put(player, System.currentTimeMillis()); Location tempLoc = pTest.getLocation(); // Direct control no longer locks the player in place // if(getCraft().getPilotLocked()==true && pTest==CraftManager.getInstance().getPlayerFromCraft(getCraft())) { // tempLoc.setX(getCraft().getPilotLockedX()); // tempLoc.setY(getCraft().getPilotLockedY()); // tempLoc.setZ(getCraft().getPilotLockedZ()); // } tempLoc = tempLoc.add(data.getDx(), data.getDy(), data.getDz()); Location newPLoc = new Location(getCraft().getW(), tempLoc.getX(), tempLoc.getY(), tempLoc.getZ()); newPLoc.setPitch(pTest.getLocation().getPitch()); newPLoc.setYaw(pTest.getLocation().getYaw()); EntityUpdateCommand eUp = new EntityUpdateCommand(pTest.getLocation().clone(), newPLoc, pTest); entityUpdateSet.add(eUp); // if(getCraft().getPilotLocked()==true && pTest==CraftManager.getInstance().getPlayerFromCraft(getCraft())) { // getCraft().setPilotLockedX(tempLoc.getX()); // getCraft().setPilotLockedY(tempLoc.getY()); // getCraft().setPilotLockedZ(tempLoc.getZ()); // } } if (pTest.getType() == org.bukkit.entity.EntityType.PRIMED_TNT) { Entity ent = (Entity) pTest; Location tempLoc = pTest.getLocation(); tempLoc = tempLoc.add(data.getDx(), data.getDy(), data.getDz()); EntityUpdateCommand eUp = new EntityUpdateCommand(pTest.getLocation().clone(), tempLoc, pTest); entityUpdateSet.add(eUp); } } } } else { //add releaseTask without playermove to manager if (getCraft().getType().getCruiseOnPilot() == false && getCraft().getSinking() == false) // not necessary to release cruiseonpilot crafts, because they will already be released CraftManager.getInstance().addReleaseTask(getCraft()); } // remove water near sinking crafts if (getCraft().getSinking()) { int posX; int posY = maxY; int posZ; if (posY > waterLine) { for (posX = minX - 1; posX <= maxX + 1; posX++) { for (posZ = minZ - 1; posZ <= maxZ + 1; posZ++) { if (getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 9 || getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 8) { MovecraftLocation loc = new MovecraftLocation(posX, posY, posZ); updateSet.add(new MapUpdateCommand(loc, 0, (byte) 0, getCraft())); } } } } for (posY = maxY + 1; (posY >= minY - 1) && (posY > waterLine); posY--) { posZ = minZ - 1; for (posX = minX - 1; posX <= maxX + 1; posX++) { if (getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 9 || getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 8) { MovecraftLocation loc = new MovecraftLocation(posX, posY, posZ); updateSet.add(new MapUpdateCommand(loc, 0, (byte) 0, getCraft())); } } posZ = maxZ + 1; for (posX = minX - 1; posX <= maxX + 1; posX++) { if (getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 9 || getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 8) { MovecraftLocation loc = new MovecraftLocation(posX, posY, posZ); updateSet.add(new MapUpdateCommand(loc, 0, (byte) 0, getCraft())); } } posX = minX - 1; for (posZ = minZ - 1; posZ <= maxZ + 1; posZ++) { if (getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 9 || getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 8) { MovecraftLocation loc = new MovecraftLocation(posX, posY, posZ); updateSet.add(new MapUpdateCommand(loc, 0, (byte) 0, getCraft())); } } posX = maxX + 1; for (posZ = minZ - 1; posZ <= maxZ + 1; posZ++) { if (getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 9 || getCraft().getW().getBlockAt(posX, posY, posZ).getTypeId() == 8) { MovecraftLocation loc = new MovecraftLocation(posX, posY, 
posZ); updateSet.add(new MapUpdateCommand(loc, 0, (byte) 0, getCraft())); } } } } //Set blocks that are no longer craft to air // /********************************************************************************************************** // * I had problems with ListUtils (I tryied commons-collections 3.2.1. and 4.0 without success) // * so I replaced Lists with Sets // * // * Caused by: java.lang.NoClassDefFoundError: org/apache/commons/collections/ListUtils // * at net.countercraft.movecraft.async.translation.TranslationTask.excecute(TranslationTask.java:716) // * mwkaicz 24-02-2015 // ***********************************************************************************************************/ // Set<MovecraftLocation> setA = new HashSet(Arrays.asList(blocksList)); // Set<MovecraftLocation> setB = new HashSet(Arrays.asList(newBlockList)); // setA.removeAll(setB); // MovecraftLocation[] arrA = new MovecraftLocation[0]; // arrA = setA.toArray(arrA); // List<MovecraftLocation> airLocation = Arrays.asList(arrA); List<MovecraftLocation> airLocation = ListUtils.subtract(Arrays.asList(blocksList), Arrays.asList(newBlockList)); for (MovecraftLocation l1 : airLocation) { // for watercraft, fill blocks below the waterline with water if (!waterCraft) { if (getCraft().getSinking()) { updateSet.add(new MapUpdateCommand(l1, 0, (byte) 0, getCraft(), getCraft().getType().getSmokeOnSink())); } else { updateSet.add(new MapUpdateCommand(l1, 0, (byte) 0, getCraft())); } } else { if (l1.getY() <= waterLine) { // if there is air below the ship at the current position, don't fill in with water MovecraftLocation testAir = new MovecraftLocation(l1.getX(), l1.getY() - 1, l1.getZ()); while (existingBlockSet.contains(testAir)) { testAir.setY(testAir.getY() - 1); } if (getCraft().getW().getBlockAt(testAir.getX(), testAir.getY(), testAir.getZ()) .getTypeId() == 0) { if (getCraft().getSinking()) { updateSet.add(new MapUpdateCommand(l1, 0, (byte) 0, getCraft(), getCraft().getType().getSmokeOnSink())); } else { updateSet.add(new MapUpdateCommand(l1, 0, (byte) 0, getCraft())); } } else { updateSet.add(new MapUpdateCommand(l1, 9, (byte) 0, getCraft())); } } else { if (getCraft().getSinking()) { updateSet.add(new MapUpdateCommand(l1, 0, (byte) 0, getCraft(), getCraft().getType().getSmokeOnSink())); } else { updateSet.add(new MapUpdateCommand(l1, 0, (byte) 0, getCraft())); } } } } //add destroyed parts of growed for (MovecraftLocation destroyedLocation : destroyedBlocks) { updateSet.add(new MapUpdateCommand(destroyedLocation, 0, (byte) 0, getCraft())); } MapUpdateCommand[] updateArray = updateSet.toArray(new MapUpdateCommand[1]); // MapUpdateManager.getInstance().sortUpdates(updateArray); data.setUpdates(updateArray); data.setEntityUpdates(entityUpdateSet.toArray(new EntityUpdateCommand[1])); if (data.getDy() != 0) { data.setHitbox(BoundingBoxUtils.translateBoundingBoxVertically(data.getHitbox(), data.getDy())); } data.setMinX(data.getMinX() + data.getDx()); data.setMinZ(data.getMinZ() + data.getDz()); } captureYield(blocksList, harvestedBlocks); }
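Note on the pattern: the translation pass above uses size() as a cheap non-empty guard (harvestBlocks.size() > 0, regions.size() != 0, harvesterBladeBlocks.size() > 0) before doing per-block contains() lookups against existingBlockSet, the set of locations the craft already occupies. A minimal, self-contained sketch of that guard-then-lookup idiom follows; the ObstructionChecker class and the "x,y,z" string keys are illustrative assumptions, not Movecraft types.

import java.util.HashSet;

public class ObstructionChecker {
    // locations already occupied by the craft, keyed as "x,y,z" (hypothetical encoding)
    private final HashSet<String> existingBlockSet = new HashSet<>();

    public void addCraftBlock(int x, int y, int z) {
        existingBlockSet.add(x + "," + y + "," + z);
    }

    /** A target position only obstructs the move if it is non-air and not already part of the craft. */
    public boolean isObstructed(int x, int y, int z, boolean targetIsAir) {
        // size() as a non-empty guard: an empty craft cannot be self-obstructed
        if (existingBlockSet.size() == 0) {
            return !targetIsAir;
        }
        return !targetIsAir && !existingBlockSet.contains(x + "," + y + "," + z);
    }

    public static void main(String[] args) {
        ObstructionChecker checker = new ObstructionChecker();
        checker.addCraftBlock(10, 64, 10);
        checker.addCraftBlock(10, 64, 11);
        // moving onto a block the craft already occupies is not an obstruction
        System.out.println(checker.isObstructed(10, 64, 11, false)); // false
        // moving onto a foreign solid block is
        System.out.println(checker.isObstructed(12, 64, 10, false)); // true
    }
}

In modern code isEmpty() reads better than size() == 0 or size() > 0, but the behaviour is identical.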
From source file:org.nuxeo.ecm.platform.usermanager.UserManagerImpl.java
@Override
public String[] getUsersForPermission(String perm, ACP acp, DocumentModel context) {
    PermissionProvider permissionProvider = Framework.getService(PermissionProvider.class);
    // use a HashSet to avoid duplicate usernames
    HashSet<String> usernames = new HashSet<String>();
    ACL merged = acp.getMergedACLs("merged");
    // The list of ACEs whose (compound) permission includes "perm"
    ArrayList<ACE> filteredACEbyPerm = new ArrayList<ACE>();
    List<String> currentPermissions = getLeafPermissions(perm);
    for (ACE ace : merged.getACEs()) {
        // Check whether the ACE's permission contains the permission we want to
        // check (the security service method is used for compound permissions)
        List<String> acePermissions = getLeafPermissions(ace.getPermission());
        // Everything is a special permission (not compound)
        if (SecurityConstants.EVERYTHING.equals(ace.getPermission())) {
            acePermissions = Arrays.asList(permissionProvider.getPermissions());
        }
        if (acePermissions.containsAll(currentPermissions)) {
            // special case: everybody perm grant false, don't take into
            // account the previous ace
            if (SecurityConstants.EVERYONE.equals(ace.getUsername()) && !ace.isGranted()) {
                break;
            }
            filteredACEbyPerm.add(ace);
        }
    }
    for (ACE ace : filteredACEbyPerm) {
        String aceUsername = ace.getUsername();
        List<String> users = null;
        // If everyone, add/remove all the users
        if (SecurityConstants.EVERYONE.equals(aceUsername)) {
            users = getUserIds();
        }
        // if a group, add/remove all the users from the group (and subgroups)
        if (users == null) {
            NuxeoGroup group;
            group = getGroup(aceUsername, context);
            if (group != null) {
                users = getUsersInGroupAndSubGroups(aceUsername, context);
            }
        }
        // otherwise, add the user
        if (users == null) {
            users = new ArrayList<String>();
            users.add(aceUsername);
        }
        if (ace.isGranted()) {
            usernames.addAll(users);
        } else {
            usernames.removeAll(users);
        }
    }
    return usernames.toArray(new String[usernames.size()]);
}
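Note on the pattern: the method ends with the classic usernames.toArray(new String[usernames.size()]) idiom, where size() pre-sizes the destination array so toArray() can fill it directly. A small stand-alone sketch of the same dedup-then-export flow; the grant and deny lists are made-up data, not Nuxeo ACLs.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class PermissionUserExport {
    public static void main(String[] args) {
        // hypothetical grant/deny lists standing in for the filtered ACEs
        List<String> granted = Arrays.asList("alice", "bob", "carol", "bob");
        List<String> denied = Arrays.asList("carol");

        // a HashSet removes the duplicate "bob" automatically
        HashSet<String> usernames = new HashSet<>(granted);
        usernames.removeAll(denied);

        // size() pre-sizes the target array for toArray()
        String[] result = usernames.toArray(new String[usernames.size()]);
        System.out.println(result.length + " users: " + Arrays.toString(result));
    }
}

Passing new String[0] works just as well, since toArray() allocates an array of the right size itself; the size() form is a micro-optimisation rather than a requirement.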
From source file:org.osaf.cosmo.migrate.ZeroPointSevenToZeroPointEightMigration.java
/** * ensure collections do not contain duplicate icaluids *///from ww w. j a v a 2s . c o m private void fixDuplicateIcalUids(Connection conn) throws Exception { PreparedStatement stmt = null; PreparedStatement selectByIcalUid = null; PreparedStatement selectNumDuplicates = null; PreparedStatement updateStmt = null; PreparedStatement updateColStmt = null; ResultSet rs = null; long itemCount = 0; HashSet<Long> collectionIds = new HashSet<Long>(); log.debug("starting fixDuplicateIcalUids()"); try { // get all to migrate stmt = conn.prepareStatement( "select * from (SELECT ci.collectionid, i.icaluid, count(*) as counticaluid from item i, collection_item ci where ci.itemid=i.id and i.icaluid is not null and i.modifiesitemid is null group by i.icaluid, ci.collectionid) as ss where counticaluid > 1"); selectNumDuplicates = conn.prepareStatement( "select count(*) from item i, collection_item ci where ci.collectionid=? and ci.itemid=i.id and i.icaluid=?"); selectByIcalUid = conn.prepareStatement( "select i.id from item i, collection_item ci where ci.collectionid=? and ci.itemid=i.id and i.icaluid=? and upper(i.icaluid)!=upper(i.uid)"); // migration statements updateStmt = conn.prepareStatement( "update item set icaluid=upper(uid), modifydate=?, version=version+1 where id=?"); updateStmt.setLong(1, System.currentTimeMillis()); updateColStmt = conn.prepareStatement("update item set modifydate=?, version=version+1 where id=?"); updateColStmt.setLong(1, System.currentTimeMillis()); rs = stmt.executeQuery(); // migrate each duplicate icaluid while (rs.next()) { long collectionId = rs.getLong(1); String icalUid = rs.getString(2); collectionIds.add(collectionId); selectNumDuplicates.setLong(1, collectionId); selectNumDuplicates.setString(2, icalUid); ResultSet duplicatesRs = selectNumDuplicates.executeQuery(); duplicatesRs.next(); int numDuplicates = duplicatesRs.getInt(1); duplicatesRs.close(); // While there are more than one item with the same // icaluid in the collection, attempt to fix by updating // a single item in the list of duplicates. while (numDuplicates > 1) { log.debug( "found " + numDuplicates + " for icaluid " + icalUid + " collectionid " + collectionId); log.debug("fixing collection " + collectionId + " icaluid " + icalUid); selectByIcalUid.setLong(1, collectionId); selectByIcalUid.setString(2, icalUid); ResultSet toFix = selectByIcalUid.executeQuery(); toFix.next(); // fix icaluid to be uid, update timestamp long itemId = toFix.getLong(1); log.debug("fixing item " + itemId + " by setting icalUid to uid"); updateStmt.setLong(2, itemId); updateStmt.executeUpdate(); toFix.close(); // update collection timestamp updateColStmt.setLong(2, collectionId); updateColStmt.executeUpdate(); itemCount++; duplicatesRs = selectNumDuplicates.executeQuery(); duplicatesRs.next(); numDuplicates = duplicatesRs.getInt(1); } } } finally { close(stmt); close(updateStmt); close(selectNumDuplicates); close(selectByIcalUid); } log.debug("fixed " + collectionIds.size() + " collections that contained duplicate icaluids"); log.debug("fixed " + itemCount + " items with duplicate icaluids"); }
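Note on the pattern: here a HashSet<Long> of collection ids is filled while iterating the duplicate rows, and size() is read only at the end to log how many distinct collections were touched. A tiny sketch of that distinct-counting idiom; the repeated id column is fabricated and no JDBC is involved.

import java.util.HashSet;

public class DistinctCollectionCounter {
    public static void main(String[] args) {
        // result rows may mention the same collection several times
        long[] collectionIdColumn = {101L, 102L, 101L, 103L, 102L, 101L};

        HashSet<Long> collectionIds = new HashSet<>();
        long itemCount = 0;
        for (long collectionId : collectionIdColumn) {
            collectionIds.add(collectionId); // duplicates are ignored by the set
            itemCount++;                     // but every row still counts as a fixed item
        }

        // mirrors the migration's summary logging
        System.out.println("fixed " + collectionIds.size() + " collections");
        System.out.println("fixed " + itemCount + " items");
    }
}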
From source file:com.emc.storageos.volumecontroller.impl.plugins.IsilonCommunicationInterface.java
private void discoverUmanagedFileSystems(AccessProfile profile) throws BaseCollectionException { _log.debug("Access Profile Details : IpAddress : PortNumber : {}, namespace : {}", profile.getIpAddress() + profile.getPortNumber(), profile.getnamespace()); URI storageSystemId = profile.getSystemId(); StorageSystem storageSystem = _dbClient.queryObject(StorageSystem.class, storageSystemId); if (null == storageSystem) { return;//from w w w . j a va2 s .c om } List<UnManagedFileSystem> unManagedFileSystems = new ArrayList<UnManagedFileSystem>(); List<UnManagedFileSystem> existingUnManagedFileSystems = new ArrayList<UnManagedFileSystem>(); Set<URI> allDiscoveredUnManagedFileSystems = new HashSet<URI>(); String detailedStatusMessage = "Discovery of Isilon Unmanaged FileSystem started"; long unmanagedFsCount = 0; try { IsilonApi isilonApi = getIsilonDevice(storageSystem); URIQueryResultList storagePoolURIs = new URIQueryResultList(); _dbClient.queryByConstraint( ContainmentConstraint.Factory.getStorageDeviceStoragePoolConstraint(storageSystem.getId()), storagePoolURIs); ArrayList<StoragePool> pools = new ArrayList(); Iterator<URI> poolsItr = storagePoolURIs.iterator(); while (poolsItr.hasNext()) { URI storagePoolURI = poolsItr.next(); StoragePool storagePool = _dbClient.queryObject(StoragePool.class, storagePoolURI); if (storagePool != null && !storagePool.getInactive()) { pools.add(storagePool); } } StoragePool storagePool = null; if (pools != null && !pools.isEmpty()) { storagePool = pools.get(0); } StoragePort storagePort = getStoragePortPool(storageSystem); String resumeToken = null; int totalIsilonFSDiscovered = 0; // get the associated storage port for vnas Server List<IsilonAccessZone> isilonAccessZones = isilonApi.getAccessZones(null); Map<String, NASServer> nasServers = getNASServer(storageSystem, isilonAccessZones); setDiscPathForAccess(nasServers); // Get All FileShare HashMap<String, HashSet<String>> allSMBShares = discoverAllSMBShares(storageSystem, isilonAccessZones); List<UnManagedCifsShareACL> unManagedCifsShareACLList = new ArrayList<UnManagedCifsShareACL>(); List<UnManagedCifsShareACL> oldunManagedCifsShareACLList = new ArrayList<UnManagedCifsShareACL>(); HashMap<String, HashSet<Integer>> expMap = discoverAllExports(storageSystem, isilonAccessZones); List<UnManagedNFSShareACL> unManagedNfsShareACLList = new ArrayList<UnManagedNFSShareACL>(); List<UnManagedNFSShareACL> oldunManagedNfsShareACLList = new ArrayList<UnManagedNFSShareACL>(); List<UnManagedFileExportRule> newUnManagedExportRules = new ArrayList<UnManagedFileExportRule>(); List<FileShare> discoveredFS = new ArrayList<FileShare>(); do { IsilonApi.IsilonList<FileShare> discoveredIsilonFS = discoverAllFileSystem(storageSystem, resumeToken); resumeToken = discoveredIsilonFS.getToken(); discoveredFS = discoveredIsilonFS.getList(); totalIsilonFSDiscovered += discoveredFS.size(); unManagedFileSystems = new ArrayList<UnManagedFileSystem>(); existingUnManagedFileSystems = new ArrayList<UnManagedFileSystem>(); int newFileSystemsCount = 0; int existingFileSystemsCount = 0; HashMap<String, HashMap<String, HashSet<Integer>>> exportMapTree = getExportsWithSubDirForFS( discoveredFS, expMap); for (FileShare fs : discoveredFS) { if (!checkStorageFileSystemExistsInDB(fs.getNativeGuid())) { // Create UnManaged FS String fsUnManagedFsNativeGuid = NativeGUIDGenerator .generateNativeGuidForPreExistingFileSystem(storageSystem.getSystemType(), storageSystem.getSerialNumber(), fs.getNativeId()); String fsPathName = fs.getPath(); 
UnManagedFileSystem unManagedFs = checkUnManagedFileSystemExistsInDB( fsUnManagedFsNativeGuid); // get the matched vNAS Server NASServer nasServer = getMatchedNASServer(nasServers, fsPathName); if (nasServer != null) { _log.info("fs path {} and nas server details {}", fs.getPath(), nasServer.toString()); if (nasServer.getStoragePorts() != null && !nasServer.getStoragePorts().isEmpty()) { storagePort = _dbClient.queryObject(StoragePort.class, URI.create(nasServer.getStoragePorts().iterator().next())); } } else { _log.info("fs path {} and vnas server not found", fs.getPath()); continue; // Skip further ingestion steps on this file share & move to next file share } boolean alreadyExist = unManagedFs == null ? false : true; unManagedFs = createUnManagedFileSystem(unManagedFs, fsUnManagedFsNativeGuid, storageSystem, storagePool, nasServer, fs); /* * Get all file exports with given file system */ HashSet<String> fsExportPaths = new HashSet<String>(); for (Entry<String, HashSet<Integer>> entry : expMap.entrySet()) { if (entry.getKey().equalsIgnoreCase(fsPathName) || entry.getKey().startsWith(fsPathName + "/")) { _log.info("filesystem path : {} and export path: {}", fs.getPath(), entry.getKey()); fsExportPaths.add(entry.getKey()); } } List<UnManagedNFSShareACL> tempUnManagedNfsShareACL = new ArrayList<UnManagedNFSShareACL>(); UnManagedNFSShareACL existingNfsACL = null; getUnmanagedNfsShareACL(unManagedFs, tempUnManagedNfsShareACL, storagePort, fs, isilonApi, fsExportPaths); if (tempUnManagedNfsShareACL != null && !tempUnManagedNfsShareACL.isEmpty()) { unManagedFs.setHasNFSAcl(true); } for (UnManagedNFSShareACL unManagedNFSACL : tempUnManagedNfsShareACL) { _log.info("Unmanaged File share acls : {}", unManagedNFSACL); String fsShareNativeId = unManagedNFSACL.getFileSystemNfsACLIndex(); _log.info("UMFS Share ACL index {}", fsShareNativeId); String fsUnManagedFileShareNativeGuid = NativeGUIDGenerator .generateNativeGuidForPreExistingFileShare(storageSystem, fsShareNativeId); _log.info("Native GUID {}", fsUnManagedFileShareNativeGuid); // set native guid, so each entry unique unManagedNFSACL.setNativeGuid(fsUnManagedFileShareNativeGuid); // Check whether the NFS share ACL was present in ViPR DB. 
existingNfsACL = checkUnManagedFsNfssACLExistsInDB(_dbClient, unManagedNFSACL.getNativeGuid()); if (existingNfsACL == null) { unManagedNfsShareACLList.add(unManagedNFSACL); } else { unManagedNfsShareACLList.add(unManagedNFSACL); // delete the existing acl existingNfsACL.setInactive(true); oldunManagedNfsShareACLList.add(existingNfsACL); } } // get all shares for given file system path HashSet<String> smbShareHashSet = new HashSet<String>(); for (Entry<String, HashSet<String>> entry : allSMBShares.entrySet()) { if (entry.getKey().equalsIgnoreCase(fsPathName) || entry.getKey().startsWith(fsPathName + "/")) { _log.info("filesystem path : {} and share path: {}", fs.getPath(), entry.getKey()); smbShareHashSet.addAll(entry.getValue()); } } _log.info("File System {} has shares and their size is {}", unManagedFs.getId(), smbShareHashSet.size()); if (!smbShareHashSet.isEmpty()) { List<UnManagedCifsShareACL> umfsCifsShareACL = new ArrayList<UnManagedCifsShareACL>(); // Set UnManaged ACL and also set the shares in fs object setUnmanagedCifsShareACL(unManagedFs, smbShareHashSet, umfsCifsShareACL, storagePort, fs.getName(), nasServer.getNasName(), isilonApi); if (!umfsCifsShareACL.isEmpty()) { for (UnManagedCifsShareACL unManagedCifsShareACL : umfsCifsShareACL) { _log.info("Unmanaged File share acl : {}", unManagedCifsShareACL); String fsShareNativeId = unManagedCifsShareACL.getFileSystemShareACLIndex(); _log.info("UMFS Share ACL index {}", fsShareNativeId); String fsUnManagedFileShareNativeGuid = NativeGUIDGenerator .generateNativeGuidForPreExistingFileShare(storageSystem, fsShareNativeId); _log.info("Native GUID {}", fsUnManagedFileShareNativeGuid); // set native guid, so each entry unique unManagedCifsShareACL.setNativeGuid(fsUnManagedFileShareNativeGuid); // Check whether the CIFS share ACL was present in ViPR DB. UnManagedCifsShareACL existingCifsShareACL = checkUnManagedFsCifsACLExistsInDB( _dbClient, unManagedCifsShareACL.getNativeGuid()); if (existingCifsShareACL == null) { unManagedCifsShareACLList.add(unManagedCifsShareACL); } else { unManagedCifsShareACLList.add(unManagedCifsShareACL); // delete the existing acl existingCifsShareACL.setInactive(true); oldunManagedCifsShareACLList.add(existingCifsShareACL); } } _log.info("UMFS ID {} - Size of ACL of all CIFS shares is {}", unManagedFs.getId(), umfsCifsShareACL.size()); } } // Get Export info _log.info("Getting export for {}", fs.getPath()); HashMap<String, HashSet<Integer>> expIdMap = exportMapTree.get(fs.getPath()); if (expIdMap == null) { expIdMap = new HashMap<>(); } List<UnManagedFileExportRule> unManagedExportRules = new ArrayList<UnManagedFileExportRule>(); if (!expIdMap.keySet().isEmpty()) { boolean validExportsFound = getUnManagedFSExportMap(unManagedFs, expIdMap, storagePort, fs.getPath(), nasServer.getNasName(), isilonApi); if (!validExportsFound) { // Invalid exports so ignore the FS String invalidExports = ""; for (String path : expIdMap.keySet()) { invalidExports += expIdMap.get(path); } _log.info("FS {} is ignored because it has conflicting exports {}", fs.getPath(), invalidExports); unManagedFs.setInactive(true); // Persists the inactive state before picking next UMFS!!! 
_dbClient.persistObject(unManagedFs); continue; } List<UnManagedFileExportRule> validExportRules = getUnManagedFSExportRules(unManagedFs, expIdMap, storagePort, fs.getPath(), nasServer.getNasName(), isilonApi); _log.info("Number of exports discovered for file system {} is {}", unManagedFs.getId(), validExportRules.size()); UnManagedFileExportRule existingRule = null; for (UnManagedFileExportRule dbExportRule : validExportRules) { _log.info("Un Managed File Export Rule : {}", dbExportRule); String fsExportRulenativeId = dbExportRule.getFsExportIndex(); _log.info("Native Id using to build Native Guid {}", fsExportRulenativeId); String fsUnManagedFileExportRuleNativeGuid = NativeGUIDGenerator .generateNativeGuidForPreExistingFileExportRule(storageSystem, fsExportRulenativeId); _log.info("Native GUID {}", fsUnManagedFileExportRuleNativeGuid); dbExportRule.setNativeGuid(fsUnManagedFileExportRuleNativeGuid); dbExportRule.setFileSystemId(unManagedFs.getId()); dbExportRule.setId(URIUtil.createId(UnManagedFileExportRule.class)); existingRule = checkUnManagedFsExportRuleExistsInDB(_dbClient, dbExportRule.getNativeGuid()); if (null == existingRule) { unManagedExportRules.add(dbExportRule); } else { existingRule.setInactive(true); _dbClient.persistObject(existingRule); unManagedExportRules.add(dbExportRule); } } // Validate Rules Compatible with ViPR - Same rules should // apply as per API SVC Validations. if (!unManagedExportRules.isEmpty()) { _log.info("Validating rules success for export {}", fs.getName()); newUnManagedExportRules.addAll(unManagedExportRules); unManagedFs.setHasExports(true); _log.info("File System {} has Exports and their size is {}", unManagedFs.getId(), newUnManagedExportRules.size()); } } if (unManagedFs.getHasExports() || unManagedFs.getHasShares()) { _log.info("FS {} is having exports/shares", fs.getPath()); unManagedFs.putFileSystemCharacterstics( UnManagedFileSystem.SupportedFileSystemCharacterstics.IS_FILESYSTEM_EXPORTED .toString(), TRUE); } else { // NO exports found _log.info("FS {} is ignored because it doesnt have exports and shares", fs.getPath()); } if (alreadyExist) { existingUnManagedFileSystems.add(unManagedFs); existingFileSystemsCount++; } else { unManagedFileSystems.add(unManagedFs); newFileSystemsCount++; } if (!newUnManagedExportRules.isEmpty()) { _log.info("Saving Number of UnManagedFileExportRule(s) {}", newUnManagedExportRules.size()); _partitionManager.updateInBatches(newUnManagedExportRules, Constants.DEFAULT_PARTITION_SIZE, _dbClient, UNMANAGED_EXPORT_RULE); newUnManagedExportRules.clear(); } // save ACLs in db if (!unManagedCifsShareACLList.isEmpty() && unManagedCifsShareACLList.size() >= MAX_UMFS_RECORD_SIZE) { _log.info("Saving Number of UnManagedCifsShareACL(s) {}", unManagedCifsShareACLList.size()); _dbClient.createObject(unManagedCifsShareACLList); unManagedCifsShareACLList.clear(); } // save old acls if (!oldunManagedCifsShareACLList.isEmpty() && oldunManagedCifsShareACLList.size() >= MAX_UMFS_RECORD_SIZE) { _log.info("Saving Number of UnManagedFileExportRule(s) {}", oldunManagedCifsShareACLList.size()); _dbClient.persistObject(oldunManagedCifsShareACLList); oldunManagedCifsShareACLList.clear(); } allDiscoveredUnManagedFileSystems.add(unManagedFs.getId()); /** * Persist 200 objects and clear them to avoid memory issue */ validateListSizeLimitAndPersist(unManagedFileSystems, existingUnManagedFileSystems, Constants.DEFAULT_PARTITION_SIZE * 2); } } _log.info("New unmanaged Isilon file systems count: {}", newFileSystemsCount); _log.info("Update 
unmanaged Isilon file systems count: {}", existingFileSystemsCount); if (!unManagedFileSystems.isEmpty()) { _dbClient.createObject(unManagedFileSystems); } if (!existingUnManagedFileSystems.isEmpty()) { _dbClient.updateAndReindexObject(existingUnManagedFileSystems); } } while (resumeToken != null); // save ACLs in db if (!unManagedCifsShareACLList.isEmpty()) { _log.info("Saving Number of UnManagedCifsShareACL(s) {}", unManagedCifsShareACLList.size()); _dbClient.createObject(unManagedCifsShareACLList); unManagedCifsShareACLList.clear(); } // save NFS ACLs in db if (!unManagedNfsShareACLList.isEmpty()) { _log.info("Saving Number of UnManagedNfsShareACL(s) {}", unManagedNfsShareACLList.size()); _dbClient.createObject(unManagedNfsShareACLList); unManagedNfsShareACLList.clear(); } // save old acls if (!oldunManagedCifsShareACLList.isEmpty()) { _log.info("Saving Number of UnManagedFileExportRule(s) {}", oldunManagedCifsShareACLList.size()); _dbClient.persistObject(oldunManagedCifsShareACLList); oldunManagedCifsShareACLList.clear(); } // save old acls if (!oldunManagedNfsShareACLList.isEmpty()) { _log.info("Saving Number of NFS UnManagedFileExportRule(s) {}", oldunManagedNfsShareACLList.size()); _dbClient.updateObject(oldunManagedNfsShareACLList); oldunManagedNfsShareACLList.clear(); } _log.info("Discovered {} Isilon file systems.", totalIsilonFSDiscovered); // Process those active unmanaged fs objects available in database but not in newly discovered items, to // mark them inactive. markUnManagedFSObjectsInActive(storageSystem, allDiscoveredUnManagedFileSystems); // discovery succeeds detailedStatusMessage = String.format( "Discovery completed successfully for Isilon: %s; new unmanaged file systems count: %s", storageSystemId.toString(), unmanagedFsCount); _log.info(detailedStatusMessage); } catch (Exception e) { if (storageSystem != null) { cleanupDiscovery(storageSystem); } detailedStatusMessage = String.format("Discovery failed for Isilon %s because %s", storageSystemId.toString(), e.getLocalizedMessage()); _log.error(detailedStatusMessage, e); throw new IsilonCollectionException(detailedStatusMessage); } finally { if (storageSystem != null) { try { // set detailed message storageSystem.setLastDiscoveryStatusMessage(detailedStatusMessage); _dbClient.persistObject(storageSystem); } catch (Exception ex) { _log.error("Error while persisting object to DB", ex); } } } }
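Note on the pattern: the discovery folds every SMB share whose path matches a file system (exact path or a "path/" prefix) into a per-file-system HashSet, logs smbShareHashSet.size(), and only ingests ACLs when the set is non-empty. A compact sketch of that prefix-grouping step; the share and file-system paths are invented and no Isilon client is used.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

public class ShareGrouper {
    public static void main(String[] args) {
        String fsPathName = "/ifs/zone1/fs1";
        // hypothetical share-path -> share-names map, playing the role of allSMBShares
        Map<String, List<String>> allSMBShares = Map.of(
                "/ifs/zone1/fs1", Arrays.asList("fs1_root"),
                "/ifs/zone1/fs1/projects", Arrays.asList("projects", "projects_ro"),
                "/ifs/zone1/fs2", Arrays.asList("fs2_root"));

        HashSet<String> smbShareHashSet = new HashSet<>();
        for (Map.Entry<String, List<String>> entry : allSMBShares.entrySet()) {
            if (entry.getKey().equalsIgnoreCase(fsPathName)
                    || entry.getKey().startsWith(fsPathName + "/")) {
                smbShareHashSet.addAll(entry.getValue());
            }
        }

        System.out.println("File system " + fsPathName + " has " + smbShareHashSet.size() + " shares");
        if (!smbShareHashSet.isEmpty()) {
            // in the real discovery this is where the share ACLs would be built
            System.out.println(smbShareHashSet);
        }
    }
}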
From source file:com.clustercontrol.ping.factory.RunMonitorPing.java
/**
 * Runs the ping check via fping.<BR>
 *
 * @param facilityList facility IDs of the ping targets
 * @return true if the ping results were collected successfully
 * @throws HinemosUnknown
 */
private boolean collectFping(ArrayList<String> facilityList, ArrayList<Integer> priporityList)
        throws HinemosUnknown {
    // collection timestamp
    if (m_now != null) {
        m_nodeDate = m_now.getTime();
    }
    // build the target lists
    // hosts[]   IPv4 addresses (as String)
    // hostsv6[] IPv6 addresses (as String)
    // node      IP address / node name pair
    // target    facilityId -> node mapping
    HashSet<String> hosts = new HashSet<String>();
    HashSet<String> hostsv6 = new HashSet<String>();
    m_Target = new Hashtable<String, String[]>();
    String facilityId = null;
    int version = 4;
    String[] node;
    for (int index = 0; index < facilityList.size(); index++) {
        facilityId = facilityList.get(index);
        if (facilityId != null && !"".equals(facilityId)) {
            node = new String[2];
            try {
                // fetch the node information
                NodeInfo info = new RepositoryControllerBean().getNode(facilityId);
                //m_log.error(facilityAttrMap.get(FacilityAttributeConstant.IPPROTOCOLNUMBER));
                // determine the IP version configured for the node
                if (info.getIpAddressVersion() != null) {
                    version = info.getIpAddressVersion();
                } else {
                    version = 4;
                }
                if (version == 6) {
                    InetAddress[] ip = InetAddress.getAllByName(info.getIpAddressV6());
                    if (ip.length != 1) {
                        // the address must resolve to exactly one InetAddress;
                        // otherwise treat it as an UnknownHostException
                        UnknownHostException e = new UnknownHostException();
                        m_log.info("collectFping() : " + e.getClass().getSimpleName() + ", " + e.getMessage());
                        throw e;
                    }
                    node[0] = ip[0].getHostAddress();
                    if (node[0] != null && !node[0].equals("")) {
                        // register the IPv6 address in the HashSet
                        hostsv6.add(node[0]);
                    }
                } else {
                    node[0] = info.getIpAddressV4();
                    if (node[0] != null && !node[0].equals("")) {
                        // register the IPv4 address in the HashSet
                        hosts.add(node[0]);
                    }
                }
                if (node[0] != null && !node[0].equals("")) {
                    node[1] = info.getNodeName();
                    // register the node as a target
                    m_Target.put(facilityId, node);
                }
            } catch (FacilityNotFound e) {
                m_message = MessageConstant.MESSAGE_COULD_NOT_GET_NODE_ATTRIBUTES_PING.getMessage();
                m_messageOrg = e.getMessage();
                return false;
            } catch (UnknownHostException e) {
                // skip this facility
            }
        }
    }
    if (m_reachabilityFping == null) {
        m_reachabilityFping = new ReachAddressFping(m_runCount, m_runInterval, m_pingTimeout);
    }
    boolean result = true;
    boolean resultTmp = true;
    // run fping against the IPv4 targets
    if (hosts.size() != 0) {
        result = m_reachabilityFping.isReachable(hosts, 4);
        m_MsgErr = m_reachabilityFping.getM_errMsg();
    }
    // run fping6 against the IPv6 targets
    if (hostsv6.size() != 0) {
        resultTmp = m_reachabilityFping.isReachable(hostsv6, 6);
        m_MsgErrV6 = m_reachabilityFping.getM_errMsg();
        // combine the IPv4 and IPv6 results
        result = result & resultTmp;
    }
    return result;
}
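Note on the pattern: collectFping() sorts each node's address into an IPv4 or IPv6 HashSet (which also de-duplicates shared addresses) and then calls fping only for the families whose set has size() != 0. The sketch below reproduces that split-then-check shape with plain JDK classes; the pingAll() stub stands in for ReachAddressFping and is an assumption, not Hinemos API.

import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class FamilySplitPing {
    // stand-in for ReachAddressFping.isReachable(hosts, version)
    static boolean pingAll(HashSet<String> hosts, int version) {
        System.out.println("would run fping" + (version == 6 ? "6" : "") + " for " + hosts);
        return true;
    }

    public static void main(String[] args) throws UnknownHostException {
        List<String> targets = Arrays.asList("127.0.0.1", "::1", "127.0.0.1");

        HashSet<String> hosts = new HashSet<>();   // IPv4 targets
        HashSet<String> hostsv6 = new HashSet<>(); // IPv6 targets
        for (String target : targets) {
            InetAddress addr = InetAddress.getByName(target);
            if (addr instanceof Inet6Address) {
                hostsv6.add(addr.getHostAddress());
            } else {
                hosts.add(addr.getHostAddress());
            }
        }

        boolean result = true;
        // only launch a run for the address families that actually have targets
        if (hosts.size() != 0) {
            result = pingAll(hosts, 4);
        }
        if (hostsv6.size() != 0) {
            result = result & pingAll(hostsv6, 6);
        }
        System.out.println("overall result: " + result);
    }
}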