List of usage examples for java.util.HashSet.add
public boolean add(E e)
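HashSet.add returns true if the set did not already contain the element and false otherwise, so the return value can be used to detect duplicates without a separate contains call. A minimal illustration (hypothetical values):

    HashSet<String> set = new HashSet<String>();
    System.out.println(set.add("a"));   // true: "a" was not yet in the set
    System.out.println(set.add("a"));   // false: duplicate, the set is unchanged
    System.out.println(set.size());     // 1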
From source file:IndexService.IndexMR.java
public static RunningJob run(Configuration conf2, String inputfiles, boolean column, String ids,
        String outputdir) {
    if (inputfiles == null || outputdir == null)
        return null;
    JobConf conf = new JobConf(conf2);
    conf.setJobName("IndexMR:\t" + ids);
    conf.setJarByClass(IndexMR.class);
    FileSystem fs = null;
    try {
        fs = FileSystem.get(conf);
        fs.delete(new Path(outputdir), true);
    } catch (IOException e3) {
        e3.printStackTrace();
    }
    conf.set("index.ids", ids);
    if (column) {
        conf.set("datafiletype", "column");
    } else {
        conf.set("datafiletype", "format");
    }
    String[] ifs = inputfiles.split(",");
    long wholerecnum = 0;
    String[] idxs = ids.split(",");
    String[] fieldStrings = new String[idxs.length + 2];
    if (!column) {
        IFormatDataFile ifdf;
        try {
            ifdf = new IFormatDataFile(conf);
            ifdf.open(ifs[0]);
            for (int i = 0; i < idxs.length; i++) {
                int id = Integer.parseInt(idxs[i]);
                byte type = ifdf.fileInfo().head().fieldMap().fieldtypes().get(id).type();
                fieldStrings[i] = type + ConstVar.RecordSplit + i;
            }
            ifdf.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        try {
            IColumnDataFile icdf = new IColumnDataFile(conf);
            icdf.open(ifs[0]);
            for (int i = 0; i < idxs.length; i++) {
                int id = Integer.parseInt(idxs[i]);
                byte type = icdf.fieldtypes().get(id).type();
                fieldStrings[i] = type + ConstVar.RecordSplit + i;
            }
            icdf.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    fieldStrings[fieldStrings.length - 2] = ConstVar.FieldType_Short + ConstVar.RecordSplit
            + (fieldStrings.length - 2);
    fieldStrings[fieldStrings.length - 1] = ConstVar.FieldType_Int + ConstVar.RecordSplit
            + (fieldStrings.length - 1);
    conf.setStrings(ConstVar.HD_fieldMap, fieldStrings);
    if (!column) {
        conf.set(ConstVar.HD_index_filemap, inputfiles);
        for (String file : ifs) {
            IFormatDataFile fff;
            try {
                fff = new IFormatDataFile(conf);
                fff.open(file);
                wholerecnum += fff.segIndex().recnum();
                fff.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    } else {
        // deduplicate the input file list with a HashSet before building the file map
        HashSet<String> files = new HashSet<String>();
        for (String file : ifs) {
            files.add(file);
        }
        StringBuffer sb = new StringBuffer();
        for (String str : files) {
            sb.append(str).append(",");
        }
        conf.set(ConstVar.HD_index_filemap, sb.substring(0, sb.length() - 1));
        for (String file : files) {
            Path parent = new Path(file).getParent();
            try {
                FileStatus[] fss = fs.listStatus(parent);
                String openfile = "";
                for (FileStatus status : fss) {
                    if (status.getPath().toString().contains(file)) {
                        openfile = status.getPath().toString();
                        break;
                    }
                }
                IFormatDataFile fff = new IFormatDataFile(conf);
                fff.open(openfile);
                wholerecnum += fff.segIndex().recnum();
                fff.close();
            } catch (IOException e) {
                e.printStackTrace();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
    conf.setNumReduceTasks((int) ((wholerecnum - 1) / (100000000) + 1));
    FileInputFormat.setInputPaths(conf, inputfiles);
    Path outputPath = new Path(outputdir);
    FileOutputFormat.setOutputPath(conf, outputPath);
    conf.setOutputKeyClass(IndexKey.class);
    conf.setOutputValueClass(IndexValue.class);
    conf.setPartitionerClass(IndexPartitioner.class);
    conf.setMapperClass(IndexMap.class);
    conf.setCombinerClass(IndexReduce.class);
    conf.setReducerClass(IndexReduce.class);
    if (column) {
        conf.setInputFormat(IColumnInputFormat.class);
    } else {
        conf.setInputFormat(IFormatInputFormat.class);
    }
    conf.setOutputFormat(IndexIFormatOutputFormat.class);
    try {
        JobClient jc = new JobClient(conf);
        return jc.submitJob(conf);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
}
From source file:edu.msu.cme.rdp.multicompare.Reprocess.java
public static HashSet<String> readTaxonFilterFile(String file) throws IOException {
    HashSet<String> ret = new HashSet<String>();
    BufferedReader reader = new BufferedReader(new FileReader(file));
    String line;
    while ((line = reader.readLine()) != null) {
        if (line.trim().equals(""))
            continue;
        ret.add(line.trim());
        System.err.println(line.trim());
    }
    reader.close();
    return ret;
}
From source file:com.vertica.hadoop.VerticaOutputFormat.java
/**
 * Optionally called at the end of a job to optimize any newly created and
 * loaded tables. Useful for new tables with more than 100k records.
 *
 * @param conf
 * @throws Exception
 */
public static void optimize(Configuration conf) throws Exception {
    VerticaConfiguration vtconfig = new VerticaConfiguration(conf);
    Connection conn = vtconfig.getConnection(true);

    // TODO: consider more tables and skip tables with non-temp projections
    Relation vTable = new Relation(vtconfig.getOutputTableName());
    Statement stmt = conn.createStatement();
    ResultSet rs = null;
    HashSet<String> tablesWithTemp = new HashSet<String>();

    // for now just add the single output table
    tablesWithTemp.add(vTable.getQualifiedName().toString());

    // map from table name to set of projection names
    HashMap<String, Collection<String>> tableProj = new HashMap<String, Collection<String>>();
    rs = stmt.executeQuery("select projection_schema, anchor_table_name, projection_name from projections;");
    while (rs.next()) {
        String ptable = rs.getString(1) + "." + rs.getString(2);
        if (!tableProj.containsKey(ptable)) {
            tableProj.put(ptable, new HashSet<String>());
        }
        tableProj.get(ptable).add(rs.getString(3));
    }

    for (String table : tablesWithTemp) {
        if (!tableProj.containsKey(table)) {
            throw new RuntimeException("Cannot optimize table with no data: " + table);
        }
    }

    String designName = Integer.toString(conn.hashCode());
    stmt.execute("select dbd_create_workspace('" + designName + "')");
    stmt.execute("select dbd_create_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_add_design_tables('" + designName + "', '"
            + vTable.getQualifiedName().toString() + "')");
    stmt.execute("select dbd_populate_design('" + designName + "', '" + designName + "')");

    // Execute
    stmt.execute("select dbd_create_deployment('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_add_deployment_design('" + designName + "', '" + designName + "', '"
            + designName + "')");
    stmt.execute("select dbd_add_deployment_drop('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_execute_deployment('" + designName + "', '" + designName + "')");

    // Cleanup
    stmt.execute("select dbd_drop_deployment('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_remove_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_drop_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_drop_workspace('" + designName + "')");
}
From source file:ac.dynam.rundeck.plugin.resources.ovirt.InstanceToNodeMapper.java
/**
 * Convert an oVirt Instance to a RunDeck INodeEntry based on the mapping input
 */
@SuppressWarnings("unchecked")
static INodeEntry instanceToNode(final VM inst) throws GeneratorException {
    final NodeEntryImpl node = new NodeEntryImpl();
    node.setNodename(inst.getName());
    node.setOsArch(inst.getCpu().getArchitecture());
    node.setOsName(inst.getOs().getType());
    node.setDescription(inst.getDescription());
    node.setUsername("root");
    InetAddress address = null;
    if (inst.getGuestInfo() != null) {
        try {
            address = InetAddress.getByName(inst.getGuestInfo().getFqdn());
            logger.debug("Host " + node.getNodename() + " Guest FQDN " + inst.getGuestInfo().getFqdn()
                    + " Address: " + address.getHostName());
            // compare with equals(), not ==; the original used reference comparison on strings
            if ("localhost".equals(address.getHostName()))
                throw new UnknownHostException();
        } catch (UnknownHostException e) {
            // try the first IP instead then
            logger.warn("Host " + node.getNodename() + " address " + inst.getGuestInfo().getFqdn()
                    + " does not resolve. Trying IP addresses instead");
            for (int i = 0; i < inst.getGuestInfo().getIps().getIPs().size(); i++) {
                logger.debug("Host " + node.getNodename() + " Trying "
                        + inst.getGuestInfo().getIps().getIPs().get(i).getAddress());
                try {
                    address = InetAddress.getByName(inst.getGuestInfo().getIps().getIPs().get(i).getAddress());
                    if (address != null) {
                        if (address.isLinkLocalAddress() || address.isMulticastAddress()) {
                            logger.warn("Host " + node.getNodename() + " ip address is not valid: "
                                    + inst.getGuestInfo().getIps().getIPs().get(i).getAddress());
                            continue;
                        }
                        logger.debug("Host " + node.getNodename() + " ip address " + address.getHostAddress()
                                + " will be used instead");
                        break;
                    }
                } catch (UnknownHostException e1) {
                    logger.warn("Host " + node.getNodename() + " IP Address "
                            + inst.getGuestInfo().getIps().getIPs().get(i).getAddress() + " is invalid");
                }
            }
        }
    }
    if (address == null) {
        // fall back to resolving the node name itself
        try {
            address = InetAddress.getByName(node.getNodename());
        } catch (UnknownHostException e) {
            logger.warn("Unable to find IP address for Host " + node.getNodename());
            return null;
        }
    }
    if (address != null)
        node.setHostname(address.getCanonicalHostName());
    if (inst.getTags() != null) {
        VMTags tags = inst.getTags();
        final HashSet<String> tagset = new HashSet<String>();
        try {
            for (int j = 0; j < tags.list().size(); j++) {
                tagset.add(tags.list().get(j).getName());
            }
        } catch (ClientProtocolException e) {
            e.printStackTrace();
        } catch (ServerException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (null == node.getTags()) {
            node.setTags(tagset);
        } else {
            // merge the new tags into the node's existing tag set
            final HashSet<String> orig = new HashSet<String>(node.getTags());
            orig.addAll(tagset);
            node.setTags(orig);
        }
    }
    if (inst.getHighAvailability().getEnabled())
        node.setAttribute("HighAvailability", "true");
    if (inst.getType() != null)
        node.setAttribute("Host Type", inst.getType());
    node.setAttribute("oVirt VM", "true");
    node.setAttribute("oVirt Host", inst.getHost().getName());
    return node;
}
From source file:com.facebook.internal.JsonUtil.java
static Set<Map.Entry<String, Object>> jsonObjectEntrySet(JSONObject jsonObject) {
    HashSet<Map.Entry<String, Object>> result = new HashSet<Map.Entry<String, Object>>();

    @SuppressWarnings("unchecked")
    Iterator<String> keys = (Iterator<String>) jsonObject.keys();
    while (keys.hasNext()) {
        String key = keys.next();
        Object value = jsonObject.opt(key);
        result.add(new JSONObjectEntry(key, value));
    }

    return result;
}
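Since HashSet deduplicates through the elements' equals and hashCode methods, whether two entries could ever collapse here depends on how JSONObjectEntry implements them; in practice keys are unique within a JSONObject, so each add inserts a distinct entry.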
From source file:com.pclinuxos.rpm.util.FileUtils.java
/**
 * Generates a string listing the differences between two files, here two
 * lists of package names.
 *
 * @param before the original package list
 * @param after the content of this file will be searched for in "before"
 * @return null if an error occurred, otherwise a string of entries that are not in "before"
 */
public static String compareRpmLists(File before, File after) {
    String out = "";
    try {
        BufferedReader l1Reader = new BufferedReader(new FileReader(before));
        BufferedReader l2Reader = new BufferedReader(new FileReader(after));
        HashSet<String> bef = new HashSet<String>();
        HashSet<String> aft = new HashSet<String>();
        String read = l1Reader.readLine();
        while (read != null) {
            bef.add(read);
            read = l1Reader.readLine();
        }
        l1Reader.close();
        read = l2Reader.readLine();
        while (read != null) {
            aft.add(read);
            read = l2Reader.readLine();
        }
        l2Reader.close();
        for (String pkg : aft) {
            // collect entries of "after" missing from "before"; the original checked
            // bef.contains(pkg) and started with out == null, which contradicts the javadoc
            // and would prefix the result with "null"
            if (!bef.contains(pkg)) {
                out = out + " " + pkg;
            }
        }
    } catch (FileNotFoundException e) {
        out = null;
    } catch (IOException e) {
        out = null;
    }
    return out;
}
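For reference, the same set difference can be written more compactly with HashSet's bulk removeAll. A sketch, not part of the original source; it assumes bef and aft are built as above and Java 8+ for String.join:

    // keep only entries of "after" that are not present in "before"
    HashSet<String> added = new HashSet<String>(aft);
    added.removeAll(bef);
    String out = String.join(" ", added);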
From source file:Main.java
private static void walkNodes(Node nodeIn, HashSet<String> hElements) {
    if (nodeIn == null)
        return;
    NodeList nodes = nodeIn.getChildNodes();
    for (int i = 0; i < nodes.getLength(); i++) {
        Node n = nodes.item(i);
        if (n.getNodeType() == Node.ELEMENT_NODE) {
            String sNodeName = n.getNodeName();
            if (!hElements.contains(sNodeName))
                hElements.add(sNodeName);
            walkNodes(n, hElements);
        }
    }
}
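Note that the contains guard is redundant: HashSet.add is already a no-op for an element that is present, returning false in that case, so the loop body could simply call hElements.add(sNodeName).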
From source file:fr.mixit.android.io.JSONHandler.java
/**
 * Returns those id's from a {@link android.net.Uri} that were not found in a given set.
 */
protected static HashSet<String> getLostIds(Set<String> ids, Uri uri, String[] projection, int idColumnIndex,
        ContentResolver resolver) {
    final HashSet<String> lostIds = Sets.newHashSet();
    final Cursor cursor = resolver.query(uri, projection, null, null, null);
    try {
        while (cursor.moveToNext()) {
            final String id = cursor.getString(idColumnIndex);
            if (!ids.contains(id)) {
                lostIds.add(id);
            }
        }
    } finally {
        cursor.close();
    }
    if (!lostIds.isEmpty()) {
        Log.d(TAG, "Found " + lostIds.size() + " lost ids for " + uri.toString() + " that need to be removed.");
    }
    return lostIds;
}
From source file:com.vertica.hivestoragehandler.VerticaOutputFormat.java
/**
 * Optionally called at the end of a job to optimize any newly created and
 * loaded tables. Useful for new tables with more than 100k records.
 *
 * @param conf
 * @throws Exception
 */
public static void optimize(Configuration conf) throws Exception {
    VerticaConfiguration vtconfig = new VerticaConfiguration(conf);
    Connection conn = vtconfig.getConnection(true);

    // TODO: consider more tables and skip tables with non-temp projections
    VerticaRelation vTable = new VerticaRelation(vtconfig.getOutputTableName());
    Statement stmt = conn.createStatement();
    ResultSet rs = null;
    HashSet<String> tablesWithTemp = new HashSet<String>();

    // for now just add the single output table
    tablesWithTemp.add(vTable.getQualifiedName().toString());

    // map from table name to set of projection names
    HashMap<String, Collection<String>> tableProj = new HashMap<String, Collection<String>>();
    rs = stmt.executeQuery("select projection_schema, anchor_table_name, projection_name from projections;");
    while (rs.next()) {
        String ptable = rs.getString(1) + "." + rs.getString(2);
        if (!tableProj.containsKey(ptable)) {
            tableProj.put(ptable, new HashSet<String>());
        }
        tableProj.get(ptable).add(rs.getString(3));
    }

    for (String table : tablesWithTemp) {
        if (!tableProj.containsKey(table)) {
            throw new RuntimeException("Cannot optimize table with no data: " + table);
        }
    }

    String designName = Integer.toString(conn.hashCode());
    stmt.execute("select dbd_create_workspace('" + designName + "')");
    stmt.execute("select dbd_create_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_add_design_tables('" + designName + "', '"
            + vTable.getQualifiedName().toString() + "')");
    stmt.execute("select dbd_populate_design('" + designName + "', '" + designName + "')");

    // Execute
    stmt.execute("select dbd_create_deployment('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_add_deployment_design('" + designName + "', '" + designName + "', '"
            + designName + "')");
    stmt.execute("select dbd_add_deployment_drop('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_execute_deployment('" + designName + "', '" + designName + "')");

    // Cleanup
    stmt.execute("select dbd_drop_deployment('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_remove_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_drop_design('" + designName + "', '" + designName + "')");
    stmt.execute("select dbd_drop_workspace('" + designName + "')");
}
From source file:main.java.refinement_class.Refinement.java
public static Output_Refinement run2(String refinemtType, String hspl_mspl, String sPSA_SG, String userPSA,
        String marketPSA, String subject_string, String content_string, String target_string,
        String optimizationType_string, String maxEvaluationsNo_string) {

    /* VALUES TO RETURN
       "application_graph": "<xml>",
       "MSPL": [
           "<xml>",
           "<xml>",
           "<xml>"
       ],
       "remediation": "<xml>"
    */
    Output_Refinement output_refinement = new Output_Refinement();

    String output = "";
    try {
        // base64-decode all inputs
        hspl_mspl = Useful.dencode64(hspl_mspl);
        sPSA_SG = Useful.dencode64(sPSA_SG);
        userPSA = Useful.dencode64(userPSA);
        marketPSA = Useful.dencode64(marketPSA);
        subject_string = Useful.dencode64(subject_string);
        content_string = Useful.dencode64(content_string);
        target_string = Useful.dencode64(target_string);

        Configuration conf = new Configuration(refinemtType, hspl_mspl, sPSA_SG, userPSA, marketPSA,
                subject_string, content_string, target_string, optimizationType_string,
                maxEvaluationsNo_string);

        Initialization.run(conf);
        LOG.info("\n\nInitialization\n");
        PSA_Selection.run(conf);
        LOG.info("\n\n run \n");

        if (conf.getMap().isIsEnforciability()) {
            Optimization.run(conf);
            LOG.info("\n\nOptimization\n");
            SG_Generator.run(conf);
            LOG.info("\n\nSG_Generator.run\n");
            MSPL_Generator.run(conf);
            LOG.info("\n\nMSPL_Generator\n");

            String ag = conf.mashalSG();
            output_refinement.setApplication_grap(Useful.encode64(ag));

            HashSet<String> mspls = conf.mashalMSPLs();
            HashSet<String> mspls_64 = new HashSet<String>();
            for (String m : mspls) {
                mspls_64.add(Useful.encode64(m));
            }
            // set once after the loop; the original called setMspls inside the loop on every iteration
            output_refinement.setMspls(mspls_64);

            output += ag;
            for (String s : mspls) {
                output += s;
            }
        } else {
            output_refinement.setApplication_grap("");
            HashSet<String> mspls_64 = new HashSet<String>();
            output_refinement.setMspls(mspls_64);

            String rem = conf.mashalRemediation();
            output += rem;
            output_refinement.setRemediation(Useful.encode64(rem));
        }
    } catch (Exception e) {
        LOG.error("\n\n*********ERROR*********\n\n");
        LOG.error(Useful.getStackTrace(e));
    }
    return output_refinement;
}