List of usage examples for java.util.ArrayList.clear()

public void clear()

Removes all of the elements from this list. The list will be empty after this call returns.
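clear() drops every element but keeps the list's backing array, so a cleared list can be refilled without reallocating. A minimal sketch of the call:

    import java.util.ArrayList;

    public class ClearDemo {
        public static void main(String[] args) {
            ArrayList<String> names = new ArrayList<>();
            names.add("alpha");
            names.add("beta");
            names.clear();                        // drops both elements
            System.out.println(names.size());     // 0
            System.out.println(names.isEmpty());  // true
        }
    }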
From source file:controllers.FileHandler.java
    /**
     * Parses a raw line from a log file into tokens.
     *
     * @param strRawString the line to be tokenized
     * @return a two-element list of tokens
     */
    private static ArrayList<String> ParseLogFile(String strRawString) {
        ArrayList<String> strList = new ArrayList<>();
        strList.clear(); // redundant on a freshly created list, but harmless
        if (strRawString.matches(".*\\[.*\\].*")) {
            // lines containing [...] are split at "-" delimiters
            String[] tokens = strRawString.split("\\s*-\\s*");
            strList.add(tokens[0]);
            strList.add(tokens.length > 1 ? tokens[1] : ""); // guard against lines without a "-"
        } else {
            strList.add("");
            strList.add(strRawString);
        }
        return strList;
    }
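A hypothetical invocation from inside the same class; the sample log line is made up, but it has the bracketed form the regex expects:

    ArrayList<String> tokens = ParseLogFile("[INFO] - server started");
    // tokens.get(0) -> "[INFO]"
    // tokens.get(1) -> "server started"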
From source file:Main.java
    public static <T> ArrayList<T> shuffle(ArrayList<T> population, int sample) {
        ArrayList<T> newList = new ArrayList<T>();
        ArrayList<T> ret = new ArrayList<T>();
        newList.addAll(population); // work on a copy so the caller's list is untouched
        Collections.shuffle(newList);
        ret.addAll(newList);
        // trim the shuffled copy down to the requested sample size
        for (int i = sample; i < ret.size(); i++) {
            ret.remove(i);
            i--;
        }
        newList.clear(); // release the working copy's contents
        return ret;
    }
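A short usage sketch (the element values are made up); because the method shuffles an internal copy, the input list survives the call:

    ArrayList<Integer> pool = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5));
    ArrayList<Integer> sample = shuffle(pool, 2);  // two random elements
    System.out.println(pool.size());               // still 5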
From source file:com.imolinfo.offline.CrossFoldValidation.java
    public static void invokePipeline(JavaSparkContext jsc)
            throws IOException, ClassNotFoundException, InstantiationException, IllegalAccessException {
        JavaRDD<Document> trainingSet, testSet;
        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);

        // load the dataset and partition it into smaller pieces
        Properties prop = GlobalVariable.getInstance().getProperties();
        DocumentProvider tp = (DocumentProvider) Class.forName(prop.getProperty("sourceClass")).newInstance();
        JavaRDD<Document> corpus = tp.getTextFromDs(jsc,
                prop.getProperty("splitDatasetPath") + "/" + prop.getProperty("corpus"));
        DocumentStandardCleaner tc = new DocumentStandardCleaner();
        corpus = tc.cleanData(corpus);
        corpus.cache();

        ArrayList<JavaRDD<Document>[]> LabelPortionSplits = new ArrayList<JavaRDD<Document>[]>();
        for (final String label : GlobalVariable.getInstance().getIntLabelMap().values()) {
            JavaRDD<Document> labelPortion = corpus.filter(new Function<Document, Boolean>() {
                @Override
                public Boolean call(Document arg0) throws Exception {
                    return arg0.getLabel().equals(label);
                }
            });
            labelPortion.cache();
            JavaRDD<Document>[] labelPortionSplit = labelPortion
                    .randomSplit(new double[] { 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1 });
            LabelPortionSplits.add(labelPortionSplit);
        }

        // cache all the small per-label datasets, each partitioned according to the factor K
        for (JavaRDD<Document>[] line : LabelPortionSplits) {
            for (int j = 0; j < line.length; j++) {
                line[j].cache();
            }
        }

        JavaRDD<Document>[] splitDataset = LabelPortionSplits.get(0);
        ArrayList<JavaRDD<Document>> otherSplits = new ArrayList<JavaRDD<Document>>();
        String result = "";

        // combine the per-label splits so as to build stratified partitions
        for (int i = 0; i < splitDataset.length; i++) {
            otherSplits.clear(); // reuse the same buffer for every fold
            for (int j = 1; j < LabelPortionSplits.size(); j++) {
                JavaRDD<Document>[] target = LabelPortionSplits.get(j);
                target[i].cache();
                otherSplits.add(target[i]);
            }
            splitDataset[i] = jsc.union(splitDataset[i], otherSplits);
            splitDataset[i].cache();
        }

        // put the stratified partitions together so as to iterate over training and test sets
        List<JavaRDD<Document>> splitDocuments = Arrays.asList(splitDataset);
        for (int i = 0; i < splitDocuments.size(); i++) {
            testSet = splitDocuments.get(i);
            trainingSet = corpus.subtract(testSet);
            result = result + "\n" + trainAndTest(jsc, trainingSet, testSet);
        }
        System.out.println(result);
    }
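A sketch of driving the pipeline from a local Spark context. SparkConf and JavaSparkContext are standard Spark APIs, but the application name and master URL here are placeholders:

    SparkConf conf = new SparkConf()
            .setAppName("CrossFoldValidation")  // placeholder app name
            .setMaster("local[*]");             // local master, for testing only
    JavaSparkContext jsc = new JavaSparkContext(conf);
    try {
        CrossFoldValidation.invokePipeline(jsc); // declares several checked exceptions
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        jsc.stop();
    }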
From source file:Main.java
    public static <T> ArrayList<T> rand(ArrayList<T> population, int nSamplesNeeded) {
        Random r = new Random();
        ArrayList<T> ret = new ArrayList<T>();
        if (nSamplesNeeded > population.size() / 2) {
            // work on a copy: the original code aliased the caller's list here,
            // so the remove() and clear() below would have destroyed it
            ArrayList<T> original = new ArrayList<T>(population);
            while (nSamplesNeeded > 0) {
                int rand = r.nextInt(original.size());
                // accept a candidate only while its index is below the number of
                // samples still needed; each accepted element is removed so it
                // cannot be drawn twice
                if (rand < nSamplesNeeded) {
                    ret.add(original.get(rand));
                    original.remove(rand);
                    nSamplesNeeded--;
                }
            }
            original.clear(); // release the working copy's contents
        } else
            ret = shuffle(population, nSamplesNeeded);
        return ret;
    }
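With the copy fix above, both branches leave the input list intact. A hypothetical call (the element values are made up):

    ArrayList<String> ids = new ArrayList<>(Arrays.asList("a", "b", "c", "d"));
    ArrayList<String> picked = rand(ids, 3);  // three distinct random elements
    System.out.println(ids.size());           // still 4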
From source file:edu.isistan.carcha.util.PluginUtil.java
    /**
     * Removes the duplicates from the given list, in place.
     *
     * @param concerns the concerns to deduplicate
     */
    public static void removeDuplicates(List<Entity> concerns) {
        HashSet<Entity> hash = new HashSet<Entity>();
        hash.addAll(concerns);
        // the original built the result in a local list and discarded it, leaving
        // the argument unchanged; write back into the argument instead
        concerns.clear();
        concerns.addAll(hash);
    }
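A usage sketch, assuming Entity implements equals() and hashCode() (the HashSet round trip depends on it); loadConcerns() is a hypothetical source of entities:

    List<Entity> concerns = loadConcerns();  // hypothetical
    removeDuplicates(concerns);              // duplicates are now gone, in place

Note that routing through a HashSet discards the original ordering; a LinkedHashSet would preserve it.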
From source file:com.dnielfe.manager.utils.SortUtils.java
    public static ArrayList<String> sortList(ArrayList<String> content, String current) {
        if (content == null)
            return null; // the original guarded size() against null but then dereferenced anyway
        String[] items = new String[content.size()];
        int index = 0;
        content.toArray(items);

        switch (Settings.mSortType) {
        case SORT_ALPHA:
            Arrays.sort(items, Comparator_ALPH);
            content.clear();
            Collections.addAll(content, items);
            break;
        case SORT_SIZE:
            Arrays.sort(items, Comparator_SIZE);
            content.clear();
            // directories are inserted at the front (keeping their sorted order),
            // plain files are appended after them
            for (String a : items) {
                if (new File(current + "/" + a).isDirectory())
                    content.add(index++, a);
                else
                    content.add(a);
            }
            break;
        case SORT_TYPE:
            Arrays.sort(items, Comparator_TYPE);
            content.clear();
            for (String a : items) {
                if (new File(current + "/" + a).isDirectory())
                    content.add(index++, a);
                else
                    content.add(a);
            }
            break;
        case SORT_DATE:
            Arrays.sort(items, Comparator_DATE);
            content.clear();
            for (String a : items) {
                if (new File(current + "/" + a).isDirectory())
                    content.add(index++, a);
                else
                    content.add(a);
            }
            break;
        }
        return content;
    }
From source file:com.lovejoy777sarootool.rootool.utils.SortUtils.java
    public static void sortList(ArrayList<String> content, String current) {
        if (content == null)
            return; // same null guard as above: size() was checked but toArray() was not
        String[] items = new String[content.size()];
        int index = 0;
        content.toArray(items);

        switch (Settings.mSortType) {
        case SORT_ALPHA:
            Arrays.sort(items, Comparator_ALPH);
            content.clear();
            Collections.addAll(content, items);
            break;
        case SORT_SIZE:
            Arrays.sort(items, Comparator_SIZE);
            content.clear();
            // directories first, in sorted order; files appended after them
            for (String a : items) {
                if (new File(current + "/" + a).isDirectory())
                    content.add(index++, a);
                else
                    content.add(a);
            }
            break;
        case SORT_TYPE:
            Arrays.sort(items, Comparator_TYPE);
            content.clear();
            for (String a : items) {
                if (new File(current + "/" + a).isDirectory())
                    content.add(index++, a);
                else
                    content.add(a);
            }
            break;
        case SORT_DATE:
            Arrays.sort(items, Comparator_DATE);
            content.clear();
            for (String a : items) {
                if (new File(current + "/" + a).isDirectory())
                    content.add(index++, a);
                else
                    content.add(a);
            }
            break;
        }

        if (Settings.reverseListView()) {
            Collections.reverse(content);
        }
    }
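Both sort helpers rely on the same directories-first trick: a running insertion index pushes directories to the front in sorted order while files are appended behind them. The pattern in isolation, with sortedNames and isDirectory() as hypothetical stand-ins for the sorted array and the File check:

    int dirIndex = 0;
    ArrayList<String> out = new ArrayList<>();
    for (String name : sortedNames) {
        if (isDirectory(name))
            out.add(dirIndex++, name);  // directories keep their sorted order up front
        else
            out.add(name);              // files follow after the last directory
    }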
From source file:com.dnielfe.manager.utils.SimpleUtils.java
    @NotNull
    public static ArrayList<String> listFiles(String path) {
        ArrayList<String> mDirContent = new ArrayList<String>();
        boolean showhidden = Settings.mShowHiddenFiles;

        if (!mDirContent.isEmpty())
            mDirContent.clear(); // always a no-op on a freshly created list, kept from the original

        final File file = new File(path);

        if (file.exists() && file.canRead()) {
            String[] list = file.list();
            int len = list != null ? list.length : 0; // File.list() may return null even for readable dirs

            // add files/folders to the ArrayList depending on hidden status
            for (int i = 0; i < len; i++) {
                if (!showhidden) {
                    if (list[i].charAt(0) != '.')
                        mDirContent.add(path + "/" + list[i]);
                } else {
                    mDirContent.add(path + "/" + list[i]);
                }
            }
        } else {
            // unreadable directory: fall back to listing via root commands
            try {
                mDirContent = RootCommands.listFiles(file.getAbsolutePath(), showhidden);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        // sort files with a comparator
        SortUtils.sortList(mDirContent, file.getPath());
        return mDirContent;
    }
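A hypothetical call (the path is made up); the returned list holds absolute paths, already ordered according to Settings.mSortType:

    ArrayList<String> entries = SimpleUtils.listFiles("/sdcard/Music");
    for (String entry : entries) {
        System.out.println(entry);
    }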
From source file:de.fuberlin.agcsw.heraclitus.svont.client.core.ChangeLog.java
    public static void updateChangeLog(OntologyStore os, SVoNtProject sp, String user, String pwd) {
        // load the change log from the server
        try {
            // 1. fetch the changelog URI
            URI u = sp.getChangelogURI();

            // 2. search for change log OWL files
            DefaultHttpClient client = new DefaultHttpClient();
            client.getCredentialsProvider().setCredentials(
                    new AuthScope(u.getHost(), AuthScope.ANY_PORT, AuthScope.ANY_SCHEME),
                    new UsernamePasswordCredentials(user, pwd));

            HttpGet httpget = new HttpGet(u);
            System.out.println("executing request " + httpget.getRequestLine());
            ResponseHandler<String> responseHandler = new BasicResponseHandler();
            String response = client.execute(httpget, responseHandler);
            System.out.println(response);

            List<String> files = ChangeLog.extractChangeLogFiles(response);

            ArrayList<ChangeLogElement> changelog = sp.getChangelog();
            changelog.clear();

            // 4. sort the revisions
            for (int i = 0; i < files.size(); i++) {
                String fileName = files.get(i);
                System.out.println("rev sort: " + fileName);
                int rev = Integer.parseInt(fileName.split("\\.")[0]);
                changelog.add(new ChangeLogElement(URI.create(u + fileName), rev));
            }
            Collections.sort(changelog, new SortChangeLogsElementsByRev());

            // show the sorted changelog
            System.out.print("[");
            for (ChangeLogElement cle : changelog) {
                System.out.print(cle.getRev() + ",");
            }
            System.out.println("]");

            // 5. map revisions onto SVN revision information
            mapRevisionInformation(os, sp, changelog);

            // 6. load the change log files
            System.out.println("Load Changelog Files");
            for (String s : files) {
                System.out.println(s);
                String req = u + s;
                httpget = new HttpGet(req);
                response = client.execute(httpget, responseHandler);

                // save the changelog file persistently
                IFolder chlFold = sp.getChangeLogFolder();
                IFile chlFile = chlFold.getFile(s);
                if (!chlFile.exists()) {
                    chlFile.create(new ByteArrayInputStream(response.getBytes()), true, null);
                }
                os.getOntologyManager().loadOntology(new ReaderInputSource(new StringReader(response)));
            }
            System.out.println("Changelog Ontology successfully loaded");

            // show the loaded ontologies
            Set<OWLOntology> onts = os.getOntologyManager().getOntologies();
            for (OWLOntology o : onts) {
                System.out.println("loaded ont: " + o.getURI());
            }

            // 7. refresh the possibly modified main ontology
            os.getOntologyManager().reloadOntology(os.getMainOntologyLocalURI());

            // 8. recalculate the revision information of the concepts of this ontology
            sp.setRevisionMap(createConceptRevisionMap(os, sp));
            sp.saveRevisionMap();
            sp.saveRevisionInformationMap();

            // 9. show meta information on the concept tree
            ConceptTree.refreshConceptTree(os, os.getMainOntologyURI());
            OntologyInformation.refreshOntologyInformation(os, os.getMainOntologyURI());

            // shut down the HTTP connection
            client.getConnectionManager().shutdown();
        } catch (ClientProtocolException e1) {
            e1.printStackTrace();
        } catch (IOException e1) {
            e1.printStackTrace();
        } catch (OWLOntologyCreationException e) {
            e.printStackTrace();
        } catch (OWLReasonerException e) {
            e.printStackTrace();
        } catch (SVNException e) {
            e.printStackTrace();
        } catch (SVNClientException e) {
            e.printStackTrace();
        } catch (ParseException e) {
            e.printStackTrace();
        } catch (CoreException e) {
            e.printStackTrace();
        }
    }
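A minimal, hypothetical invocation; the OntologyStore and SVoNtProject instances would come from the hosting Eclipse plugin, and the credentials here are placeholders:

    // ontologyStore and svontProject are assumed to be obtained from the plugin's state
    ChangeLog.updateChangeLog(ontologyStore, svontProject, "alice", "secret");
    // afterwards svontProject.getChangelog() holds the ChangeLogElements sorted by revision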
From source file:org.apache.hadoop.hive.ql.exec.AnalysisOperator.java
    private static void deepCopyElements(Object[] keys, ObjectInspector[] keyObjectInspectors,
            ArrayList<Object> result, ObjectInspectorCopyOption copyOption) {
        result.clear(); // reuse the caller's buffer instead of allocating a new list per call
        for (int i = 0; i < keys.length; i++) {
            result.add(ObjectInspectorUtils.copyToStandardObject(keys[i], keyObjectInspectors[i], copyOption));
        }
    }
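The clear-and-refill idiom here avoids allocating a fresh list for every row in a hot operator path. The same pattern outside Hive, with rows, deepCopy() and process() as hypothetical stand-ins:

    ArrayList<Object> buffer = new ArrayList<>();
    for (Object[] row : rows) {
        buffer.clear();                   // drop the previous row's copies, keep the capacity
        for (Object cell : row) {
            buffer.add(deepCopy(cell));   // stand-in for copyToStandardObject(...)
        }
        process(buffer);                  // the consumer must not retain the shared buffer
    }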