Usage examples for java.util.List.isEmpty()
boolean isEmpty();
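isEmpty() returns true exactly when the list contains no elements, i.e. when size() == 0. A minimal sketch of a typical call (class and variable names below are illustrative only):

import java.util.ArrayList;
import java.util.List;

public class IsEmptyExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(); // hypothetical list for demonstration
        System.out.println(names.isEmpty());    // prints "true": the list has no elements yet
        names.add("alpha");
        System.out.println(names.isEmpty());    // prints "false": the list now has one element
    }
}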
From source file:com.thesmartweb.swebrank.Main.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    Path input_path = Paths.get("//mnt//var//DBs//inputsL10//nba//"); // input directory
    String output_parent_directory = "//mnt//var//DBs//outputsConfL10//nba//"; // output directory
    String config_path = "//mnt//var//DBs//config//"; // configuration directory
    //---Disable apache log manually----
    //System.setProperty("org.apache.commons.logging.Log","org.apache.commons.logging.impl.NoOpLog");
    System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.Log4JLogger");
    //--------------Domain that is searched----------
    String domain = "";
    //------------------search engine related options----------------------
    List<String> queries = null;
    int results_number = 0; // the number of results that are returned from each search engine
    List<Boolean> enginechoice = null;
    //list element #0. True/False Bing
    //list element #1. True/False Google
    //list element #2. True/False Yahoo!
    //list element #3. True/False Merged
    //-----------Moz options---------------------
    List<Boolean> mozMetrics = null;
    //The list contains the Moz related input in the following order
    //list element #1. True/False, True we use Moz API, false not
    //list element #2. True if we use Domain Authority
    //list element #3. True if we use External MozRank
    //list element #4. True if we use MozRank
    //list element #5. True if we use MozTrust
    //list element #6. True if we use Subdomain MozRank
    //list element #7. True if we use Page Authority
    //only one is used (the first to be set to true)
    boolean moz_threshold_option = false; // set to true if we use the threshold
    Double moz_threshold = 0.0; // if we want to have a threshold in Moz
    int top_count_moz = 0; // if we want to get the Moz top-something results
    //---------------Semantic Analysis method----------------
    List<Boolean> ContentSemantics = null;
    int SensebotConcepts = 0; // the amount of concepts that Sensebot is going to recognize
    List<Double> SWebRankSettings = null;
    //------(string) directory that is going to be used later-----
    String output_child_directory;
    //-------we get all the paths of the txt (input) files from the input directory-------
    DataManipulation getfiles = new DataManipulation(); // class responsible for the extraction of paths
    Collection<File> inputs_files; // collection of the paths of the txt files
    inputs_files = getfiles.getinputfiles(input_path.toString(), "txt"); // retrieve all the paths of the input documents
    //------------read the txt files------------
    for (File input : inputs_files) {
        ReadInput ri = new ReadInput(); // class to read the input
        boolean check_reading_input = ri.perform(input);
        if (check_reading_input) {
            domain = ri.domain;
            //----------
            queries = ri.queries;
            results_number = ri.results_number;
            enginechoice = ri.enginechoice;
            //------------
            mozMetrics = ri.mozMetrics;
            moz_threshold_option = ri.moz_threshold_option;
            moz_threshold = ri.moz_threshold.doubleValue();
            //---------------
            ContentSemantics = ri.ContentSemantics;
            SWebRankSettings = ri.SWebRankSettings;
        }
        int top_visible = 0; // the amount of results you can get in the merged search engine
        //------if we choose to use a Moz metric or Visibility score for our ranking, we need to set the results_number for the search engines to its max, which is 50
        //-----we set the top results number for Moz or Visibility rank----
        if (mozMetrics.get(0) || enginechoice.get(3)) {
            if (mozMetrics.get(0)) {
                top_count_moz = results_number; // if Moz is true, top_count_moz gets the value of results_number
            }
            if (enginechoice.get(3)) {
                top_visible = results_number; // if the merged engine is true, top_visible gets the value of results_number
            }
            results_number = 50; // this is the max amount of results that you can get from the search engine APIs
        }
        //-----if we want to use Moz we should check first if it works
        if (mozMetrics.get(0)) {
            Moz Moz = new Moz();
            //---if it works, Moz remains true, otherwise it is set to false
            mozMetrics.add(0, Moz.check(config_path));
            //if it is false and we have chosen to use Visibility score with Moz, we reset back to the standard settings (ranking and not merged)
            //therefore, we reset the number of results from 50 to top_count_moz, which contained the original number of results
            if (!mozMetrics.get(0)) {
                if (!enginechoice.get(3)) {
                    results_number = top_count_moz;
                }
            }
        }
        //----------we set the wordLists that we are going to use---------------------
        List<String> finalList = new ArrayList<String>(); // finalList is going to contain all the content in the end
        Total_analysis ta = new Total_analysis(); // we call total analysis
        int iteration_counter = 0; // counts the iterations of the algorithm and is checked against the performance limit
        //this list of ArrayLists is going to contain all the wordLists that are produced for every term of the String[] query,
        //in order to calculate the NGD scores between every term of the wordList and the term that was used as query to produce the specific wordList
        List<ArrayList<String>> array_wordLists = new ArrayList<>();
        List<String> wordList_previous = new ArrayList<>();
        List<String> wordList_new = new ArrayList<>();
        double convergence = 0; // the convergence percentage
        String conv_percentages = ""; // string that contains all the convergence percentages
        DataManipulation wordsmanipulation = new DataManipulation(); // class to manipulate various word data (String, List<String>, etc.)
        do {
            //if we run the algorithm for the 1st time we already have the query, so we skip the block below that produces the new array of queries
            if (iteration_counter != 0) {
                wordList_previous = wordList_new;
                //we add the previous wordList to the finalList
                finalList = wordsmanipulation.AddAList(wordList_previous, finalList);
                List<String> query_new_list_total = new ArrayList<>();
                int iteration_previous = iteration_counter - 1;
                Combinations_Engine cn = new Combinations_Engine(); // the class that combines the terms produced
                for (String query : queries) {
                    List<String> ids = new ArrayList<>();
                    if (enginechoice.get(0)) {
                        String id = domain + "/" + query + "/bing" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    if (enginechoice.get(1)) {
                        String id = domain + "/" + query + "/google" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    if (enginechoice.get(2)) {
                        String id = domain + "/" + query + "/yahoo" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    ElasticGetWordList ESget = new ElasticGetWordList(); // gets the wordlist from Elasticsearch
                    List<String> maxWords = ESget.getMaxWords(ids, SWebRankSettings.get(9).intValue(), config_path); // get a max amount of words
                    int query_index = queries.indexOf(query);
                    int size_query_new = SWebRankSettings.get(10).intValue(); // the amount of new queries we are willing to create
                    //we create the new queries for every query of the previous round by combining the words produced from this query
                    List<String> query_new_list = cn.perform(maxWords, SWebRankSettings.get(7), queries,
                            SWebRankSettings.get(6), query_index, size_query_new, config_path);
                    //we add the list of new queries to the total list that contains all the new queries
                    query_new_list_total.addAll(query_new_list);
                    System.out.println("query pointer=" + query_index + "");
                }
                //---------------------the following cleans a list from nulls and duplicates
                query_new_list_total = wordsmanipulation.clearListString(query_new_list_total);
                //--------------we create the new directory where our files are going to be saved
                String txt_directory = FilenameUtils.getBaseName(input.getName());
                output_child_directory = output_parent_directory + txt_directory + "_level_" + iteration_counter + "//";
                //----------------append the wordlist to a file------------------
                wordsmanipulation.AppendWordList(query_new_list_total, output_child_directory + "queries_" + iteration_counter + ".txt");
                if (query_new_list_total.size() < 1) {
                    break; // if we don't create new queries we end the loop
                }
                //total analysis does all the work and returns what we need
                ta = new Total_analysis();
                ta.perform(wordList_previous, iteration_counter, output_child_directory, domain, enginechoice,
                        query_new_list_total, results_number, top_visible, mozMetrics, moz_threshold_option,
                        moz_threshold.doubleValue(), top_count_moz, ContentSemantics, SensebotConcepts,
                        SWebRankSettings, config_path);
                //we get the array of wordlists
                array_wordLists = ta.getarray_wordLists();
                //get the wordlist that includes all the new queries
                wordList_new = ta.getwordList_total();
                //---------------------the following cleans a list from nulls and duplicates-------------
                wordList_new = wordsmanipulation.clearListString(wordList_new);
                //----------------append the wordlist to a file--------------------
                wordsmanipulation.AppendWordList(wordList_new, output_child_directory + "wordList.txt");
                //the convergence percentage of this iteration
                convergence = ta.getConvergence(); // the convergence score is used to check convergence
                //a string that contains all the convergence percentages for each round, separated by a \n character
                conv_percentages = conv_percentages + "\n" + convergence;
                //a file that is going to include the convergence percentages
                wordsmanipulation.AppendString(conv_percentages, output_child_directory + "convergence_percentage.txt");
                //we add the new wordList to the finalList
                finalList = wordsmanipulation.AddAList(wordList_new, finalList);
                //we set the query array to be equal to the new total query list that we have created
                queries = query_new_list_total;
                //we increment the iteration_counter to count the iterations of the algorithm against the performance limit
                iteration_counter++;
            } else { // the following code is performed on the 1st run of the loop
                //------------we extract the parent path of the file
                String txt_directory = FilenameUtils.getBaseName(input.getName());
                //----------we create a string that is used for the corresponding output directory
                output_child_directory = output_parent_directory + txt_directory + "_level_" + iteration_counter + "//";
                //we call the total analysis function
                ta.perform(wordList_new, iteration_counter, output_child_directory, domain, enginechoice,
                        queries, results_number, top_visible, mozMetrics, moz_threshold_option,
                        moz_threshold.doubleValue(), top_count_moz, ContentSemantics, SensebotConcepts,
                        SWebRankSettings, config_path);
                //we get the array of wordlists
                array_wordLists = ta.getarray_wordLists();
                //get the wordlist that includes all the new queries
                wordList_new = ta.getwordList_total();
                //---------------------the following cleans a list from nulls and duplicates
                wordList_new = wordsmanipulation.clearListString(wordList_new);
                //----------------append the wordlist to a file
                wordsmanipulation.AppendWordList(wordList_new, output_child_directory + "wordList.txt");
                //-----------------------------------------
                iteration_counter++; // increase the iteration_counter that counts the iterations of the algorithm
            }
        } while (convergence < SWebRankSettings.get(5).doubleValue()
                && iteration_counter < SWebRankSettings.get(8).intValue()); // while the convergence percentage is below the limit and the iteration_counter is below the performance limit
        if (iteration_counter == 1) {
            finalList = wordsmanipulation.AddAList(wordList_new, finalList);
        }
        //--------------------content List----------------
        if (!finalList.isEmpty()) {
            //---------------------the following cleans the final list from nulls and duplicates
            finalList = wordsmanipulation.clearListString(finalList);
            //write the keywords to a file
            boolean flag_file = false; // boolean flag to declare a successful write to file
            flag_file = wordsmanipulation.AppendWordList(finalList, output_parent_directory + "total_content.txt");
            if (!flag_file) {
                System.out.print("can not create the content file for: " + output_parent_directory + "total_content.txt");
            }
        }
        //we save the total content with its convergence on the Elasticsearch cluster in a separate index
        //Node node = nodeBuilder().client(true).clusterName("lshrankldacluster").node();
        //Client client = node.client();
        //get the Elasticsearch indexes in a list
        List<String> elasticIndexes = ri.GetKeyFile(config_path, "elasticSearchIndexes");
        Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "lshrankldacluster").build();
        Client client = new TransportClient(settings)
                .addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
        JSONObject objEngineLevel = new JSONObject();
        objEngineLevel.put("TotalContent", finalList); // we save the total content
        objEngineLevel.put("Convergences", conv_percentages); // we save the convergence percentages
        IndexRequest indexReq = new IndexRequest(elasticIndexes.get(0), "content", domain); // we also save the domain
        indexReq.source(objEngineLevel);
        IndexResponse indexRes = client.index(indexReq).actionGet();
        //node.close();
        client.close();
        //----------------------convergence percentages written to file---------------
        //use the conv_percentages string
        if (conv_percentages.length() != 0) {
            boolean flag_file = false; // boolean flag to declare a successful write to file
            flag_file = wordsmanipulation.AppendString(conv_percentages, output_parent_directory + "convergence_percentages.txt");
            if (!flag_file) {
                System.out.print("can not create the convergence file for: " + output_parent_directory + "convergence_percentages.txt");
            }
        }
    }
}
From source file:Main.java
public static List<String> merge(final List<String> list, final int index) {
    if (list.isEmpty()) {
        throw new IndexOutOfBoundsException("Cannot merge empty list");
    } else if (index + 1 >= list.size()) {
        throw new IndexOutOfBoundsException("Cannot merge last element");
    } else {
        final List<String> result = new ArrayList<String>(list);
        result.set(index, list.get(index) + list.get(index + 1));
        result.remove(index + 1);
        return result;
    }
}
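For illustration, a hypothetical call to the merge helper above (assuming java.util.Arrays is imported): merging at index 1 concatenates that element with its right-hand neighbour and removes the neighbour.

List<String> parts = Arrays.asList("a", "b", "c"); // hypothetical input
List<String> merged = merge(parts, 1);
System.out.println(merged); // prints "[a, bc]"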
From source file:Main.java
public static <T> List<T> head(List<T> col) {
    return col.isEmpty() ? col : col.subList(0, col.size() - 1);
}
From source file:Main.java
public static <T> T last(List<T> list) {
    if (list.isEmpty()) {
        return null;
    }
    return list.get(list.size() - 1);
}
From source file:Main.java
public static <T> List<T> tail(List<T> col) {
    return col.isEmpty() ? col : col.subList(1, col.size());
}
From source file:Main.java
/**
 * @return the last element in the list, or empty if the list is empty
 */
public static <E> Optional<E> last(List<E> list) {
    if (list.isEmpty()) {
        return Optional.empty();
    }
    return Optional.of(list.get(list.size() - 1));
}
From source file:Main.java
/**
 * Provides the first item from a list, unless the list is <tt>null</tt> or empty, in which case <tt>null</tt> is
 * returned. This is useful because List.get(0) throws an exception if the list is empty.
 *
 * @param list The list to take the first item from.
 * @param <T>  The type of the items in the list.
 *
 * @return The first item from the list, or <tt>null</tt> if the list is <tt>null</tt> or empty.
 */
public static <T> T first(List<T> list) {
    // Check for null before calling isEmpty() so a null list does not throw a NullPointerException.
    return list == null || list.isEmpty() ? null : list.get(0);
}
From source file:Main.java
public static boolean isEmptyOrNull(List list) {
    return list == null || list.isEmpty();
}
From source file:Main.java
public static <T> List<T> readOnlyCopy(List<T> orig) {
    if (orig.isEmpty()) {
        return Collections.emptyList();
    }
    if (orig.size() == 1) {
        return Collections.singletonList(orig.get(0));
    }
    ArrayList<T> copy = new ArrayList<>(orig.size());
    copy.addAll(orig);
    return Collections.unmodifiableList(copy);
}
From source file:Main.java
public static boolean isListEmpty(List list) {
    return list == null || list.isEmpty();
}