List of usage examples for java.lang.System.gc()
public static void gc()
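System.gc() only suggests that the JVM run the garbage collector; the JVM is free to ignore the request. Before the project examples below, here is a minimal, self-contained sketch of the most common usage, calling the method before sampling heap usage with Runtime (the class and variable names here are illustrative, not taken from the examples that follow):

public class GcHintExample {
    public static void main(String[] args) {
        Runtime runtime = Runtime.getRuntime();
        // Suggest a collection so the subsequent measurement is less noisy;
        // the JVM may ignore this hint.
        System.gc();
        long usedBytes = runtime.totalMemory() - runtime.freeMemory();
        System.out.println("Approximate used heap: " + (usedBytes / 1024 / 1024) + " MB");
    }
}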
From source file:com.thesmartweb.swebrank.Total_analysis.java
/**
 * Method to call search analysis for every query and to save the wordlists
 * @param wordList_previous the previous wordlist to check convergence
 * @param iteration_counter the iteration number
 * @param example_dir the directory to save the files
 * @param domain the domain we analyze
 * @param enginechoice the search engines chosen
 * @param queries the queries we search for
 * @param results_number the amount of results for each query
 * @param top_visible the amount of results if we use Visibility Score (http://www.advancedwebranking.com/user-guide/html/en/ch08s06.html)
 * @param mozMetrics the metrics of Moz chosen
 * @param moz_threshold_option flag if we are going to use the Moz threshold or not
 * @param moz_threshold the threshold for the Moz metrics
 * @param top_count_moz the amount of links to keep if we use Moz for evaluation
 * @param ContentSemantics the choice of Content Semantic Analysis algorithm that we are going to use
 * @param SensebotConcepts the amount of concepts to be recognized if Sensebot is used
 * @param SWebRankSettings the settings for LDA and SWebRank in general (check the ReadInput class)
 * @param config_path the configuration path to get all the API keys
 */
public void perform(List<String> wordList_previous, int iteration_counter, String example_dir, String domain,
        List<Boolean> enginechoice, List<String> queries, int results_number, int top_visible,
        List<Boolean> mozMetrics, boolean moz_threshold_option, double moz_threshold, int top_count_moz,
        List<Boolean> ContentSemantics, int SensebotConcepts, List<Double> SWebRankSettings,
        String config_path) {
    // For every query it performs the search analysis function, which includes submission of the
    // query to the search engines, getting the results according to the options selected, parsing
    // the websites, running LDA on their content and keeping the top content.
    for (String query : queries) {
        System.gc();
        System.gc();
        System.gc();
        List<String> wordList = new ArrayList<>();
        // We call Search_analysis, which does all the work needed and returns the wordlists
        Search_analysis sa = new Search_analysis();
        // The following string represents the directory for each query
        String example_directory = example_dir + query + "-query//";
        // We set the alpha variable of the LDA algorithm to the value said to be optimal in the LDA paper
        double alpha = 50 / SWebRankSettings.get(1);
        // We call the perform method of Search_analysis
        wordList = sa.perform(iteration_counter, example_directory, domain, enginechoice, query,
                results_number, top_visible, SWebRankSettings, alpha, mozMetrics, top_count_moz,
                moz_threshold_option, moz_threshold, ContentSemantics, SensebotConcepts, config_path);
        // We add the wordlist to the vector of wordlists
        ArrayList<String> wordArrayList = new ArrayList<>(wordList);
        array_wordLists.add(wordArrayList);
        // We also add the wordlist to the total wordlist
        wordList_total.addAll(wordList);
    }
    // We are going to check the convergence rate
    CheckConvergence cc = new CheckConvergence();
    // Here we check the convergence between the two wordlists, the new and the previous;
    // the convergence percentage of this iteration is saved in Elasticsearch
    convergence = cc.ConvergenceCalc(wordList_total, wordList_previous);
    //Node node = nodeBuilder().client(true).clusterName("lshrankldacluster").node();
    //Client client = node.client();
    ReadInput ri = new ReadInput();
    List<String> elasticIndexes = ri.GetKeyFile(config_path, "elasticSearchIndexes");
    Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "lshrankldacluster").build();
    Client client = new TransportClient(settings)
            .addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
    JSONObject objEngineLevel = new JSONObject();
    objEngineLevel.put("RoundContent", wordList_total);
    objEngineLevel.put("Round", iteration_counter);
    objEngineLevel.put("Convergence", convergence);
    String id = domain + "/" + iteration_counter;
    IndexRequest indexReq = new IndexRequest(elasticIndexes.get(1), "content", id);
    indexReq.source(objEngineLevel);
    IndexResponse indexRes = client.index(indexReq).actionGet();
    client.close();
    //node.close();
}
From source file:com.vaadin.server.AbstractClientConnectorTest.java
@Test
public void stateTypeCacheDoesNotLeakMemory() throws IllegalArgumentException, IllegalAccessException,
        NoSuchFieldException, SecurityException, InterruptedException, ClassNotFoundException {
    Field stateTypeCacheField = AbstractClientConnector.class.getDeclaredField("STATE_TYPE_CACHE");
    stateTypeCacheField.setAccessible(true);
    Map<Class<?>, ?> stateTypeCache = (Map<Class<?>, ?>) stateTypeCacheField.get(null);

    WeakReference<Class<?>> classRef = loadClass("com.vaadin.server.AbstractClientConnector");
    stateTypeCache.put(classRef.get(), null);
    int size = stateTypeCache.size();

    assertNotNull("Class should not yet be garbage collected", classRef.get());

    for (int i = 0; i < 100; ++i) {
        System.gc();
        if (stateTypeCache.size() < size) {
            break;
        }
        Thread.sleep(100);
    }

    assertTrue(stateTypeCache.size() < size);
    assertNull("Class should be garbage collected", classRef.get());
}
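Because System.gc() is only a hint, tests like the one above retry the collection in a loop and re-check a WeakReference (or a cache size) between attempts instead of asserting after a single call. A minimal sketch of that retry pattern, assuming the referent has no remaining strong references (all names here are illustrative):

import java.lang.ref.WeakReference;

public class GcRetryExample {
    public static void main(String[] args) throws InterruptedException {
        WeakReference<Object> ref = new WeakReference<>(new Object());
        // Retry a few times: a single System.gc() call is not guaranteed to collect anything.
        for (int i = 0; i < 100 && ref.get() != null; ++i) {
            System.gc();
            Thread.sleep(10);
        }
        System.out.println(ref.get() == null ? "referent collected" : "referent still reachable");
    }
}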
From source file:com.imagelake.control.KeyWordsDAOImp.java
@Override
public List<KeyWords> listKeyWords(String key) {
    List<KeyWords> lis = new ArrayList<KeyWords>();
    try {
        String sql = "SELECT * FROM key_words WHERE key_word LIKE '%" + key
                + "%' ORDER BY images_images_id DESC";
        PreparedStatement ps = DBFactory.getConnection().prepareStatement(sql);
        ResultSet rs = ps.executeQuery();
        while (rs.next()) {
            System.gc();
            KeyWords w = new KeyWords();
            w.setKey_words_id(rs.getInt(1));
            w.setKey_word(rs.getString(2));
            w.setImages_images_id(rs.getInt(3));
            System.out.println("image id:" + rs.getInt(3));
            lis.add(w);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return lis;
}
From source file:es.udc.gii.rosamituscma.ConstrainedMotionEvolutionaryAlgorithm.java
@Override
protected synchronized void replace(Population toPopulation) {
    // "toPopulation" is a sub-population of buffer_population:
    while (buffer_population.size() < this.getPopulation().getSize()) {
        try {
            wait(10);
        } catch (InterruptedException ex) {
            System.out.println(
                    "Exception at CMARosaMitusEvolutionaryAlgorithm reproduce phase: " + ex.getMessage());
            System.exit(0);
        }
    }
    // The buffer population is the one that holds the quality information:
    Population sub_population = new Population(
            this.buffer_population.subList(0, this.getPopulation().getSize()));
    if (this.getReplaceChain() != null) {
        super.replace(sub_population);
    }
    synchronized (this.buffer_population) {
        for (int i = 0; i < this.getPopulation().getSize(); i++) {
            this.buffer_population.remove(0);
        }
        System.gc();
    }
}
From source file:br.com.hslife.orcamento.controller.ArquivoController.java
@Override
public String startUp() {
    initializeEntity();
    // Invoke the garbage collector
    System.gc();
    return "/pages/Arquivo/listArquivo";
}
From source file:com.gargoylesoftware.htmlunit.javascript.configuration.JavaScriptConfigurationTest.java
/**
 * Regression test for bug 2854240.
 * This test was throwing an OutOfMemoryError when the bug existed.
 * @throws Exception if an error occurs
 */
@Test
public void memoryLeak() throws Exception {
    long count = 0;
    while (count++ < 3000) {
        final BrowserVersion browserVersion = new BrowserVersion(
                "App" + RandomStringUtils.randomAlphanumeric(20),
                "Version" + RandomStringUtils.randomAlphanumeric(20),
                "User Agent" + RandomStringUtils.randomAlphanumeric(20), 1);
        JavaScriptConfiguration.getInstance(browserVersion);
        if (LOG.isInfoEnabled()) {
            LOG.info("count: " + count + "; memory stats: " + getMemoryStats());
        }
    }
    System.gc();
}
From source file:interfazGrafica.frmMoverRFC.java
public void mostrarPDF() {
    String curp = "";
    curp = txtCapturaCurp.getText();
    ArrayList<DocumentoRFC> Docs = new ArrayList<>();
    DocumentoRFC sigExp;
    DocumentoRFC temporal;
    RFCescaneado tempo = new RFCescaneado();
    //tempo.borrartemporal();
    sigExp = expe.obtenerArchivosExp();
    Nombre_Archivo = sigExp.getNombre();
    nombreArchivo.setText(Nombre_Archivo);
    if (!Nombre_Archivo.equals("")) {
        doc = sigExp;
        System.out.println("Obtuvo el nombre del archivo.");
        System.out.println(doc.ruta + doc.nombre);
        String file = "C:\\escaneos\\Local\\Temporal\\" + doc.nombre;
        File arch = new File(file);
        System.out.println("Encontró el siguiente archivo:");
        System.out.println(file);
        System.out.println("");
        if (arch.exists()) {
            System.out.println("El archivo existe");
        }
        try {
            System.out.println("Entró al try");
            RandomAccessFile raf = new RandomAccessFile(file, "r");
            System.out.println("Reconoció el archivo " + file);
            FileChannel channel = raf.getChannel();
            System.out.println("Se abrió el canal");
            ByteBuffer buf = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
            System.out.println("Channel map");
            PDFFile pdffile = new PDFFile(buf);
            System.out.println("Creando un pdf file");
            PDFPage page = pdffile.getPage(0);
            System.out.println("Obteniendo la pagina con " + 0);
            panelpdf2.showPage(page);
            System.out.println("mostrando el panel pdf2");
            repaint();
            System.gc();
            buf.clear();
            raf.close();
            System.gc();
        } catch (Exception ioe) {
            JOptionPane.showMessageDialog(null, "Error al abrir el archivo");
            ioe.printStackTrace();
        }
    }
    // tempo.borrartemporal();
}
From source file:dbs_project.storage.performance.MemoryEfficiencyTest.java
private static void printMemory() {
    for (int i = 0; i < 5; ++i) {
        System.gc();
    }
    double memory = ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024f / 1024f);
    Utils.getOut().println("Memory footprint: " + memory + " MB");
    Utils.getOut().println();
    Utils.getOut()
            .println("<measurements layer=\"storage\">\r\n"
                    + "<measurement><name>footprint</name><scale>2000</scale><value>" + memory
                    + "</value></measurement>\r\n" + "</measurements>");
    Utils.getOut().println();
}
From source file:com.android.yijiang.kzx.http.DataAsyncHttpResponseHandler.java
/**
 * Returns byte array of response HttpEntity contents
 *
 * @param entity can be null
 * @return response entity body or null
 * @throws java.io.IOException if reading entity or creating byte array failed
 */
@Override
byte[] getResponseData(HttpEntity entity) throws IOException {
    byte[] responseBody = null;
    if (entity != null) {
        InputStream instream = entity.getContent();
        if (instream != null) {
            long contentLength = entity.getContentLength();
            if (contentLength > Integer.MAX_VALUE) {
                throw new IllegalArgumentException("HTTP entity too large to be buffered in memory");
            }
            if (contentLength < 0) {
                contentLength = BUFFER_SIZE;
            }
            try {
                ByteArrayBuffer buffer = new ByteArrayBuffer((int) contentLength);
                try {
                    byte[] tmp = new byte[BUFFER_SIZE];
                    int l;
                    // do not send messages if request has been cancelled
                    while ((l = instream.read(tmp)) != -1 && !Thread.currentThread().isInterrupted()) {
                        buffer.append(tmp, 0, l);
                        sendProgressDataMessage(copyOfRange(tmp, 0, l));
                    }
                } finally {
                    AsyncHttpClient.silentCloseInputStream(instream);
                }
                responseBody = buffer.toByteArray();
            } catch (OutOfMemoryError e) {
                System.gc();
                throw new IOException("File too large to fit into available memory");
            }
        }
    }
    return responseBody;
}
From source file:gov.nih.nci.cabig.caaers.CaaersDbTestCase.java
@Override
protected void setUp() throws Exception {
    super.setUp();
    applicationContext = getDeployedApplicationContext();
    ((CaaersJavaMailSender) applicationContext.getBean("mailer")).SUPRESS_MAIL_SEND_EXCEPTION = true;
    setUpAuthorization();
    setUpAuditing();
    setUpSession();
    System.gc();
}