List of usage examples for java.lang.System.gc()
public static void gc()
From source file:edu.iu.daal_naive.NaiveDaalCollectiveMapper.java
protected void mapCollective(KeyValReader reader, Context context) throws IOException, InterruptedException { // long startTime = System.currentTimeMillis(); List<String> trainingDataFiles = new LinkedList<String>(); //splitting files between mapper while (reader.nextKeyValue()) { String key = reader.getCurrentKey(); String value = reader.getCurrentValue(); LOG.info("Key: " + key + ", Value: " + value); System.out.println("file name : " + value); trainingDataFiles.add(value);/*ww w . ja va 2 s . com*/ } Configuration conf = context.getConfiguration(); Path pointFilePath = new Path(trainingDataFiles.get(0)); System.out.println("path = " + pointFilePath.getName()); FileSystem fs = pointFilePath.getFileSystem(conf); FSDataInputStream in = fs.open(pointFilePath); runNaive(trainingDataFiles, conf, context); // LOG.info("Total time of iterations in master view: " // + (System.currentTimeMillis() - startTime)); this.freeMemory(); this.freeConn(); System.gc(); }
From source file:com.amalto.workbench.editors.XSDDriver.java
public String outputXSD() { StringBuffer buffer = new StringBuffer(); FileReader reader;//from w w w . j av a 2s .c om try { reader = new FileReader(outPut); BufferedReader bufferedReader = new BufferedReader(reader); String nextLine = bufferedReader.readLine(); while (nextLine != null) { buffer.append(nextLine); buffer.append("\r\n");//$NON-NLS-1$ nextLine = bufferedReader.readLine(); } reader.close(); } catch (Exception e) { // TODO Auto-generated catch block log.error(e.getMessage(), e); } finally { boolean result = false; int tryCount = 0; while (!result && tryCount++ < 10) { System.gc(); result = delete(outPut); } } return buffer.toString(); }
From source file:at.tuwien.minimee.migration.engines.MiniMeeDefaultMigrationEngine.java
/**
 * Migrates a bytestream using the provided tool config and params, and
 * measures elapsed time.
 *
 * The input is written to a uniquely-named temp file, the external tool is
 * run via an {@code IRunner}, the produced output file is read back into the
 * {@link MigrationResult}, and timing/size measurements are recorded. Temp
 * files are cleaned up in the finally block regardless of outcome.
 *
 * @param data   the bytes to migrate
 * @param config tool configuration (command template, output extension, ...)
 * @param params extra command-line parameters for the tool
 * @param result receives the migrated object, report and measurements
 * @return true if the tool run reported success and no exception escaped
 */
public boolean migrate(byte[] data, ToolConfig config, String params, MigrationResult result) {
    // GC hint before timing so collection pauses are less likely to skew the run.
    System.gc();
    boolean success = false;
    /**
     * we use this variable - time - for uniquely identifying all
     * files. This is handed down the working methods and appended
     * to temp files. Using nanoTime is sufficiently safe to avoid duplicates.
     */
    long time = System.nanoTime();
    String inputFile = prepareInputFile(data, config, time);
    String outputFile = prepareOutputFile(config, time);
    try {
        String command = prepareCommand(config, params, inputFile, outputFile, time);
        IRunner runner = makeRunner(command, config);
        RunInfo r = runner.run();
        result.setSuccessful(r.isSuccess());
        result.setReport(r.getReport());
        // Defaults to empty so the size measurements below are safe even if
        // reading the output file fails.
        byte[] migratedFile = new byte[] {};
        try {
            migratedFile = FileUtils.getBytesFromFile(new File(outputFile));
            DigitalObject u = new DigitalObject();
            u.getData().setData(migratedFile);
            FormatInfo tFormat = new FormatInfo();
            tFormat.setDefaultExtension(config.getOutEnding());
            result.setTargetFormat(tFormat);
            result.setMigratedObject(u);
        } catch (Exception e) {
            // Missing/unreadable output marks the migration failed, but the
            // measurements below are still collected (best effort).
            log.error("Could not get outputfile " + outputFile);
            result.setSuccessful(false);
            log.error(e);
        }
        collectData(config, time, result);
        double length = migratedFile.length;
        double elapsed = r.getElapsedTimeMS();
        double elapsedPerMB = ((double) elapsed) / (getMByte(data));
        Measurement me = new Measurement(MigrationResult.MIGRES_ELAPSED_TIME, elapsed);
        result.getMeasurements().put(MigrationResult.MIGRES_ELAPSED_TIME, me);
        // Fill in every measurable property except machine-level ones
        // (those are collected elsewhere).
        for (MeasurableProperty property : getMeasurableProperties()) {
            if (!property.getName().startsWith("machine:")) {
                Measurement m = new Measurement();
                m.setProperty(property);
                PositiveFloatValue v = (PositiveFloatValue) property.getScale().createValue();
                if (property.getName().equals(MigrationResult.MIGRES_ELAPSED_TIME)) {
                    v.setValue(elapsed);
                    m.setValue(v);
                    result.getMeasurements().put(property.getName(), m);
                } else if (property.getName().equals(MigrationResult.MIGRES_ELAPSED_TIME_PER_MB)) {
                    v.setValue(elapsedPerMB);
                    m.setValue(v);
                    result.getMeasurements().put(property.getName(), m);
                } else if (property.getName().equals(MigrationResult.MIGRES_RELATIVE_FILESIZE)) {
                    // Output size as a percentage of input size.
                    v.setValue(((double) length) / data.length * 100);
                    m.setValue(v);
                    result.getMeasurements().put(property.getName(), m);
                } else if (property.getName().equals(MigrationResult.MIGRES_RESULT_FILESIZE)) {
                    v.setValue((double) length);
                    m.setValue(v);
                    result.getMeasurements().put(property.getName(), m);
                }
            }
        }
        success = r.isSuccess();
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        return false;
    } finally {
        // Always remove the temp input/output files for this run.
        cleanup(time, inputFile, outputFile);
    }
    return success;
}
From source file:com.secretlisa.lib.utils.BaseImageLoader.java
public Bitmap downloadBitmap(final String imageUrl) { // AndroidHttpClient is not allowed to be used from the main thread final HttpClient client = new DefaultHttpClient(); // ??//from w w w . j av a2s . com int netType = NetworkUtil.getNetworkType(mContext); if (netType == NetworkUtil.TYPE_WAP) { String proxyHost = android.net.Proxy.getDefaultHost(); if (proxyHost != null) { HttpHost proxy = new HttpHost(proxyHost, android.net.Proxy.getDefaultPort()); client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy); } } final HttpGet getRequest = new HttpGet(imageUrl); try { HttpResponse response = client.execute(getRequest); final int statusCode = response.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { return null; } final HttpEntity entity = response.getEntity(); if (entity != null) { try { final byte[] respBytes = getBytes(entity.getContent()); writeImageFile(imageUrl, respBytes); // Decode the bytes and return the bitmap. return BitmapFactory.decodeByteArray(respBytes, 0, respBytes.length, null); } finally { entity.consumeContent(); } } } catch (IOException e) { getRequest.abort(); } catch (OutOfMemoryError e) { clearCache(); System.gc(); } catch (IllegalStateException e) { getRequest.abort(); } catch (Exception e) { getRequest.abort(); } finally { } return null; }
From source file:com.algoTrader.service.SimulationServiceImpl.java
protected SimulationResultVO handleRunByUnderlayings() { long startTime = System.currentTimeMillis(); // must call resetDB through ServiceLocator in order to get a transaction ServiceLocator.serverInstance().getSimulationService().resetDB(); // init all activatable strategies List<Strategy> strategies = getStrategyDao().findAutoActivateStrategies(); for (Strategy strategy : strategies) { getRuleService().initServiceProvider(strategy.getName()); getRuleService().deployAllModules(strategy.getName()); }/*from w w w.j a v a 2 s .c om*/ // feed the ticks inputCSV(); // get the results SimulationResultVO resultVO = getSimulationResultVO(startTime); // destroy all service providers for (Strategy strategy : strategies) { getRuleService().destroyServiceProvider(strategy.getName()); } // reset all configuration variables ConfigurationUtil.resetConfig(); // run a garbage collection System.gc(); return resultVO; }
From source file:imageencode.ImageEncode.java
public void decode(final String filePath, final Element node) throws IOException { final File imageFile = new File(filePath); final OutputStream os = new FileOutputStream(imageFile); String encodedImage = node.getTextContent(); // String decoded = decode(encodedImage); // os.write(decoded); final Runtime runtime = Runtime.getRuntime(); System.out.println("Free memory : " + runtime.freeMemory()); String[] sei = encodedImage.split("\r\n"); // System.out.println(encodedImage); System.out.println("Free memory : " + runtime.freeMemory()); for (final String element : sei) { final byte[] byteImage = Base64.decodeBase64(element); try {// w w w. ja va2 s.c o m os.write(byteImage); } catch (final FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } } os.close(); System.out.println("Free memory : " + runtime.freeMemory()); encodedImage = null; sei = null; System.gc(); System.out.println("Free memory : " + runtime.freeMemory()); }
From source file:at.ac.tuwien.qse.sepm.gui.FullscreenWindow.java
private void loadImage() { if (photos.size() == 0 || photos.size() <= activeIndex) { // out of bounds return;//from w w w . j a v a2s .c o m } Photo photo = photos.get(activeIndex); ratingPicker.setRating(photo.getData().getRating()); image.setImage(photo.getFile()); // handling of images in original size can consume a lot of memory so collect it here System.gc(); }
From source file:ImageEncode.java
public void decode(String filePath, Element node) throws IOException { final File imageFile = new File(filePath); final OutputStream os = new FileOutputStream(imageFile); String encodedImage = node.getTextContent(); // String decoded = decode(encodedImage); // os.write(decoded); final Runtime runtime = Runtime.getRuntime(); System.out.println("Free memory : " + runtime.freeMemory()); String[] sei = encodedImage.split("\r\n"); // System.out.println(encodedImage); System.out.println("Free memory : " + runtime.freeMemory()); for (final String element : sei) { final byte[] byteImage = Base64.decodeBase64(element); try {/*from ww w. j av a 2s.c o m*/ os.write(byteImage); } catch (final FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } } os.close(); System.out.println("Free memory : " + runtime.freeMemory()); encodedImage = null; sei = null; System.gc(); System.out.println("Free memory : " + runtime.freeMemory()); }
From source file:de.csw.expertfinder.test.pos.TestPartOfSpeechTagger.java
/**
 * Tests the MorphAdorner POS tagger.
 *
 * Instantiates the given tagger and lemmatizer reflectively, tags the
 * supplied sentences, lemmatizes each adorned word, and writes a
 * per-sentence report to {@code posresult_<TaggerClass>.txt}, logging the
 * elapsed time of each phase.
 *
 * @param sentences       sentences to tag, each a list of word tokens
 * @param taggerClass     POS tagger implementation to instantiate
 * @param lemmatizerClass lemmatizer implementation to instantiate
 * @throws Exception on reflection, tagging or I/O failure
 */
@Test(dataProvider = "morphAdornerDataProvider")
public void testMorphAdorner(List<List<String>> sentences, Class<PartOfSpeechTagger> taggerClass,
        Class<Lemmatizer> lemmatizerClass) throws Exception {
    StopWatch stopWatch = new StopWatch();
    // initialize POS tagger (can take long if lexicon based)
    stopWatch.start();
    PartOfSpeechTagger partOfSpeechTagger = taggerClass.newInstance();
    stopWatch.stop();
    if (log.isDebugEnabled())
        log.debug("Initializing POS tagger (" + taggerClass.getSimpleName() + "): " + stopWatch);
    // Initialize lemmatizer
    stopWatch.reset();
    stopWatch.start();
    Lemmatizer lemmatizer = lemmatizerClass.newInstance();
    stopWatch.stop();
    if (log.isDebugEnabled())
        log.debug("Initializing lemmatizer (" + lemmatizerClass.getSimpleName() + "): " + stopWatch);
    // POS tag sentences
    stopWatch.reset();
    stopWatch.start();
    List<List<AdornedWord>> taggedSentences = partOfSpeechTagger.tagSentences(sentences);
    stopWatch.stop();
    if (log.isDebugEnabled())
        log.debug("Tagging words: " + stopWatch);
    Lexicon wordLexicon = partOfSpeechTagger.getLexicon();
    SpellingStandardizer standardizer = new DefaultSpellingStandardizer();
    // The lemmatizer uses the standardizer's spellings as its dictionary.
    lemmatizer.setDictionary(standardizer.getStandardSpellings());
    // Get the part of speech tags from the word lexicon.
    PartOfSpeechTags partOfSpeechTags = wordLexicon.getPartOfSpeechTags();
    WordTokenizer spellingTokenizer = new PennTreebankTokenizer();
    // write results to file
    FileWriter out = new FileWriter("posresult_" + taggerClass.getSimpleName() + ".txt");
    for (int i = 0; i < sentences.size(); i++) {
        // Get the next adorned sentence. This contains a list of adorned
        // words. Only the spellings and part of speech tags are
        // guaranteed to be defined.
        List<AdornedWord> sentence = taggedSentences.get(i);
        out.write("---------- Sentence " + (i + 1) + " ----------");
        // Print out the spelling and part(s) of speech for each word in the
        // sentence. Punctuation is treated as a word too.
        for (int j = 0; j < sentence.size(); j++) {
            AdornedWord adornedWord = sentence.get(j);
            setLemma(adornedWord, wordLexicon, lemmatizer, partOfSpeechTags, spellingTokenizer);
            out.write(StringUtils.rpad((j + 1) + "", 3) + ": "
                    + StringUtils.rpad("spell: " + adornedWord.getSpelling(), 20)
                    + StringUtils.rpad("lemmata: " + adornedWord.getLemmata(), 20)
                    + adornedWord.getPartsOfSpeech() + "\n");
        }
    }
    out.close();
    // Release the (potentially large, lexicon-backed) tagger before the GC hint.
    partOfSpeechTagger = null;
    System.gc();
}
From source file:edu.iu.daal_nn.NNDaalCollectiveMapper.java
protected void mapCollective(KeyValReader reader, Context context) throws IOException, InterruptedException { long startTime = System.currentTimeMillis(); List<String> trainingDataFiles = new LinkedList<String>(); List<String> trainingDataGroundTruthFiles = new LinkedList<String>(); //splitting files between mapper while (reader.nextKeyValue()) { String key = reader.getCurrentKey(); String value = reader.getCurrentValue(); LOG.info("Key: " + key + ", Value: " + value); System.out.println("file name : " + value); trainingDataFiles.add(value);//from w ww . j a v a 2s .c om } Configuration conf = context.getConfiguration(); Path pointFilePath = new Path(trainingDataFiles.get(0)); System.out.println("path = " + pointFilePath.getName()); FileSystem fs = pointFilePath.getFileSystem(conf); FSDataInputStream in = fs.open(pointFilePath); runNN(trainingDataFiles, conf, context); LOG.info("Total iterations in master view: " + (System.currentTimeMillis() - startTime)); this.freeMemory(); this.freeConn(); System.gc(); }