List of usage examples for java.io.IOException.getLocalizedMessage()
public String getLocalizedMessage()
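IOException inherits getLocalizedMessage() from Throwable, where the default implementation simply returns getMessage(); subclasses may override it to return a locale-specific description. Before the project examples below, here is a minimal sketch of the typical call pattern (the file name is illustrative):

import java.io.FileReader;
import java.io.IOException;

public class LocalizedMessageExample {
    public static void main(String[] args) {
        try {
            // Attempt to open a file that may not exist (path is illustrative)
            new FileReader("missing-file.txt").close();
        } catch (IOException e) {
            // getLocalizedMessage() falls back to getMessage() unless a subclass overrides it
            System.err.println("I/O error: " + e.getLocalizedMessage());
        }
    }
}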
From source file:fr.treeptik.cloudunit.docker.DockerContainerJSON.java
/**
 * /containers/json : not the same format as an inspect of a container.
 * Lists all running or paused containers and retrieves the name (in cloudunit format),
 * image and state.
 *
 * @param hostAddress
 * @return
 * @throws DockerJSONException
 */
public List<DockerContainer> listAllContainers(String hostAddress) throws DockerJSONException {
    URI uri = null;
    List<DockerContainer> listContainers = new ArrayList<>();
    try {
        uri = new URIBuilder().setScheme(dockerEndpointMode).setHost(hostAddress).setPath("/containers/json")
                .build();
        if (logger.isDebugEnabled()) {
            logger.debug("uri : " + uri);
        }
        JsonResponse jsonResponse;
        try {
            jsonResponse = client.sendGet(uri);
            switch (jsonResponse.getStatus()) {
            case 400:
                throw new ErrorDockerJSONException("docker : bad parameter");
            case 500:
                throw new ErrorDockerJSONException("docker : server error");
            }
        } catch (IOException e) {
            e.printStackTrace();
            throw new DockerJSONException("Error : listAllContainers " + e.getLocalizedMessage(), e);
        }
        if (logger.isDebugEnabled()) {
            logger.debug("response : " + jsonResponse);
        }
        JSONParser jsonParser = new JSONParser();
        Object obj = jsonParser.parse(jsonResponse.getMessage());
        JSONArray array = (JSONArray) obj;
        for (int i = 0; i < array.size(); i++) {
            String containerDescription = array.get(i).toString();
            try {
                String firstSubString = (parser(containerDescription).get("Names").toString()).substring(4);
                String Names = null;
                // for a container with a link, the link name is also shown
                if (firstSubString.lastIndexOf(",") != -1) {
                    Names = firstSubString.substring(0, firstSubString.lastIndexOf(",") - 1);
                } else {
                    Names = firstSubString.substring(0, firstSubString.lastIndexOf("\""));
                }
                if (logger.isDebugEnabled()) {
                    logger.debug("Names=[" + Names + "]");
                }
                if (Names.trim().length() > 0) {
                    DockerContainer dockerContainer = findOne(Names, hostAddress);
                    if (dockerContainer != null) {
                        listContainers.add(dockerContainer);
                    }
                }
            } catch (ParseException e) {
                throw new DockerJSONException("Error : listAllContainers " + e.getLocalizedMessage(), e);
            }
        }
    } catch (NumberFormatException | URISyntaxException | ParseException e) {
        StringBuilder msgError = new StringBuilder(256);
        msgError.append(",hostIP=").append(hostAddress).append(",uri=").append(uri);
        logger.error("" + msgError, e);
        throw new FatalDockerJSONException("docker : error fatal");
    }
    return listContainers;
}
From source file:com.photon.phresco.framework.impl.CIManagerImpl.java
private CIJobStatus doJob(CIJob job, String jobType) throws PhrescoException {
    S_LOGGER.debug("Entering Method CIManagerImpl.createJob(CIJob job)");
    try {
        cli = getCLI(job);
        List<String> argList = new ArrayList<String>();
        argList.add(jobType);
        argList.add(job.getName());
        String configPath = PhrescoFrameworkFactory.getServiceManager().getCiConfigPath();
        ConfigProcessor processor = new ConfigProcessor(new URL(configPath));
        customizeNodes(processor, job);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        S_LOGGER.debug("argList " + argList.toString());
        int result = cli.execute(argList, processor.getConfigAsStream(), System.out, baos);
        String message = "Job created successfully";
        if (result == -1) {
            byte[] byteArray = baos.toByteArray();
            message = new String(byteArray);
        }
        S_LOGGER.debug("message " + message);
        setSvnCredential(job);
        setMailCredential(job);
        return new CIJobStatus(result, message);
    } catch (IOException e) {
        throw new PhrescoException(e);
    } catch (JDOMException e) {
        throw new PhrescoException(e);
    } finally {
        if (cli != null) {
            try {
                cli.close();
            } catch (IOException e) {
                if (debugEnabled) {
                    S_LOGGER.error(e.getLocalizedMessage());
                }
            } catch (InterruptedException e) {
                if (debugEnabled) {
                    S_LOGGER.error(e.getLocalizedMessage());
                }
            }
        }
    }
}
From source file:web.diva.server.model.SomClustering.SomClustImgGenerator.java
public SomClustTreeSelectionResult updateUpperTreeSelection(int x, int y, double w, double h) {
    upperTreeBImg = upperTree.getImage();
    Node n = this.getNodeAt(y, x, colNode);
    SomClustTreeSelectionResult result = new SomClustTreeSelectionResult();
    if (n != null) {
        upperTree.painttree(n, upperTreeBImg.getGraphics(), Color.red);
        Stack st = new Stack();
        Vector v = new Vector();
        n.fillMembers(v, st);
        int[] sel = new int[v.size()];
        for (int i = 0; i < v.size(); i++) {
            sel[i] = ((Integer) v.elementAt(i));
        }
        result.setSelectedIndices(sel);
    }
    try {
        byte[] imageData = ChartUtilities.encodeAsPNG(upperTreeBImg);
        String base64 = Base64.encodeBase64String(imageData);
        base64 = "data:image/png;base64," + base64;
        result.setTreeImg1Url(base64);
        System.gc();
        // result.setTreeImgUrl(navgStringImg);
        return result;
    } catch (IOException e) {
        System.err.println(e.getLocalizedMessage());
    }
    return null;
}
From source file:TimestreamsTests.java
/**
 * Performs an HTTP PUT for a given URL
 *
 * @param url
 *            is the URL to put to
 * @param params
 *            is a URL encoded string in the form x=y&a=b...
 * @return the response header fields, or null on failure
 */
private Map<String, List<String>> doPut(URL url, String params) {
    HttpURLConnection connection;
    try {
        connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("PUT");
        connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        connection.setRequestProperty("Content-Length", "" + Integer.toString(params.getBytes().length));
        connection.setDoInput(true);
        connection.setDoOutput(true);
        OutputStreamWriter out = new OutputStreamWriter(connection.getOutputStream());
        out.write(params);
        out.close();
        Map<String, List<String>> responseHeaderFields = connection.getHeaderFields();
        System.out.println(responseHeaderFields);
        if (responseHeaderFields.get(null).get(0).equals("HTTP/1.1 200 OK")) {
            InputStream is = connection.getInputStream();
            BufferedReader rd = new BufferedReader(new InputStreamReader(is));
            String line;
            StringBuffer response = new StringBuffer();
            while ((line = rd.readLine()) != null) {
                response.append(line);
                response.append('\r');
            }
            rd.close();
            System.out.println(response);
        }
        return responseHeaderFields;
    } catch (IOException e1) {
        fail("Put " + url + " failed: " + e1.getLocalizedMessage());
    }
    return null;
}
From source file:TimestreamsTests.java
/**
 * Performs an HTTP POST for a given URL
 *
 * @param url
 *            is the URL to post to
 * @param params
 *            is a URL encoded string in the form x=y&a=b...
 * @return the response header fields, or null on failure
 */
private Map<String, List<String>> doPost(URL url, String params) {
    HttpURLConnection connection;
    try {
        connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        connection.setRequestProperty("Content-Length", "" + Integer.toString(params.getBytes().length));
        connection.setDoInput(true);
        connection.setDoOutput(true);
        DataOutputStream wr = new DataOutputStream(connection.getOutputStream());
        wr.writeBytes(params);
        wr.flush();
        wr.close();
        Map<String, List<String>> responseHeaderFields = connection.getHeaderFields();
        System.out.println(responseHeaderFields);
        if (responseHeaderFields.get(null).get(0).equals("HTTP/1.1 200 OK")) {
            InputStream is = connection.getInputStream();
            BufferedReader rd = new BufferedReader(new InputStreamReader(is));
            String line;
            StringBuffer response = new StringBuffer();
            while ((line = rd.readLine()) != null) {
                response.append(line);
                response.append('\r');
            }
            rd.close();
            System.out.println(response);
        }
        return responseHeaderFields;
    } catch (IOException e1) {
        fail("Post " + url + " failed: " + e1.getLocalizedMessage());
    }
    return null;
}
From source file:ec.edu.chyc.manejopersonal.managebean.GestorArticulo.java
public void fileUploadListener(FileUploadEvent event) {
    UploadedFile file = event.getFile();
    boolean isBibtex = event.getComponent().getAttributes().get("bibtex") != null;

    // name of the file that is already stored in the properties
    String nombreArchivoGuardado;
    if (isBibtex) {
        nombreArchivoGuardado = articulo.getArchivoBibtex();
    } else {
        nombreArchivoGuardado = articulo.getArchivoArticulo();
    }

    if (nombreArchivoGuardado.isEmpty() && !modoModificar) {
        // if the property is filled, a file was uploaded earlier and a new one is now replacing it,
        // so the previous file has to be deleted
        Path pathArchivoAnterior = ServerUtils.getPathTemp().resolve(nombreArchivoGuardado).normalize();
        File archivoEliminar = pathArchivoAnterior.toFile();
        // delete the previous file if it exists
        if (archivoEliminar.isFile()) {
            archivoEliminar.delete();
        }
    }

    if (file != null) {
        String extensionSubida = FilenameUtils.getExtension(file.getFileName());
        String nombreArchivoSubido = ServerUtils.generarNombreValidoArchivo(extensionSubida);
        Path pathArchivo = ServerUtils.getPathTemp().resolve(nombreArchivoSubido).normalize();
        File newFile = pathArchivo.toFile();
        try {
            BeansUtils.subirArchivoPrimefaces(file, newFile);
            if (isBibtex) {
                articulo.setArchivoBibtex(nombreArchivoSubido);
                tamanoArchivoBibtex = ServerUtils.humanReadableByteCount(file.getSize());
                if (!soloSubirBibtex) {
                    listaPersonaArticulo.clear();
                    leerBibtex(pathArchivo);
                }
            } else {
                articulo.setArchivoArticulo(nombreArchivoSubido);
                tamanoArchivo = ServerUtils.humanReadableByteCount(file.getSize());
            }
        } catch (IOException ex) {
            GestorMensajes.getInstance().mostrarMensajeError(ex.getLocalizedMessage());
            Logger.getLogger(GestorArticulo.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else {
        System.err.println("Error al subir archivo");
    }
}
From source file:edu.ku.brc.util.FileStoreAttachmentManager.java
@Override
public boolean setStorageLocationIntoAttachment(final Attachment attachment, final boolean doDisplayErrors) {
    String attName = attachment.getOrigFilename();
    int lastPeriod = attName.lastIndexOf('.');
    String suffix = ".att";
    if (lastPeriod != -1) {
        // Make sure the file extension (if any) remains the same so the host
        // filesystem still sees the files as the proper types. This is simply
        // to make the files browsable from a system file browser.
        suffix = ".att" + attName.substring(lastPeriod);
    }

    String errMsg = null;
    String storageFilename = "";
    try {
        if (originalsDir == null || !originalsDir.exists() || !originalsDir.canWrite()) {
            errMsg = UIRegistry.getLocalizedMessage("ATTCH_STRG_DIR_ERR",
                    (originalsDir != null ? originalsDir.getAbsolutePath() : "(missing dir name)"));
            log.error("originalsDir doesn't exist["
                    + (originalsDir != null ? originalsDir.getAbsolutePath() : "null ") + "]");
        }

        // find an unused filename in the originals dir
        File storageFile = File.createTempFile("sp6-", suffix, originalsDir);
        System.err.println("[" + storageFile.getAbsolutePath() + "] " + storageFile.canWrite());

        FileOutputStream fos = new FileOutputStream(storageFile);
        fos.write(1);
        fos.flush();
        fos.close();

        if (storageFile.exists()) {
            attachment.setAttachmentLocation(storageFile.getName());
            unfilledFiles.add(attachment.getAttachmentLocation());
            return true;
        }
        errMsg = UIRegistry.getLocalizedMessage("ATTCH_NOT_SAVED_REPOS",
                (storageFile != null ? storageFile.getAbsolutePath() : "(missing file name)"),
                "File may not exist.");
        log.error("storageFile doesn't exist["
                + (storageFile != null ? storageFile.getAbsolutePath() : "null") + "]");

    } catch (IOException e) {
        e.printStackTrace();
        if (doDisplayErrors) {
            errMsg = UIRegistry.getLocalizedMessage("ATTCH_NOT_SAVED_REPOS", storageFilename,
                    e.getLocalizedMessage());
        } else {
            // This happens when errors are not displayed.
            e.printStackTrace();
            edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
            edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(FileStoreAttachmentManager.class, e);
        }
    }

    if (doDisplayErrors && errMsg != null) {
        UIRegistry.showError(errMsg);
    }
    return false;
}
From source file:it.geosolutions.geobatch.flow.event.consumer.file.FileBasedEventConsumer.java
/**
 * Remove all Cumulating progress listeners from the Consumer and the containing action(s),
 * remove all the actions from the action list, and remove the contextRunningDir.
 */
private void clear() {
    // Progress Logging...
    // remove all Cumulating progress listeners from the Consumer and
    // containing action(s)
    final ProgressListenerForwarder lf = this.getListenerForwarder();
    final Collection<? extends IProgressListener> listeners = lf.getListeners();
    if (listeners != null) {
        for (IProgressListener listener : listeners) {
            if (listener instanceof CumulatingProgressListener) {
                ((CumulatingProgressListener) listener).clearMessages();
            }
        }
    }

    // Current Action Status...
    // remove all the actions from the action list
    if (actions != null) {
        for (Action action : this.actions) {
            if (action instanceof BaseAction<?>) {
                final BaseAction<?> baseAction = (BaseAction) action;
                // try the most interesting information holder
                Collection<IProgressListener> coll = baseAction.getListeners(CumulatingProgressListener.class);
                for (IProgressListener cpl : coll) {
                    if (cpl != null && cpl instanceof CumulatingProgressListener) {
                        ((CumulatingProgressListener) cpl).clearMessages();
                    }
                }
            }
        }
        this.actions.clear();
    }

    // remove contextRunningDir
    if (!keepTempDir) {
        // removing running context directory
        try {
            FileUtils.deleteDirectory(getFlowInstanceTempDir());
        } catch (IOException e) {
            if (LOGGER.isWarnEnabled()) {
                LOGGER.warn("Problem trying to remove the running context directory: "
                        + getFlowInstanceTempDir() + ".\n " + e.getLocalizedMessage());
            }
        }
    }
}
From source file:com.apelon.akcds.loinc.LoincToEConcepts.java
public void execute() throws MojoExecutionException {
    ConsoleUtil.println("LOINC Processing Begins " + new Date().toString());
    LOINCReader loincData = null;
    LOINCReader mapTo = null;
    LOINCReader sourceOrg = null;
    LOINCReader loincMultiData = null;
    try {
        super.execute();
        if (!inputFileLocation.isDirectory()) {
            throw new MojoExecutionException(
                    "LoincDataFiles must point to a directory containing the 3 required loinc data files");
        }

        for (File f : inputFileLocation.listFiles()) {
            if (f.getName().toLowerCase().equals("loincdb.txt")) {
                loincData = new TxtFileReader(f);
            } else if (f.getName().toLowerCase().equals("loinc.csv")) {
                loincData = new CSVFileReader(f);
            } else if (f.getName().toLowerCase().equals("map_to.csv")) {
                mapTo = new CSVFileReader(f);
            } else if (f.getName().toLowerCase().equals("source_organization.csv")) {
                sourceOrg = new CSVFileReader(f);
            } else if (f.getName().toLowerCase().endsWith("multi-axial_hierarchy.csv")) {
                loincMultiData = new CSVFileReader(f);
            }
        }

        if (loincData == null) {
            throw new MojoExecutionException(
                    "Could not find the loinc data file in " + inputFileLocation.getAbsolutePath());
        }
        if (loincMultiData == null) {
            throw new MojoExecutionException(
                    "Could not find the multi-axial file in " + inputFileLocation.getAbsolutePath());
        }

        SimpleDateFormat dateReader = new SimpleDateFormat("MMMMMMMMMMMMM yyyy"); // Parse things like "June 2014"
        Date releaseDate = dateReader.parse(loincData.getReleaseDate());

        File binaryOutputFile = new File(outputDirectory, "loincEConcepts.jbin");
        dos_ = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(binaryOutputFile)));
        conceptUtility_ = new EConceptUtility(loincNamespaceBaseSeed_, "LOINC Path", dos_, releaseDate.getTime());

        contentVersion_ = new PT_ContentVersion();
        pt_SkipAxis_ = new PT_SkipAxis();
        pt_SkipClass_ = new PT_SkipClass();

        String version = loincData.getVersion();
        fieldMap_ = loincData.getFieldMap();
        fieldMapInverse_ = loincData.getFieldMapInverse();

        String mapFileName = null;
        if (version.contains("2.36")) {
            PropertyType.setSourceVersion(1);
            mapFileName = "classMappings-2.36.txt";
        } else if (version.contains("2.38")) {
            PropertyType.setSourceVersion(2);
            mapFileName = "classMappings-2.36.txt"; // Yes, wrong one, never made the file for 2.38
        } else if (version.contains("2.40")) {
            PropertyType.setSourceVersion(3);
            mapFileName = "classMappings-2.40.txt";
        } else if (version.contains("2.44")) {
            PropertyType.setSourceVersion(4);
            mapFileName = "classMappings-2.44.txt";
        } else if (version.contains("2.46")) {
            PropertyType.setSourceVersion(4);
            mapFileName = "classMappings-2.46.txt";
        } else if (version.contains("2.48")) {
            PropertyType.setSourceVersion(4);
            mapFileName = "classMappings-2.48.txt";
        } else if (version.contains("2.50")) {
            PropertyType.setSourceVersion(5);
            mapFileName = "classMappings-2.52.txt"; // never did a 2.50, skipped to 2.52
        } else if (version.contains("2.52")) {
            PropertyType.setSourceVersion(6);
            mapFileName = "classMappings-2.52.txt";
        } else {
            ConsoleUtil.printErrorln("ERROR: UNTESTED VERSION - NO TESTED PROPERTY MAPPING EXISTS!");
            PropertyType.setSourceVersion(6);
            mapFileName = "classMappings-2.52.txt";
        }
        classMapping_ = new NameMap(mapFileName);

        if (mapTo != null) {
            String[] line = mapTo.readLine();
            while (line != null) {
                if (line.length > 0) {
                    HashMap<String, String> nestedData = mapToData.get(line[0]);
                    if (nestedData == null) {
                        nestedData = new HashMap<>();
                        mapToData.put(line[0], nestedData);
                    }
                    if (nestedData.put(line[1], line[2]) != null) {
                        throw new Exception("Oops - " + line[0] + " " + line[1] + " " + line[2]);
                    }
                }
                line = mapTo.readLine();
            }
        }

        initProperties();

        ConsoleUtil.println("Loading Metadata");

        // Set up a meta-data root concept
        UUID archRoot = ArchitectonicAuxiliary.Concept.ARCHITECTONIC_ROOT_CONCEPT.getPrimoridalUid();
        UUID metaDataRoot = ConverterUUID.createNamespaceUUIDFromString("metadata");
        conceptUtility_.createAndStoreMetaDataConcept(metaDataRoot, "LOINC Metadata", false, archRoot, dos_);
        conceptUtility_.loadMetaDataItems(propertyTypes_, metaDataRoot, dos_);

        // Load up the propertyType map for speed, perform basic sanity check
        for (PropertyType pt : propertyTypes_) {
            for (String propertyName : pt.getPropertyNames()) {
                if (propertyToPropertyType_.containsKey(propertyName)) {
                    ConsoleUtil.printErrorln("ERROR: Two different property types each contain " + propertyName);
                }
                propertyToPropertyType_.put(propertyName, pt);
            }
        }

        if (sourceOrg != null) {
            EConcept sourceOrgConcept = conceptUtility_.createAndStoreMetaDataConcept("Source Organization",
                    false, metaDataRoot, dos_);
            String[] line = sourceOrg.readLine();
            while (line != null) {
                // "COPYRIGHT_ID","NAME","COPYRIGHT","TERMS_OF_USE","URL"
                if (line.length > 0) {
                    EConcept c = conceptUtility_.createConcept(line[0], sourceOrgConcept.getPrimordialUuid());
                    conceptUtility_.addDescription(c, line[1], DescriptionType.SYNONYM, true,
                            propertyToPropertyType_.get("NAME").getProperty("NAME").getUUID(), null, false);
                    conceptUtility_.addStringAnnotation(c, line[2],
                            propertyToPropertyType_.get("COPYRIGHT").getProperty("COPYRIGHT").getUUID(), false);
                    conceptUtility_.addStringAnnotation(c, line[3],
                            propertyToPropertyType_.get("TERMS_OF_USE").getProperty("TERMS_OF_USE").getUUID(), false);
                    conceptUtility_.addStringAnnotation(c, line[4],
                            propertyToPropertyType_.get("URL").getProperty("URL").getUUID(), false);
                    c.writeExternal(dos_);
                }
                line = sourceOrg.readLine();
            }
        }

        // write this at the end
        EConcept loincRefset = pt_refsets_.getConcept(PT_Refsets.Refsets.ALL.getProperty());

        // The next line of the file is the header.
        String[] headerFields = loincData.getHeader();

        // validate that we are configured to map all properties properly
        checkForLeftoverPropertyTypes(headerFields);

        ConsoleUtil.println("Metadata summary:");
        for (String s : conceptUtility_.getLoadStats().getSummary()) {
            ConsoleUtil.println(" " + s);
        }
        conceptUtility_.clearLoadStats();

        // Root
        EConcept rootConcept = conceptUtility_.createConcept("LOINC");
        conceptUtility_.addDescription(rootConcept, "LOINC", DescriptionType.SYNONYM, true, null, null, false);
        conceptUtility_.addDescription(rootConcept, "Logical Observation Identifiers Names and Codes",
                DescriptionType.SYNONYM, false, null, null, false);
        ConsoleUtil.println("Root concept FSN is 'LOINC' and the UUID is " + rootConcept.getPrimordialUuid());

        conceptUtility_.addStringAnnotation(rootConcept, version,
                contentVersion_.getProperty("Source Version").getUUID(), false);
        conceptUtility_.addStringAnnotation(rootConcept, loincData.getReleaseDate(),
                contentVersion_.getProperty("Release Date").getUUID(), false);
        conceptUtility_.addStringAnnotation(rootConcept, converterResultVersion, contentVersion_.RELEASE.getUUID(), false);
        conceptUtility_.addStringAnnotation(rootConcept, loaderVersion, contentVersion_.LOADER_VERSION.getUUID(), false);
        concepts_.put(rootConcept.primordialUuid, rootConcept);

        // Build up the Class metadata
        EConcept classConcept = conceptUtility_.createConcept(pt_SkipClass_.getPropertyTypeUUID(),
                pt_SkipClass_.getPropertyTypeDescription(), rootConcept.primordialUuid);
        concepts_.put(classConcept.primordialUuid, classConcept);

        for (String property : pt_SkipClass_.getPropertyNames()) {
            EConcept temp = conceptUtility_.createConcept(pt_SkipClass_.getProperty(property).getUUID(), property,
                    classConcept.primordialUuid);
            concepts_.put(temp.primordialUuid, temp);
        }

        // And the axis metadata
        EConcept axisConcept = conceptUtility_.createConcept(pt_SkipAxis_.getPropertyTypeUUID(),
                pt_SkipAxis_.getPropertyTypeDescription(), rootConcept.primordialUuid);
        concepts_.put(axisConcept.primordialUuid, axisConcept);

        for (String property : pt_SkipAxis_.getPropertyNames()) {
            EConcept temp = conceptUtility_.createConcept(pt_SkipAxis_.getProperty(property).getUUID(), property,
                    axisConcept.primordialUuid);
            concepts_.put(temp.primordialUuid, temp);
        }

        // load the data
        ConsoleUtil.println("Reading data file into memory.");
        int dataRows = 0;
        {
            String[] line = loincData.readLine();
            dataRows++;
            while (line != null) {
                if (line.length > 0) {
                    processDataLine(line);
                }
                line = loincData.readLine();
                dataRows++;
                if (dataRows % 1000 == 0) {
                    ConsoleUtil.showProgress();
                }
            }
        }
        loincData.close();
        ConsoleUtil.println("Read " + dataRows + " data lines from file");

        ConsoleUtil.println("Processing multi-axial file");
        {
            // header - PATH_TO_ROOT,SEQUENCE,IMMEDIATE_PARENT,CODE,CODE_TEXT
            int lineCount = 0;
            String[] line = loincMultiData.readLine();
            while (line != null) {
                lineCount++;
                if (line.length > 0) {
                    processMultiAxialData(rootConcept.getPrimordialUuid(), line);
                }
                line = loincMultiData.readLine();
                if (lineCount % 1000 == 0) {
                    ConsoleUtil.showProgress();
                }
            }
            loincMultiData.close();
            ConsoleUtil.println("Read " + lineCount + " data lines from file");
        }

        ConsoleUtil.println("Writing jbin file");
        int conCounter = 0;
        for (EConcept concept : concepts_.values()) {
            conceptUtility_.addRefsetMember(loincRefset, concept.getPrimordialUuid(), null, true, null);
            concept.writeExternal(dos_);
            conCounter++;
            if (conCounter % 10 == 0) {
                ConsoleUtil.showProgress();
            }
            if ((conCounter % 10000) == 0) {
                ConsoleUtil.println("Processed: " + conCounter + " - just completed "
                        + concept.getDescriptions().get(0).getText());
            }
        }
        ConsoleUtil.println("Processed " + conCounter + " concepts total");

        conceptUtility_.storeRefsetConcepts(pt_refsets_, dos_);

        ConsoleUtil.println("Data Load Summary:");
        for (String s : conceptUtility_.getLoadStats().getSummary()) {
            ConsoleUtil.println(" " + s);
        }
        ConsoleUtil.println("Skipped " + skippedDeletedItems
                + " Loinc codes because they were flagged as DELETED and they had no descriptions.");

        // this could be removed from final release. Just added to help debug editor problems.
        ConsoleUtil.println("Dumping UUID Debug File");
        ConverterUUID.dump(outputDirectory, "loincUuid");

        ConsoleUtil.println("LOINC Processing Completes " + new Date().toString());
        ConsoleUtil.writeOutputToFile(new File(outputDirectory, "ConsoleOutput.txt").toPath());
    } catch (Exception ex) {
        throw new MojoExecutionException(ex.getLocalizedMessage(), ex);
    } finally {
        if (dos_ != null) {
            try {
                dos_.flush();
                dos_.close();
                loincData.close();
                loincMultiData.close();
                if (mapTo != null) {
                    mapTo.close();
                }
                if (sourceOrg != null) {
                    sourceOrg.close();
                }
            } catch (IOException e) {
                throw new MojoExecutionException(e.getLocalizedMessage(), e);
            }
        }
    }
}
From source file:net.sf.logsaw.index.internal.LuceneIndexServiceImpl.java
@Override
public ResultPage query(IQueryContext context, final List<ARestriction<?>> restrictions, final int offset,
        final int limit) throws CoreException {
    Assert.isNotNull(context, "context"); //$NON-NLS-1$
    Assert.isTrue(context instanceof LuceneQueryContextImpl,
            "Query context must be of type net.sf.logsaw.index.impl.LuceneQueryContextImpl"); //$NON-NLS-1$
    Assert.isTrue(context.isOpen(), "Query context must be open"); //$NON-NLS-1$
    Assert.isNotNull(restrictions, "restrictions"); //$NON-NLS-1$

    ARunWithIndexReader<ResultPage> runnable = new ARunWithIndexReader<ResultPage>() {

        /* (non-Javadoc)
         * @see net.sf.logsaw.index.impl.ARunWithIndexReader#doRunWithIndexReader(org.apache.lucene.index.IndexReader, net.sf.logsaw.core.framework.ILogResource)
         */
        @Override
        protected ResultPage doRunWithIndexReader(IndexReader reader, ILogResource log) throws CoreException {
            if (reader == null) {
                // Index does not exist yet
                return new ResultPage();
            }
            try {
                IndexSearcher searcher = new IndexSearcher(reader);
                Sort sort = new Sort(new SortField[] { SortField.FIELD_DOC });
                TopFieldCollector collector = TopFieldCollector.create(sort, offset + limit, false, false, false, true);
                // TODO Investigate use of searchAfter
                searcher.search(convertToQuery(restrictions), collector);
                List<LogEntry> result = new LinkedList<LogEntry>();
                collectHits(searcher, collector.topDocs(offset), log.getDialect(), result);
                return new ResultPage(result, offset, collector.getTotalHits());
            } catch (IOException e) {
                // Unexpected exception; wrap with CoreException
                throw new CoreException(new Status(IStatus.ERROR, IndexPlugin.PLUGIN_ID,
                        NLS.bind(Messages.LuceneIndexService_error_failedToReadIndex,
                                new Object[] { log.getName(), e.getLocalizedMessage() }), e));
            }
        }
    };
    runnable.setQueryContext((LuceneQueryContextImpl) context);
    return runnable.runWithIndexReader(context.getLogResource());
}