List of usage examples for java.io.ByteArrayInputStream.close()
public void close() throws IOException
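For ByteArrayInputStream the close() method has no effect: the stream is backed by an in-memory byte array, so there is no underlying system resource to release, and its methods can still be called after close() without an IOException. Calling close() (ideally via try-with-resources) is still worthwhile so the code stays correct if the stream is later swapped for a resource-backed InputStream. A minimal sketch; the class name and data are illustrative:

import java.io.ByteArrayInputStream;
import java.io.IOException;

public class ByteArrayInputStreamCloseExample {
    public static void main(String[] args) throws IOException {
        byte[] data = { 1, 2, 3, 4 };
        // try-with-resources calls close() automatically; for ByteArrayInputStream
        // this is a no-op, but the idiom keeps the code safe if the stream type changes.
        try (ByteArrayInputStream in = new ByteArrayInputStream(data)) {
            int b;
            while ((b = in.read()) != -1) {
                System.out.print(b + " ");
            }
        }
    }
}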
From source file:com.flexoodb.common.FlexUtils.java
static public Object getObject(byte[] b) throws Exception {
    ByteArrayInputStream bis = null;
    ObjectInputStream ois = null;
    Object obj = null;
    try {
        bis = new ByteArrayInputStream(b);
        ois = new ObjectInputStream(bis);
        obj = ois.readObject();
    } catch (Exception e) {
        throw e;
    } finally {
        try {
            ois.close();
            bis.close();
        } catch (Exception f) {
        }
    }
    return obj;
}
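In the finally block above, ois.close() throws a NullPointerException (silently swallowed) when the ObjectInputStream constructor fails, and bis is then never closed. A minimal sketch of the same helper using try-with-resources, which closes both streams in every case; the rewrite is illustrative and assumes the same java.io imports as the original:

public static Object getObject(byte[] b) throws Exception {
    // Both streams are closed automatically, even if readObject() throws.
    try (ByteArrayInputStream bis = new ByteArrayInputStream(b);
         ObjectInputStream ois = new ObjectInputStream(bis)) {
        return ois.readObject();
    }
}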
From source file:org.globus.gsi.gssapi.GlobusGSSContextImpl.java
/**
 * Accept a delegated credential.
 *
 * This function drives the accepting side of the credential
 * delegation process. It is expected to be called in tandem with the
 * {@link #initDelegation(GSSCredential, Oid, int, byte[], int, int)
 * initDelegation} function.
 * <BR>
 * The behavior of this function can be modified by
 * {@link GSSConstants#GSS_MODE GSSConstants.GSS_MODE} context
 * option. The
 * {@link GSSConstants#GSS_MODE GSSConstants.GSS_MODE}
 * option if set to
 * {@link GSIConstants#MODE_SSL GSIConstants.MODE_SSL}
 * results in tokens that are not wrapped.
 *
 * @param lifetime
 *        The requested period of validity (seconds) of the delegated
 *        credential.
 * @return A token that should be passed to <code>initDelegation</code> if
 *         <code>isDelegationFinished</code> returns false. May be null.
 * @exception GSSException containing the following major error codes:
 *            <code>GSSException.FAILURE</code>
 */
public byte[] acceptDelegation(int lifetime, byte[] buf, int off, int len) throws GSSException {
    logger.debug("Enter acceptDelegation: " + delegationState);

    if (this.gssMode != GSIConstants.MODE_SSL && buf != null && len > 0) {
        buf = unwrap(buf, off, len);
        off = 0;
        len = buf.length;
    }

    byte[] token = null;

    switch (delegationState) {

    case DELEGATION_START:

        this.delegationFinished = false;
        if (len != 1 && buf[off] != GSIConstants.DELEGATION_CHAR) {
            throw new GlobusGSSException(GSSException.FAILURE, GlobusGSSException.DELEGATION_ERROR,
                    "delegError00", new Object[] { new Character((char) buf[off]) });
        }

        try {
            /*DEL
            Vector certChain = this.conn.getCertificateChain();
            */
            Certificate[] certChain;
            try {
                certChain = this.sslEngine.getSession().getPeerCertificates();
            } catch (SSLPeerUnverifiedException e) {
                certChain = null;
            }
            if (certChain == null || certChain.length == 0) {
                throw new GlobusGSSException(GSSException.FAILURE, GlobusGSSException.DELEGATION_ERROR,
                        "noClientCert");
            }

            X509Certificate tmpCert =
                /*DEL
                PureTLSUtil.convertCert((X509Cert) certChain.lastElement());
                */
                (X509Certificate) certChain[0];

            token = generateCertRequest(tmpCert);
        } catch (GeneralSecurityException e) {
            throw new GlobusGSSException(GSSException.FAILURE, e);
        }

        this.delegationState = DELEGATION_COMPLETE_CRED;
        break;

    case DELEGATION_COMPLETE_CRED:

        ByteArrayInputStream in = null;
        X509Certificate[] chain = null;
        LinkedList certList = new LinkedList();
        X509Certificate cert = null;
        try {
            in = new ByteArrayInputStream(buf, off, len);
            while (in.available() > 0) {
                cert = CertificateLoadUtil.loadCertificate(in);
                certList.add(cert);
            }

            chain = new X509Certificate[certList.size()];
            chain = (X509Certificate[]) certList.toArray(chain);

            verifyDelegatedCert(chain[0]);
        } catch (GeneralSecurityException e) {
            throw new GlobusGSSException(GSSException.FAILURE, e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (Exception e) {
                    logger.warn("Unable to close streamreader.");
                }
            }
        }

        X509Credential proxy = new X509Credential(this.keyPair.getPrivate(), chain);

        this.delegatedCred = new GlobusGSSCredentialImpl(proxy, GSSCredential.INITIATE_AND_ACCEPT);

        this.delegationState = DELEGATION_START;
        this.delegationFinished = true;
        break;

    default:
        throw new GSSException(GSSException.FAILURE);
    }

    logger.debug("Exit acceptDelegation");

    if (this.gssMode != GSIConstants.MODE_SSL && token != null) {
        // XXX: Why wrap() only when not in MODE_SSL?
        return wrap(token, 0, token.length);
    } else {
        return token;
    }
}
From source file:big.BigZip.java
/**
 * Requires an InputStream, it will calculate the SHA1 checksum at the same
 * time that it writes data onto the big file. The input stream is expected
 * to be closed outside of this method.
 * @param stream
 * @param filePathToWriteInTextLine
 * @throws java.io.IOException
 */
public void quickWriteStreamStandalone(final InputStream stream, final String filePathToWriteInTextLine)
        throws Exception {
    // declare
    ByteArrayOutputStream outputZipStream = new ByteArrayOutputStream();
    ByteArrayInputStream byteInput = null;
    // Create Archive Output Stream that attaches File Output Stream / and specifies type of compression
    ArchiveOutputStream logical_zip = new ArchiveStreamFactory()
            .createArchiveOutputStream(ArchiveStreamFactory.ZIP, outputZipStream);
    // Create Archive entry - write header information
    ZipArchiveEntry zipArchiveEntry = new ZipArchiveEntry(filePathToWriteInTextLine);
    logical_zip.putArchiveEntry(zipArchiveEntry);
    // prepare the SHA1 signature generation
    final MessageDigest hash = MessageDigest.getInstance("SHA1");
    // Copy input file
    byte[] buffer = new byte[16384];
    int length;
    // decompress from the original zip file, compress to our zip format
    // calculate the SHA1 signature on the same loop to save resource
    while ((length = stream.read(buffer)) > 0) {
        logical_zip.write(buffer, 0, length);
        hash.update(buffer, 0, length);
    }
    // compute the file signature
    byte[] digest = hash.digest();
    final String SHA1 = utils.hashing.checksum.convertHash(digest);
    // close the zip related objects
    logical_zip.closeArchiveEntry();
    logical_zip.finish();
    logical_zip.flush();
    logical_zip.close();
    logical_zip = null;
    // define the line that will be written on the index file
    final String line = "\n".concat(utils.files.getPrettyFileSize(currentPosition)).concat(" ").concat(SHA1)
            .concat(" ").concat(filePathToWriteInTextLine);
    // get the bytes
    byteInput = new ByteArrayInputStream(outputZipStream.toByteArray());
    int counter = 0;
    // add the magic number to this file block
    outputStream.write(magicSignature.getBytes());
    // now copy the whole file into the BIG archive
    while ((length = byteInput.read(buffer)) > 0) {
        outputStream.write(buffer, 0, length);
        counter += length;
    }
    // write a new line in our index file
    writerFileIndex.write(line);
    // increase the position counter
    currentPosition += counter + magicSignature.length();
    // close the streams that were created
    byteInput.close();
    outputZipStream.close();
}
From source file:org.globus.gsi.gssapi.GlobusGSSContextImpl.java
/**
 * Initiate the delegation of a credential.
 *
 * This function drives the initiating side of the credential
 * delegation process. It is expected to be called in tandem with the
 * {@link #acceptDelegation(int, byte[], int, int) acceptDelegation}
 * function.
 * <BR>
 * The behavior of this function can be modified by
 * {@link GSSConstants#DELEGATION_TYPE GSSConstants.DELEGATION_TYPE}
 * and
 * {@link GSSConstants#GSS_MODE GSSConstants.GSS_MODE} context
 * options.
 * The {@link GSSConstants#DELEGATION_TYPE GSSConstants.DELEGATION_TYPE}
 * option controls delegation type to be performed. The
 * {@link GSSConstants#GSS_MODE GSSConstants.GSS_MODE}
 * option if set to
 * {@link GSIConstants#MODE_SSL GSIConstants.MODE_SSL}
 * results in tokens that are not wrapped.
 *
 * @param credential
 *        The credential to be delegated. May be null
 *        in which case the credential associated with the security
 *        context is used.
 * @param mechanism
 *        The desired security mechanism. May be null.
 * @param lifetime
 *        The requested period of validity (seconds) of the delegated
 *        credential.
 * @return A token that should be passed to <code>acceptDelegation</code> if
 *         <code>isDelegationFinished</code> returns false. May be null.
 * @exception GSSException containing the following major error codes:
 *            <code>GSSException.FAILURE</code>
 */
public byte[] initDelegation(GSSCredential credential, Oid mechanism, int lifetime, byte[] buf, int off, int len)
        throws GSSException {
    logger.debug("Enter initDelegation: " + delegationState);

    if (mechanism != null && !mechanism.equals(getMech())) {
        throw new GSSException(GSSException.BAD_MECH);
    }

    if (this.gssMode != GSIConstants.MODE_SSL && buf != null && len > 0) {
        buf = unwrap(buf, off, len);
        off = 0;
        len = buf.length;
    }

    byte[] token = null;

    switch (delegationState) {

    case DELEGATION_START:

        this.delegationFinished = false;

        token = DELEGATION_TOKEN;

        this.delegationState = DELEGATION_SIGN_CERT;
        break;

    case DELEGATION_SIGN_CERT:

        if (credential == null) {
            // get default credential
            GSSManager manager = new GlobusGSSManagerImpl();
            credential = manager.createCredential(GSSCredential.INITIATE_AND_ACCEPT);
        }

        if (!(credential instanceof GlobusGSSCredentialImpl)) {
            throw new GSSException(GSSException.DEFECTIVE_CREDENTIAL);
        }

        X509Credential cred = ((GlobusGSSCredentialImpl) credential).getX509Credential();

        X509Certificate[] chain = cred.getCertificateChain();

        int time = (lifetime == GSSCredential.DEFAULT_LIFETIME) ? -1 : lifetime;

        ByteArrayInputStream inData = null;
        ByteArrayOutputStream out = null;
        try {
            inData = new ByteArrayInputStream(buf, off, len);

            X509Certificate cert = this.certFactory.createCertificate(inData, chain[0], cred.getPrivateKey(),
                    time,
                    /*DEL
                    getDelegationType(chain[0]));
                    */
                    BouncyCastleCertProcessingFactory.decideProxyType(chain[0], this.delegationType));

            out = new ByteArrayOutputStream();
            out.write(cert.getEncoded());
            for (int i = 0; i < chain.length; i++) {
                out.write(chain[i].getEncoded());
            }

            token = out.toByteArray();
        } catch (Exception e) {
            throw new GlobusGSSException(GSSException.FAILURE, e);
        } finally {
            if (inData != null) {
                try {
                    inData.close();
                } catch (Exception e) {
                    logger.warn("Unable to close stream.");
                }
            }
            if (out != null) {
                try {
                    out.close();
                } catch (Exception e) {
                    logger.warn("Unable to close stream.");
                }
            }
        }

        this.delegationState = DELEGATION_START;
        this.delegationFinished = true;
        break;

    default:
        throw new GSSException(GSSException.FAILURE);
    }

    logger.debug("Exit initDelegation");

    if (this.gssMode != GSIConstants.MODE_SSL && token != null) {
        // XXX: Why wrap() only when not in MODE_SSL?
        return wrap(token, 0, token.length);
    } else {
        return token;
    }
}
From source file:net.yacy.document.TextParser.java
/**
 * @param location the URL of the source
 * @param mimeType the mime type of the source, if known
 * @param parsers a set of parsers supporting the resource at location
 * @param charset the charset name of the source, if known
 * @param scraper a vocabulary scraper
 * @param timezoneOffset the local time zone offset
 * @param depth the current crawling depth
 * @param sourceArray the resource content bytes
 * @param maxLinks the maximum total number of links to parse and add to the result documents
 * @param maxBytes the maximum number of content bytes to process
 * @return a list of documents that result from parsing the source
 * @throws Parser.Failure when the source could not be parsed
 */
private static Document[] parseSource(final DigestURL location, final String mimeType, final Set<Parser> parsers,
        final String charset, final Set<String> ignore_class_name, final VocabularyScraper scraper,
        final int timezoneOffset, final int depth, final byte[] sourceArray, final int maxLinks,
        final long maxBytes) throws Parser.Failure {
    final String fileExt = MultiProtocolURL.getFileExtension(location.getFileName());
    if (AbstractParser.log.isFine())
        AbstractParser.log.fine("Parsing " + location + " with mimeType '" + mimeType
                + "' and file extension '" + fileExt + "' from byte[]");
    final String documentCharset = htmlParser.patchCharsetEncoding(charset);
    assert !parsers.isEmpty();

    Document[] docs = null;
    final Map<Parser, Parser.Failure> failedParser = new HashMap<Parser, Parser.Failure>();
    String origName = Thread.currentThread().getName();
    Thread.currentThread().setName("parsing + " + location.toString()); // set a name to get the address in Thread Dump
    for (final Parser parser : parsers) {
        if (MemoryControl.request(sourceArray.length * 6, false)) {
            ByteArrayInputStream bis;
            if (mimeType.equals("text/plain") && parser.getName().equals("HTML Parser")) {
                // a hack to simulate html files .. is needed for NOLOAD queues. This throws their data into virtual text/plain messages.
                bis = new ByteArrayInputStream(UTF8.getBytes(
                        "<html><head></head><body><h1>" + UTF8.String(sourceArray) + "</h1></body><html>"));
            } else {
                bis = new ByteArrayInputStream(sourceArray);
            }
            try {
                if (parser.isParseWithLimitsSupported()) {
                    docs = parser.parseWithLimits(location, mimeType, documentCharset, ignore_class_name,
                            scraper, timezoneOffset, bis, maxLinks, maxBytes);
                } else {
                    /* Partial parsing is not supported by this parser : check content length now */
                    if (sourceArray.length > maxBytes) {
                        throw new Parser.Failure("Content size is over maximum size of " + maxBytes + "",
                                location);
                    }
                    docs = parser.parse(location, mimeType, documentCharset, ignore_class_name, scraper,
                            timezoneOffset, bis);
                }
            } catch (final Parser.Failure e) {
                if (parser instanceof gzipParser && e.getCause() instanceof GZIPOpeningStreamException
                        && (parsers.size() == 1 || (parsers.size() == 2 && parsers.contains(genericIdiom)))) {
                    /* The gzip parser failed directly when opening the content stream : before falling back to the generic parser,
                     * let's have a chance to parse the stream as uncompressed. */
                    /* Indeed, this can be a case of misconfigured web server, providing both headers "Content-Encoding" with value "gzip",
                     * and "Content-type" with value such as "application/gzip".
                     * In that case our HTTP client (see GzipResponseInterceptor) is already uncompressing the stream on the fly,
                     * that's why the gzipparser fails opening the stream.
                     * (see RFC 7231 section 3.1.2.2 for "Content-Encoding" header specification https://tools.ietf.org/html/rfc7231#section-3.1.2.2) */
                    gzipParser gzParser = (gzipParser) parser;

                    bis = new ByteArrayInputStream(sourceArray);

                    Document maindoc = gzipParser.createMainDocument(location, mimeType, charset, gzParser);

                    try {
                        docs = gzParser.parseCompressedInputStream(location, charset, timezoneOffset, depth,
                                bis, maxLinks, maxBytes);
                        if (docs != null) {
                            maindoc.addSubDocuments(docs);
                        }
                        docs = new Document[] { maindoc };
                        break;
                    } catch (Parser.Failure e1) {
                        failedParser.put(parser, e1);
                    } catch (Exception e2) {
                        failedParser.put(parser, new Parser.Failure(e2.getMessage(), location));
                    }
                } else {
                    failedParser.put(parser, e);
                }
            } catch (final Exception e) {
                failedParser.put(parser, new Parser.Failure(e.getMessage(), location));
                //log.logWarning("tried parser '" + parser.getName() + "' to parse " + location.toNormalform(true, false) + " but failed: " + e.getMessage(), e);
            } finally {
                try {
                    bis.close();
                } catch (IOException ioe) {
                    // Ignore.
                }
            }
            if (docs != null) break;
        }
    }
    Thread.currentThread().setName(origName);

    if (docs == null) {
        if (failedParser.isEmpty()) {
            final String errorMsg = "Parsing content with file extension '" + fileExt + "' and mimetype '"
                    + mimeType + "' failed.";
            //log.logWarning("Unable to parse '" + location + "'. " + errorMsg);
            throw new Parser.Failure(errorMsg, location);
        }

        String failedParsers = "";
        for (final Map.Entry<Parser, Parser.Failure> error : failedParser.entrySet()) {
            AbstractParser.log.warn("tried parser '" + error.getKey().getName() + "' to parse "
                    + location.toNormalform(true) + " but failed: " + error.getValue().getMessage(),
                    error.getValue());
            failedParsers += error.getKey().getName() + " ";
        }
        throw new Parser.Failure("All parser failed: " + failedParsers, location);
    }
    for (final Document d : docs) {
        InputStream textStream = d.getTextStream();
        assert textStream != null : "mimeType = " + mimeType;
        try {
            if (textStream != null) {
                /* textStream can be a FileInputStream : we must close it to ensure releasing system resource */
                textStream.close();
            }
        } catch (IOException e) {
            AbstractParser.log.warn("Could not close text input stream");
        }
        d.setDepth(depth);
    }

    // verify docs
    return docs;
}
From source file:org.ejbca.ui.web.admin.certprof.CertProfilesBean.java
private CertificateProfile getCertProfileFromByteArray(String profilename, byte[] profileBytes)
        throws AuthorizationDeniedException {
    ByteArrayInputStream is = new ByteArrayInputStream(profileBytes);
    CertificateProfile cprofile = new CertificateProfile();
    try {
        XMLDecoder decoder = getXMLDecoder(is);

        // Add certificate profile
        Object data = null;
        try {
            data = decoder.readObject();
        } catch (IllegalArgumentException e) {
            if (log.isDebugEnabled()) {
                log.debug("IllegalArgumentException parsing certificate profile data: " + e.getMessage());
            }
            return null;
        }
        decoder.close();
        cprofile.loadData(data);

        // Make sure CAs in profile exist
        List<Integer> cas = cprofile.getAvailableCAs();
        ArrayList<Integer> casToRemove = new ArrayList<Integer>();
        for (Integer currentCA : cas) {
            // If the CA is not ANYCA and the CA does not exist, remove it from the profile before import
            if (currentCA != CertificateProfile.ANYCA) {
                try {
                    getEjbcaWebBean().getEjb().getCaSession().getCAInfo(getAdmin(), currentCA);
                } catch (CADoesntExistsException e) {
                    casToRemove.add(currentCA);
                }
            }
        }
        for (Integer toRemove : casToRemove) {
            log.warn("Warning: CA with id " + toRemove
                    + " was not found and will not be used in certificate profile '" + profilename + "'.");
            cas.remove(toRemove);
        }
        if (cas.size() == 0) {
            log.error("Error: No CAs left in certificate profile '" + profilename
                    + "' and no CA specified on command line. Using ANYCA.");
            cas.add(Integer.valueOf(CertificateProfile.ANYCA));
        }
        cprofile.setAvailableCAs(cas);

        // Remove and warn about unknown publishers
        List<Integer> publishers = cprofile.getPublisherList();
        ArrayList<Integer> allToRemove = new ArrayList<Integer>();
        for (Integer publisher : publishers) {
            BasePublisher pub = null;
            try {
                pub = getEjbcaWebBean().getEjb().getPublisherSession().getPublisher(getAdmin(), publisher);
            } catch (Exception e) {
                log.warn("Warning: There was an error loading publisher with id " + publisher
                        + ". Use debug logging to see stack trace: " + e.getMessage());
                log.debug("Full stack trace: ", e);
            }
            if (pub == null) {
                allToRemove.add(publisher);
            }
        }
        for (Integer toRemove : allToRemove) {
            log.warn("Warning: Publisher with id " + toRemove
                    + " was not found and will not be used in certificate profile '" + profilename + "'.");
            publishers.remove(toRemove);
        }
        cprofile.setPublisherList(publishers);
    } finally {
        try {
            is.close();
        } catch (IOException e) {
            throw new IllegalStateException("Unknown IOException was caught when closing stream", e);
        }
    }
    return cprofile;
}
From source file:GestoSAT.GestoSAT.java
public boolean actualizarConfiguracion(Vector<String> mySQL, Vector<String> confSeg, int iva, String logo)
        throws Exception {
    FileReader file;
    try {
        this.iva = Math.abs(iva);

        BufferedImage image = null;
        byte[] imageByte;
        BASE64Decoder decoder = new BASE64Decoder();
        imageByte = decoder.decodeBuffer(logo.split(",")[1]);
        ByteArrayInputStream bis = new ByteArrayInputStream(imageByte);
        image = ImageIO.read(bis);
        bis.close();

        // write the image to a file
        File outputfile = new File("logo");
        String formato = logo.split("/")[1].split(";")[0];
        ImageIO.write(image, formato, outputfile);

        // MySQL
        if (mySQL.elementAt(0).equals(this.mySQL.elementAt(0))) {
            if (!mySQL.elementAt(1).equals(this.mySQL.elementAt(1))
                    && !mySQL.elementAt(2).equals(this.mySQL.elementAt(2))
                    && (!mySQL.elementAt(3).equals(this.mySQL.elementAt(3)) || !mySQL.elementAt(0).equals(""))) {
                Class.forName("com.mysql.jdbc.Driver");
                this.con.close();
                this.con = DriverManager.getConnection("jdbc:mysql://" + mySQL.elementAt(0) + ":"
                        + Math.abs(Integer.parseInt(mySQL.elementAt(1))) + "/gestosat?user=" + mySQL.elementAt(2)
                        + "&password=" + mySQL.elementAt(3));

                this.mySQL.set(0, mySQL.elementAt(0));
                this.mySQL.set(1, Math.abs(Integer.parseInt(mySQL.elementAt(1))) + "");
                this.mySQL.set(2, mySQL.elementAt(2));
                this.mySQL.set(3, mySQL.elementAt(3));
            }
        } else {
            // Check that pass != ""
            Process pGet = Runtime.getRuntime()
                    .exec("mysqldump -u " + this.mySQL.elementAt(2) + " -p" + this.mySQL.elementAt(3) + " -h "
                            + this.mySQL.elementAt(0) + " -P " + this.mySQL.elementAt(1) + " gestosat");

            InputStream is = pGet.getInputStream();
            FileOutputStream fos = new FileOutputStream("backupGestoSAT.sql");
            byte[] bufferOut = new byte[1000];
            int leido = is.read(bufferOut);
            while (leido > 0) {
                fos.write(bufferOut, 0, leido);
                leido = is.read(bufferOut);
            }
            fos.close();

            Class.forName("com.mysql.jdbc.Driver");
            this.con.close();
            this.con = DriverManager.getConnection(
                    "jdbc:mysql://" + mySQL.elementAt(0) + ":" + Math.abs(Integer.parseInt(mySQL.elementAt(1)))
                            + "/gestosat?user=" + mySQL.elementAt(2) + "&password=" + mySQL.elementAt(3));

            this.mySQL.set(0, mySQL.elementAt(0));
            this.mySQL.set(1, Math.abs(Integer.parseInt(mySQL.elementAt(1))) + "");
            this.mySQL.set(2, mySQL.elementAt(2));
            this.mySQL.set(3, mySQL.elementAt(3));

            Process pPut = Runtime.getRuntime()
                    .exec("mysql -u " + mySQL.elementAt(2) + " -p" + mySQL.elementAt(3) + " -h "
                            + mySQL.elementAt(0) + " -P " + Math.abs(Integer.parseInt(mySQL.elementAt(1)))
                            + " gestosat");

            OutputStream os = pPut.getOutputStream();
            FileInputStream fis = new FileInputStream("backupGestoSAT.sql");
            byte[] bufferIn = new byte[1000];
            int escrito = fis.read(bufferIn);
            while (escrito > 0) {
                // write the bytes read in this iteration
                os.write(bufferIn, 0, escrito);
                escrito = fis.read(bufferIn);
            }
            os.flush();
            os.close();
            fis.close();
        }

        // FTP
        FTPClient cliente = new FTPClient();
        if (!confSeg.elementAt(3).equals("")) {
            cliente.connect(confSeg.elementAt(0), Integer.parseInt(confSeg.elementAt(1)));
            if (cliente.login(confSeg.elementAt(2), confSeg.elementAt(3))) {
                cliente.setFileType(FTP.BINARY_FILE_TYPE);
                BufferedInputStream buffIn = new BufferedInputStream(new FileInputStream("backupGestoSAT.sql"));
                cliente.enterLocalPassiveMode();
                cliente.storeFile("backupGestoSAT.sql", buffIn);
                buffIn.close();
                cliente.logout();
                cliente.disconnect();
                this.confSeg = confSeg;
            } else
                return false;
        }

        File archConf = new File("confGestoSAT");
        BufferedWriter bw = new BufferedWriter(new FileWriter(archConf));
        bw.write(this.mySQL.elementAt(0) + ";" + Math.abs(Integer.parseInt(this.mySQL.elementAt(1))) + ";"
                + this.mySQL.elementAt(2) + ";" + this.mySQL.elementAt(3) + ";" + this.confSeg.elementAt(0) + ";"
                + Math.abs(Integer.parseInt(this.confSeg.elementAt(1))) + ";" + this.confSeg.elementAt(2) + ";"
                + this.confSeg.elementAt(3) + ";" + Math.abs(iva));
        bw.close();

        return true;
    } catch (Exception ex) {
        file = new FileReader("confGestoSAT");
        BufferedReader b = new BufferedReader(file);
        String cadena;
        cadena = b.readLine();
        String[] valores = cadena.split(";");

        this.mySQL.add(valores[0]);
        this.mySQL.add(Math.abs(Integer.parseInt(valores[1])) + "");
        this.mySQL.add(valores[2]);
        this.mySQL.add(valores[3]);

        con.close();
        Class.forName("com.mysql.jdbc.Driver");
        con = DriverManager
                .getConnection("jdbc:mysql://" + this.mySQL.elementAt(0) + ":" + this.mySQL.elementAt(1)
                        + "/gestosat?user=" + this.mySQL.elementAt(2) + "&password=" + this.mySQL.elementAt(3));

        this.confSeg.add(valores[4]);
        this.confSeg.add(Math.abs(Integer.parseInt(valores[5])) + "");
        this.confSeg.add(valores[6]);
        this.confSeg.add(valores[7]);

        file.close();

        Logger.getLogger(GestoSAT.class.getName()).log(Level.SEVERE, null, ex);
        return false;
    }
}
From source file:jp.co.opentone.bsol.linkbinder.service.correspon.impl.CorresponSearchServiceImplTest.java
/**
 * Test of ZIP generation.
 * Verifies that each HTML entry name is built from the correspondence number plus the zero-padded id.
 */
@Test
public void testGenerateZipHtmlName() throws Exception {
    List<String> nullKeys = new ArrayList<String>();
    nullKeys.add("file.name.regex");
    nullKeys.add("file.name.replacement");
    MockSystemConfig.NULL_KEYS = nullKeys;

    MockAbstractService.RET_CURRENT_PROJECT_ID = "PJ1";

    List<Correspon> corresponlist = new ArrayList<Correspon>();
    Correspon c = new Correspon();
    c.setId(1L);
    c.setCorresponNo("YOC:OT:BUILDING-00001");
    c.setWorkflows(new ArrayList<Workflow>());
    corresponlist.add(c);

    c = new Correspon();
    c.setId(2L);
    c.setCorresponNo(null); // correspondence number not set
    c.setWorkflows(new ArrayList<Workflow>());
    corresponlist.add(c);

    c = new Correspon();
    c.setId(3L);
    c.setCorresponNo(" "); // blank correspondence number
    c.setWorkflows(new ArrayList<Workflow>());
    corresponlist.add(c);

    c = new Correspon();
    c.setId(4L);
    c.setCorresponNo("\\/:*?\"<>|"); // characters that are invalid in file names
    c.setWorkflows(new ArrayList<Workflow>());
    corresponlist.add(c);

    c = new Correspon();
    c.setId(9999999999L); // largest 10-digit id
    c.setCorresponNo("YOC:OT:BUILDING-00001");
    c.setWorkflows(new ArrayList<Workflow>());
    corresponlist.add(c);

    c = new Correspon();
    c.setId(10000000000L); // id with more than 10 digits
    c.setCorresponNo("YOC:OT:BUILDING-00001");
    c.setWorkflows(new ArrayList<Workflow>());
    corresponlist.add(c);

    MockCorresponService.RET_FIND = corresponlist;
    MockAbstractService.CORRESPONS = corresponlist;

    byte[] actual = service.generateZip(corresponlist);

    ByteArrayInputStream bi = new ByteArrayInputStream(actual);
    ZipInputStream zis = new ZipInputStream(bi);
    String[] expFileNames = { "PJ1_YOC-OT-BUILDING-00001(0000000001).html", "PJ1_(0000000002).html",
            "PJ1_ (0000000003).html", "PJ1_---------(0000000004).html",
            "PJ1_YOC-OT-BUILDING-00001(9999999999).html", "PJ1_YOC-OT-BUILDING-00001(10000000000).html" };
    int index = 0;
    try {
        for (ZipEntry ze = zis.getNextEntry(); ze != null; ze = zis.getNextEntry()) {
            String expFileName = expFileNames[index];
            assertEquals(expFileName, ze.getName());
            if (ze.isDirectory()) {
                continue;
            }
            index++;
        }
    } finally {
        zis.close();
        bi.close();
    }
}
From source file:org.gbif.harvest.tapir.TapirMetadataHandler.java
/**
 * Get the most prioritised content namespace.
 * In the event the capabilities response cannot be parsed,
 * the default content namespace is used.
 *
 * @param inputStream capabilities response as ByteArrayInputStream
 * @param directory as String
 *
 * @return most prioritised content namespace
 *
 * @throws HarvesterException thrown if method fails
 */
private String getNamespace(ByteArrayInputStream inputStream, String directory) throws HarvesterException {
    log.info("tapirmetadatahandler.start.getNamespace");

    // Initially, set the namespace to the default
    String newestNamespace = DEFAULT_CONTENT_NAMESPACE;

    // reset stream as we're reading it a second time
    if (inputStream != null) {
        inputStream.reset();
    }

    // retrieve the list of supported namespaces
    try {
        // namespaces = returnNamespace(fis, NAMESPACE_RESPONSE_XPATH_ELEMENT);
        Set<String> namespaces = digesterUtils.xmlToListOfAttributeValuesForSingleElement(inputStream,
                TapirMetadataHandler.namespaceResponseXPathElement,
                TapirMetadataHandler.supportedNamespaceAttributeName);

        // Iterate through the ordered list of available namespaces and
        // determine what the newest one from amongst the set of supported
        // namespaces retrieved is
        // Set the default namespace
        for (String supportedNamespace : supported_namespaces) {
            if (namespaces.contains(supportedNamespace)) {
                newestNamespace = supportedNamespace;
                log.debug("tapirmetadatahandler.getNamespace.chooseNamespace", newestNamespace);
                log.info("tapirmetadatahandler.end.getNamespace");
                return newestNamespace;
            }
        }
        // if not found, alert operator
        log.error("tapirmetadatahandler.default.conceptualMappingNotFound", namespaces.toString());
        // and write GBIF Log Message
        gbifLogger.openAndWriteToGbifLogMessageFile(directory,
                CommonGBIFLogEvent.COMMON_MESSAGES_UNKNOWN_SCHEMA_LOCATION.getName(),
                CommonGBIFLogEvent.COMMON_MESSAGES_UNKNOWN_SCHEMA_LOCATION.getValue(), Level.ERROR_INT,
                "None of the namespace(s) " + namespaces.toString()
                        + " was not found in the TAPIR conceptualMapping.properties file. Please update this file with valid namespace(s) and try again. Defaulting to namespace http://rs.tdwg.org/dwc/dwcore/",
                1, false);
    } catch (IOException e) {
        log.error("tapirmetadatahandler.error.getNamespace.parsing", e.getMessage(), e);
        log.debug("tapirmetadatahandler.default.getNamespace.chooseNamespace", newestNamespace);
        // throw new HarvesterException(e.getMessage(), e);
    } catch (SAXException e) {
        log.error("tapirmetadatahandler.error.getNamespace.parsing", e.getMessage(), e);
        log.debug("tapirmetadatahandler.default.getNamespace.chooseNamespace", newestNamespace);
        // throw new HarvesterException(e.getMessage(), e);
    }

    // close inputStream
    try {
        if (inputStream != null) {
            inputStream.close();
        }
    } catch (Exception e) {
        // do nothing
    }

    log.info("tapirmetadatahandler.end.getNamespace");
    return newestNamespace;
}
From source file:org.gbif.harvest.digir.DigirMetadataHandler.java
/**
 * Collect resource metadata, including resource contact metadata.
 * Resource metadata is written to file.
 * Resource contact metadata is also written to file.
 *
 * @param metadataResponse as ByteArrayInputStream
 * @param outputDirectory directory to write to
 *
 * @throws HarvesterException thrown if method fails
 */
private void processAllMetadata(ByteArrayInputStream metadataResponse, String outputDirectory)
        throws HarvesterException {
    log.info("start.processAllMetadata");

    // create the output directory
    File directory = new File(outputDirectory);

    // Prepare directory
    log.debug("digirmetadatahandler.start.processAllMetadata.prepareDirectory");
    if (directory.isDirectory()) {
        try {
            // remove all pre-existing contact tab files
            fileUtils.prepareDirectory(outputDirectory, DigirMetadataHandler.RESOURCES_WITH_COUNT_FILENAME);
            fileUtils.prepareDirectory(outputDirectory, Constants.CONTACT_FILENAME);
            log.debug("digirmetadatahandler.end.processAllMetadata.prepareDirectory");
        } catch (Exception e) {
            log.error("digirmetadatahandler.error.processAllMetadata.prepareDirectory", e.getMessage(), e);
            throw new HarvesterException(e.getMessage(), e);
        }
    }

    // create the output files
    File resourcesFile = new File(directory,
            DigirMetadataHandler.RESOURCES_WITH_COUNT_FILENAME.concat(Constants.TEXT_FILENAME_EXTENSION));
    File resourceContactsFile = new File(directory,
            Constants.CONTACT_FILENAME.concat(Constants.TEXT_FILENAME_EXTENSION));

    // ensure that they exist anew
    try {
        resourcesFile.createNewFile();
        resourceContactsFile.createNewFile();
    } catch (IOException e) {
        log.error("digirmetadatahandler.error.processAllMetadata.createFiles", e.getMessage(), e);
        throw new HarvesterException(e.getMessage(), e);
    }

    // create file writers for each file
    try {
        resourcesBW = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(resourcesFile, true), "UTF8"));
        resourceContactsBW = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(resourceContactsFile, true), "UTF8"));
    } catch (IOException e) {
        log.error("error.createBWs", e.getMessage(), e);
        throw new HarvesterException(e.getMessage(), e);
    }

    // write header column line for each file
    try {
        // The header line is derived from the names of the properties
        fileUtils.writeHeaderLine(resourcesBW, metadataElementsOfInterest.keySet(), true);
        // an identification number column name is also written
        fileUtils.writeHeaderLine(resourceContactsBW, metadataResourceContactElementsOfInterest.keySet(), true);
    } catch (IOException e) {
        log.error("error.writeHeaders", e.getMessage(), e);
        throw new HarvesterException(e.getMessage(), e);
    }

    // parse metadata
    setLineNumber(1);
    try {
        parseResponseFile(metadataResponse);
    } catch (Exception e) {
        log.error("error.metadataRequest.parsing", e.getMessage(), e);
        throw new HarvesterException(e.getMessage(), e);
    }

    // close the buffer writers and inputStream, and log having written the files so that
    // they appear in the console
    try {
        resourcesBW.close();
        resourceContactsBW.close();
        log.info("Writing to file: " + resourcesFile.getAbsolutePath());
        log.info("Writing to file: " + resourceContactsFile.getAbsolutePath());
        // close inputStream
        metadataResponse.close();
    } catch (IOException e) {
        log.error("error.closeBWs", e.getMessage(), e);
        throw new HarvesterException(e.getMessage(), e);
    }

    log.info("end.processAllMetadata");
}