List of usage examples for java.io.DataInputStream.readByte()

public final byte readByte() throws IOException

readByte() is declared by the java.io.DataInput interface and implemented by DataInputStream. It reads and returns one input byte, treated as a signed value in the range -128 through 127, and throws EOFException if the stream has reached its end before the byte can be read. The project examples below show it reading protocol opcodes, mode flags, file-format marker bytes, and raw content for byte-by-byte comparison.
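Before the project examples, a minimal self-contained sketch of the basic contract (not taken from any project below): readByte() returns signed bytes, and end of stream is signalled with EOFException rather than a sentinel value.

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.EOFException;
    import java.io.IOException;

    public class ReadByteDemo {
        public static void main(String[] args) throws IOException {
            byte[] data = { 0x01, 0x7F, (byte) 0xFF };
            try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data))) {
                while (true) {
                    try {
                        byte b = dis.readByte(); // bytes 0x80-0xFF come back negative
                        System.out.println(b);   // prints 1, 127, -1
                    } catch (EOFException eof) {
                        break; // end of stream
                    }
                }
            }
        }
    }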
From source file: org.kse.crypto.filetype.CryptoFileUtil.java
/**
 * Detect the KeyStore type contained in the supplied file.
 *
 * @param is
 *            Input stream to detect type for
 * @return KeyStore type or null if none matched
 * @throws IOException
 *             If an I/O problem occurred
 */
public static KeyStoreType detectKeyStoreType(InputStream is) throws IOException {
    byte[] contents = ReadUtil.readFully(is);

    DataInputStream dis = null;
    try {
        dis = new DataInputStream(new ByteArrayInputStream(contents));

        // If less than 4 bytes are available it isn't a KeyStore
        if (dis.available() < 4) {
            return null;
        }

        // Read first integer (4 bytes)
        int i1 = dis.readInt();

        // Test for JKS - starts with appropriate magic number
        if (i1 == JKS_MAGIC_NUMBER) {
            return JKS;
        }

        // Test for JCEKS - starts with appropriate magic number
        if (i1 == JCEKS_MAGIC_NUMBER) {
            return JCEKS;
        }

        // Test for BKS and UBER
        // Both start with a version number of 0, 1 or 2
        if ((i1 == 0) || (i1 == 1) || (i1 == 2)) {
            /*
             * For BKS and UBER the last 20 bytes of the file are the SHA-1
             * hash while the byte before that is an ASN1Null (0) indicating
             * the end of the store. UBER, however, encrypts the store
             * content making it highly unlikely that the ASN1Null end byte
             * will be preserved. Therefore if the 21st byte from the end of
             * the file is an ASN1Null then the KeyStore is BKS.
             */
            if (contents.length < 26) {
                // Insufficient bytes to be BKS or UBER
                return null;
            }

            // Skip to 21st from last byte (file length minus 21 and the 4 bytes already read)
            dis.skip(contents.length - 25);

            // Read what may be the null byte
            if (dis.readByte() == 0) {
                // Found null byte - BKS/BKS-V1
                if (i1 == 1) {
                    return BKS_V1;
                } else {
                    return BKS;
                }
            } else {
                // No null byte - UBER
                return UBER;
            }
        }
    } finally {
        IOUtils.closeQuietly(dis);
    }

    // @formatter:off
    /*
     * Test for PKCS #12. ASN.1 should look like this:
     *
     * PFX ::= ASN1Sequence { version ASN1Integer {v3(3)}(v3,...), authSafe
     * ContentInfo, macData MacData OPTIONAL }
     */
    // @formatter:on

    ASN1Primitive pfx = null;
    try {
        pfx = ASN1Primitive.fromByteArray(contents);
    } catch (IOException e) {
        // if it cannot be parsed as ASN.1, it is certainly not a PFX key store
        return null;
    }

    // Is a sequence...
    if ((pfx != null) && (pfx instanceof ASN1Sequence)) {
        // Has two or three components...
        ASN1Sequence sequence = (ASN1Sequence) pfx;
        if ((sequence.size() == 2) || (sequence.size() == 3)) {
            // ...the first of which is a version of 3
            ASN1Encodable firstComponent = sequence.getObjectAt(0);
            if (firstComponent instanceof ASN1Integer) {
                ASN1Integer version = (ASN1Integer) firstComponent;
                if (version.getValue().intValue() == 3) {
                    return PKCS12;
                }
            }
        }
    }

    // KeyStore type not recognised
    return null;
}
From source file: org.apache.hadoop.hdfs.server.namenode.IngestLocal.java
/**
 * Load an edit log, and continue applying the changes to the in-memory
 * structure. This is where we ingest transactions into the standby.
 */
private int loadFSEdits(File edits) throws IOException {
    FSNamesystem fsNamesys = FSNamesystem.getFSNamesystem();
    FSDirectory fsDir = fsNamesys.dir;
    int numEdits = 0;
    int logVersion = 0;
    String clientName = null;
    String clientMachine = null;
    String path = null;
    int numOpAdd = 0, numOpClose = 0, numOpDelete = 0, numOpRename = 0, numOpSetRepl = 0, numOpMkDir = 0,
            numOpSetPerm = 0, numOpSetOwner = 0, numOpSetGenStamp = 0, numOpTimes = 0, numOpOther = 0;
    long startTime = FSNamesystem.now();

    LOG.info("Ingest: Consuming transactions from file " + edits + " of size " + edits.length());

    rp = new RandomAccessFile(edits, "r");
    fp = new FileInputStream(rp.getFD()); // open for reads
    fc = rp.getChannel();

    DataInputStream in = new DataInputStream(fp);
    try {
        // Read log file version. Could be missing.
        in.mark(4);
        // If the edits log is greater than 2G, the available method will return
        // negative numbers, so we avoid having to call available
        boolean available = true;
        try {
            logVersion = in.readByte();
        } catch (EOFException e) {
            available = false;
        }
        if (available) {
            fc.position(0); // reset
            in = new DataInputStream(fp);
            logVersion = in.readInt();
            if (logVersion != FSConstants.LAYOUT_VERSION) // future version
                throw new IOException("Ingest: Unexpected version of the file system log file: " + logVersion
                        + ". Current version = " + FSConstants.LAYOUT_VERSION + ".");
        }
        assert logVersion <= Storage.LAST_UPGRADABLE_LAYOUT_VERSION : "Unsupported version " + logVersion;
        currentPosition = fc.position();
        numEdits = ingestFSEdits(edits, in, logVersion); // continue to ingest
    } finally {
        LOG.info("Ingest: Closing transactions file " + edits);
        fp.close();
    }

    LOG.info("Ingest: Edits file " + edits.getName() + " of size " + edits.length() + " edits # " + numEdits
            + " loaded in " + (FSNamesystem.now() - startTime) / 1000 + " seconds.");

    if (LOG.isDebugEnabled()) {
        LOG.debug("Ingest: numOpAdd = " + numOpAdd + " numOpClose = " + numOpClose + " numOpDelete = "
                + numOpDelete + " numOpRename = " + numOpRename + " numOpSetRepl = " + numOpSetRepl
                + " numOpMkDir = " + numOpMkDir + " numOpSetPerm = " + numOpSetPerm + " numOpSetOwner = "
                + numOpSetOwner + " numOpSetGenStamp = " + numOpSetGenStamp + " numOpTimes = " + numOpTimes
                + " numOpOther = " + numOpOther);
    }

    if (logVersion != FSConstants.LAYOUT_VERSION) // other version
        numEdits++; // save this image asap

    return numEdits;
}
From source file: org.apache.hadoop.hdfs.server.datanode.DataXceiver.java
/**
 * Read/write data from/to the DataXceiveServer.
 */
public void run() {
    DataInputStream in = null;
    try {
        in = new DataInputStream(new BufferedInputStream(NetUtils.getInputStream(s), SMALL_BUFFER_SIZE));
        short version = in.readShort();
        if (version != DataTransferProtocol.DATA_TRANSFER_VERSION) {
            throw new IOException("Version Mismatch");
        }
        boolean local = s.getInetAddress().equals(s.getLocalAddress());
        byte op = in.readByte();
        // Make sure the xceiver count is not exceeded
        int curXceiverCount = datanode.getXceiverCount();
        if (curXceiverCount > dataXceiverServer.maxXceiverCount) {
            throw new IOException("xceiverCount " + curXceiverCount
                    + " exceeds the limit of concurrent xcievers " + dataXceiverServer.maxXceiverCount);
        }
        long startTime = DataNode.now();
        switch (op) {
        case DataTransferProtocol.OP_READ_BLOCK:
            readBlock(in);
            datanode.myMetrics.addReadBlockOp(DataNode.now() - startTime);
            if (local)
                datanode.myMetrics.incrReadsFromLocalClient();
            else
                datanode.myMetrics.incrReadsFromRemoteClient();
            break;
        case DataTransferProtocol.OP_WRITE_BLOCK:
            writeBlock(in);
            datanode.myMetrics.addWriteBlockOp(DataNode.now() - startTime);
            if (local)
                datanode.myMetrics.incrWritesFromLocalClient();
            else
                datanode.myMetrics.incrWritesFromRemoteClient();
            break;
        case DataTransferProtocol.OP_REPLACE_BLOCK: // for balancing purpose; send to a destination
            replaceBlock(in);
            datanode.myMetrics.addReplaceBlockOp(DataNode.now() - startTime);
            break;
        case DataTransferProtocol.OP_COPY_BLOCK: // for balancing purpose; send to a proxy source
            copyBlock(in);
            datanode.myMetrics.addCopyBlockOp(DataNode.now() - startTime);
            break;
        case DataTransferProtocol.OP_BLOCK_CHECKSUM: // get the checksum of a block
            getBlockChecksum(in);
            datanode.myMetrics.addBlockChecksumOp(DataNode.now() - startTime);
            break;
        default:
            throw new IOException("Unknown opcode " + op + " in data stream");
        }
    } catch (Throwable t) {
        LOG.error(datanode.dnRegistration + ":DataXceiver", t);
    } finally {
        LOG.debug(datanode.dnRegistration + ":Number of active connections is: " + datanode.getXceiverCount());
        IOUtils.closeStream(in);
        IOUtils.closeSocket(s);
        dataXceiverServer.childSockets.remove(s);
    }
}
From source file: net.sf.keystore_explorer.crypto.filetype.CryptoFileUtil.java
/**
 * Detect the KeyStore type contained in the supplied file.
 *
 * @param is
 *            Input stream to detect type for
 * @return KeyStore type or null if none matched
 * @throws IOException
 *             If an I/O problem occurred
 */
public static KeyStoreType detectKeyStoreType(InputStream is) throws IOException {
    byte[] contents = ReadUtil.readFully(is);

    DataInputStream dis = null;
    try {
        dis = new DataInputStream(new ByteArrayInputStream(contents));

        // If less than 4 bytes are available it isn't a KeyStore
        if (dis.available() < 4) {
            return null;
        }

        // Read first integer (4 bytes)
        int i1 = dis.readInt();

        // Test for JKS - starts with appropriate magic number
        if (i1 == JKS_MAGIC_NUMBER) {
            return JKS;
        }

        if (i1 == HTKS_MAGIC_NUMBER) {
            return HTKS;
        }

        // Test for JCEKS - starts with appropriate magic number
        if (i1 == JCEKS_MAGIC_NUMBER) {
            return JCEKS;
        }

        // Test for BKS and UBER
        // Both start with a version number of 0, 1 or 2
        if ((i1 == 0) || (i1 == 1) || (i1 == 2)) {
            /*
             * For BKS and UBER the last 20 bytes of the file are the SHA-1
             * hash while the byte before that is an ASN1Null (0) indicating
             * the end of the store. UBER, however, encrypts the store
             * content making it highly unlikely that the ASN1Null end byte
             * will be preserved. Therefore if the 21st byte from the end of
             * the file is an ASN1Null then the KeyStore is BKS.
             */
            if (contents.length < 26) {
                // Insufficient bytes to be BKS or UBER
                return null;
            }

            // Skip to 21st from last byte (file length minus 21 and the 4 bytes already read)
            dis.skip(contents.length - 25);

            // Read what may be the null byte
            if (dis.readByte() == 0) {
                // Found null byte - BKS/BKS-V1
                if (i1 == 1) {
                    return BKS_V1;
                } else {
                    return BKS;
                }
            } else {
                // No null byte - UBER
                return UBER;
            }
        }
    } finally {
        IOUtils.closeQuietly(dis);
    }

    // @formatter:off
    /*
     * Test for PKCS #12. ASN.1 should look like this:
     *
     * PFX ::= ASN1Sequence { version ASN1Integer {v3(3)}(v3,...), authSafe
     * ContentInfo, macData MacData OPTIONAL }
     */
    // @formatter:on

    ASN1Primitive pfx = null;
    try {
        pfx = ASN1Primitive.fromByteArray(contents);
    } catch (IOException e) {
        // if it cannot be parsed as ASN.1, it is certainly not a PFX key store
        return null;
    }

    // Is a sequence...
    if ((pfx != null) && (pfx instanceof ASN1Sequence)) {
        // Has two or three components...
        ASN1Sequence sequence = (ASN1Sequence) pfx;
        if ((sequence.size() == 2) || (sequence.size() == 3)) {
            // ...the first of which is a version of 3
            ASN1Encodable firstComponent = sequence.getObjectAt(0);
            if (firstComponent instanceof ASN1Integer) {
                ASN1Integer version = (ASN1Integer) firstComponent;
                if (version.getValue().intValue() == 3) {
                    return PKCS12;
                }
            }
        }
    }

    // KeyStore type not recognised
    return null;
}
From source file: su.comp.bk.ui.BkEmuActivity.java
/**
 * Load image in bin format (address/length/data) from byte array.
 * @param imageData image data byte array
 * @throws IOException in case of loading error
 */
public int loadBinImage(byte[] imageData) throws IOException {
    if (imageData.length < 5 || imageData.length > 01000000) {
        throw new IllegalArgumentException("Invalid binary image file length: " + imageData.length);
    }
    DataInputStream imageDataInputStream = new DataInputStream(
            new ByteArrayInputStream(imageData, 0, imageData.length));
    lastBinImageAddress = (imageDataInputStream.readByte() & 0377)
            | ((imageDataInputStream.readByte() & 0377) << 8);
    lastBinImageLength = (imageDataInputStream.readByte() & 0377)
            | ((imageDataInputStream.readByte() & 0377) << 8);
    synchronized (computer) {
        for (int imageIndex = 0; imageIndex < lastBinImageLength; imageIndex++) {
            if (!computer.writeMemory(true, lastBinImageAddress + imageIndex, imageDataInputStream.read())) {
                throw new IllegalStateException("Can't write binary image data to address: 0"
                        + Integer.toOctalString(lastBinImageAddress) + imageIndex);
            }
        }
    }
    Log.d(TAG, "loaded bin image file: address 0" + Integer.toOctalString(lastBinImageAddress)
            + ", length: " + lastBinImageLength);
    return lastBinImageAddress;
}
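The masking in the example above is worth calling out: readByte() sign-extends, so a raw 0xFF participates in arithmetic as -1 unless it is masked back to an unsigned value with octal 0377 (0xFF) before the 16-bit little-endian words are composed. A standalone illustration of the idiom (not project code):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    public class LittleEndianWord {
        public static void main(String[] args) throws IOException {
            // Two bytes forming the little-endian word 0x1234
            byte[] data = { (byte) 0x34, (byte) 0x12 };
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
            // Without the & 0xFF masks, a high byte (0x80 and above) would
            // sign-extend and corrupt the composed value
            int word = (in.readByte() & 0xFF) | ((in.readByte() & 0xFF) << 8);
            System.out.println(Integer.toHexString(word)); // prints 1234
        }
    }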
From source file: com.cohesionforce.dis.BinaryConverter.java
public void run() {
    int count = 0;

    System.out.println("Opening file to convert: " + inputFile);
    FileInputStream fis;
    try {
        fis = new FileInputStream(inputFile);
    } catch (FileNotFoundException e1) {
        e1.printStackTrace();
        return;
    }
    DataInputStream dis = new DataInputStream(fis);
    startWriters();

    System.out.println("Starting to convert PDUs");
    while (done == false) {
        byte buffer[] = new byte[MAX_PDU_SIZE];
        byte pduType;
        try {
            pduType = dis.readByte();
            int pduSize = dis.readInt();
            int skip = dis.read(buffer, 0, 19);
            assert (skip == 19);
            int numberRead = dis.read(buffer, 0, pduSize);
            assert (numberRead == pduSize);
            ++count;
            // Convert the byte array to an object
            Object object;
            object = unmarshaller.getPdu(buffer);
            if (object != null) {
                logPdu(object, pduType);
            }
        } catch (EOFException e) {
            done = true;
        } catch (IOException e) {
            e.printStackTrace();
        } catch (Exception e) {
            done = true;
            e.printStackTrace();
        }
        if (count % 100000 == 0) {
            System.out.println("Converted " + count + " PDUs");
        }
    } // end loop

    try {
        dis.close();
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    System.out.print("Waiting on writers to clear their queues");
    boolean emptyQueue = false;
    while (!emptyQueue) {
        emptyQueue = true;
        for (LogWriter<?> writer : writers) {
            // If any queue is not empty, sleep and check again
            if (!writer.getQueue().isEmpty()) {
                try {
                    emptyQueue = false;
                    System.out.print(".");
                    Thread.sleep(1000);
                    break;
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }
    System.out.println("");
    System.out.println("PDUs converted: " + count);
    System.out.println("Shutting down logging threads");
    threadGroup.interrupt();
    int tries = 0;
    while (threadGroup.activeCount() > 0 && tries < 10) {
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
        }
        ++tries;
    }
    System.out.println("Completed logging threads shutdown");
}
From source file: org.pentaho.di.job.entries.folderscompare.JobEntryFoldersCompare.java
/**
 * Check whether 2 files have the same contents.
 *
 * @param file1
 *          first file to compare
 * @param file2
 *          second file to compare
 * @return true if files are equal, false if they are not
 *
 * @throws IOException
 *           upon IO problems
 */
protected boolean equalFileContents(FileObject file1, FileObject file2) throws KettleFileException {
    // Really read the contents and do comparisons
    DataInputStream in1 = null;
    DataInputStream in2 = null;
    try {
        // Really read the contents and do comparisons
        in1 = new DataInputStream(
                new BufferedInputStream(KettleVFS.getInputStream(KettleVFS.getFilename(file1), this)));
        in2 = new DataInputStream(
                new BufferedInputStream(KettleVFS.getInputStream(KettleVFS.getFilename(file2), this)));

        char ch1, ch2;
        while (in1.available() != 0 && in2.available() != 0) {
            ch1 = (char) in1.readByte();
            ch2 = (char) in2.readByte();
            if (ch1 != ch2) {
                return false;
            }
        }
        if (in1.available() != in2.available()) {
            return false;
        } else {
            return true;
        }
    } catch (IOException e) {
        throw new KettleFileException(e);
    } finally {
        if (in1 != null) {
            try {
                in1.close();
            } catch (IOException ignored) {
                // Nothing to see here...
            }
        }
        if (in2 != null) {
            try {
                in2.close();
            } catch (Exception ignored) {
                // We can't do anything else here...
            }
        }
    }
}
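One caveat about the loop above: InputStream.available() reports how many bytes can be read without blocking, not how many bytes remain, so using it as an end-of-stream test is only dependable for sources like local files where the two happen to coincide. A sketch of an alternative (a hypothetical helper, not Pentaho code) that instead treats the EOFException thrown by readByte() as the end-of-stream signal:

    import java.io.DataInputStream;
    import java.io.EOFException;
    import java.io.IOException;
    import java.io.InputStream;

    public final class StreamCompare {
        // Returns true if both streams yield exactly the same byte sequence
        static boolean sameContents(InputStream a, InputStream b) throws IOException {
            DataInputStream in1 = new DataInputStream(a);
            DataInputStream in2 = new DataInputStream(b);
            while (true) {
                byte b1;
                try {
                    b1 = in1.readByte();
                } catch (EOFException eof) {
                    // First stream exhausted: equal only if the second is too
                    return in2.read() == -1;
                }
                int b2 = in2.read();
                if (b2 == -1 || b1 != (byte) b2) {
                    return false; // second stream shorter, or bytes differ
                }
            }
        }
    }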
From source file: org.apache.geode.internal.cache.tier.sockets.HandShake.java
public static Properties readCredentials(DataInputStream dis, DataOutputStream dos, DistributedSystem system,
        SecurityService securityService) throws GemFireSecurityException, IOException {
    boolean requireAuthentication = securityService.isClientSecurityRequired();
    Properties credentials = null;
    try {
        byte secureMode = dis.readByte();
        throwIfMissingRequiredCredentials(requireAuthentication, secureMode != CREDENTIALS_NONE);
        if (secureMode == CREDENTIALS_NORMAL) {
            if (requireAuthentication) {
                credentials = DataSerializer.readProperties(dis);
            } else {
                DataSerializer.readProperties(dis); // ignore the credentials
            }
        } else if (secureMode == CREDENTIALS_DHENCRYPT) {
            boolean sendAuthentication = dis.readBoolean();
            InternalLogWriter securityLogWriter = (InternalLogWriter) system.getSecurityLogWriter();
            // Get the symmetric encryption algorithm to be used
            String skAlgo = DataSerializer.readString(dis);
            // Get the public key of the other side
            byte[] keyBytes = DataSerializer.readByteArray(dis);
            byte[] challenge = null;
            PublicKey pubKey = null;
            if (requireAuthentication) {
                // Generate PublicKey from encoded form
                X509EncodedKeySpec x509KeySpec = new X509EncodedKeySpec(keyBytes);
                KeyFactory keyFact = KeyFactory.getInstance("DH");
                pubKey = keyFact.generatePublic(x509KeySpec);
                // Send the public key to other side
                keyBytes = dhPublicKey.getEncoded();
                challenge = new byte[64];
                random.nextBytes(challenge);
                // If the server has to also authenticate itself then
                // sign the challenge from client.
                if (sendAuthentication) {
                    // Get the challenge string from client
                    byte[] clientChallenge = DataSerializer.readByteArray(dis);
                    if (privateKeyEncrypt == null) {
                        throw new AuthenticationFailedException(
                                LocalizedStrings.HandShake_SERVER_PRIVATE_KEY_NOT_AVAILABLE_FOR_CREATING_SIGNATURE
                                        .toLocalizedString());
                    }
                    // Sign the challenge from client and send it to the client
                    Signature sig = Signature.getInstance(privateKeySignAlgo);
                    sig.initSign(privateKeyEncrypt);
                    sig.update(clientChallenge);
                    byte[] signedBytes = sig.sign();
                    dos.writeByte(REPLY_OK);
                    DataSerializer.writeByteArray(keyBytes, dos);
                    // DataSerializer.writeString(privateKeyAlias, dos);
                    DataSerializer.writeString(privateKeySubject, dos);
                    DataSerializer.writeByteArray(signedBytes, dos);
                    securityLogWriter.fine("HandShake: sent the signed client challenge");
                } else {
                    // These two lines should not be moved before the if{} statement in
                    // a common block for both if...then...else parts. This is to handle
                    // the case when an AuthenticationFailedException is thrown by the
                    // if...then part when sending the signature.
                    dos.writeByte(REPLY_OK);
                    DataSerializer.writeByteArray(keyBytes, dos);
                }
                // Now send the server challenge
                DataSerializer.writeByteArray(challenge, dos);
                securityLogWriter.fine("HandShake: sent the public key and challenge");
                dos.flush();

                // Read and decrypt the credentials
                byte[] encBytes = DataSerializer.readByteArray(dis);
                KeyAgreement ka = KeyAgreement.getInstance("DH");
                ka.init(dhPrivateKey);
                ka.doPhase(pubKey, true);
                Cipher decrypt;
                int keysize = getKeySize(skAlgo);
                int blocksize = getBlockSize(skAlgo);
                if (keysize == -1 || blocksize == -1) {
                    SecretKey sKey = ka.generateSecret(skAlgo);
                    decrypt = Cipher.getInstance(skAlgo);
                    decrypt.init(Cipher.DECRYPT_MODE, sKey);
                } else {
                    String algoStr = getDhAlgoStr(skAlgo);
                    byte[] sKeyBytes = ka.generateSecret();
                    SecretKeySpec sks = new SecretKeySpec(sKeyBytes, 0, keysize, algoStr);
                    IvParameterSpec ivps = new IvParameterSpec(sKeyBytes, keysize, blocksize);
                    decrypt = Cipher.getInstance(algoStr + "/CBC/PKCS5Padding");
                    decrypt.init(Cipher.DECRYPT_MODE, sks, ivps);
                }
                byte[] credentialBytes = decrypt.doFinal(encBytes);
                ByteArrayInputStream bis = new ByteArrayInputStream(credentialBytes);
                DataInputStream dinp = new DataInputStream(bis);
                credentials = DataSerializer.readProperties(dinp);
                byte[] challengeRes = DataSerializer.readByteArray(dinp);
                // Check the challenge string
                if (!Arrays.equals(challenge, challengeRes)) {
                    throw new AuthenticationFailedException(
                            LocalizedStrings.HandShake_MISMATCH_IN_CHALLENGE_BYTES_MALICIOUS_CLIENT
                                    .toLocalizedString());
                }
                dinp.close();
            } else {
                if (sendAuthentication) {
                    // Read and ignore the client challenge
                    DataSerializer.readByteArray(dis);
                }
                dos.writeByte(REPLY_AUTH_NOT_REQUIRED);
                dos.flush();
            }
        } else if (secureMode == SECURITY_MULTIUSER_NOTIFICATIONCHANNEL) {
            // hitesh there will be no credential CCP will get credential(Principal) using
            // ServerConnection..
            logger.debug("readCredential where multiuser mode creating callback connection");
        }
    } catch (IOException ex) {
        throw ex;
    } catch (GemFireSecurityException ex) {
        throw ex;
    } catch (Exception ex) {
        throw new AuthenticationFailedException(
                LocalizedStrings.HandShake_FAILURE_IN_READING_CREDENTIALS.toLocalizedString(), ex);
    }
    return credentials;
}
From source file: org.tacografo.file.FileBlockTGD.java
/**
 * Reads the blocks in tag(fid)-length-value format.
 *
 * @param entrada
 * @throws IOException
 * @throws ErrorFile thrown when the file is not a TGD file, or when reading a
 *         block fails because the tag (fid) is not found
 */
private void lectura_bloque(DataInputStream entrada) throws IOException, ErrorFile {
    boolean existe_fid = true;
    while (existe_fid) {
        // The read has to use readUnsignedShort because the fids c108 and c100
        // would otherwise be read as signed values and come back padded as
        // ffffc108 and ffffc100
        int fid = entrada.readUnsignedShort();
        existe_fid = this.existe_Fid(fid);
        if (existe_fid) {
            // file type: 0 = data block, 1 = certificate
            byte tipo = entrada.readByte();
            Integer longitud = Integer.valueOf(entrada.readChar());
            byte[] datos = new byte[longitud];
            entrada.read(datos, 0, longitud);
            // block type
            if (tipo == 0) {
                CardBlock block = FactoriaBloques.getFactoria(fid, datos);
                if (block != null) {
                    this.lista_bloque.put(block.getFID(), block);
                }
            }
        } else {
            throw new ErrorFile();
        }
    }
}
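A detail in the example above: the two-byte length field is read with readChar(), which works because Java's char is an unsigned 16-bit type, so the call yields the same value as readUnsignedShort(). A minimal hypothetical TLV reader using the same pattern (the record layout here is invented for illustration):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    public class TlvSketch {
        public static void main(String[] args) throws IOException {
            // tag=0x0501, length=3, value={1,2,3}
            byte[] record = { 0x05, 0x01, 0x00, 0x03, 1, 2, 3 };
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(record));
            int tag = in.readUnsignedShort(); // 0x0501, never sign-extended
            int length = in.readChar();       // unsigned 16-bit, same result as readUnsignedShort()
            byte[] value = new byte[length];
            in.readFully(value);              // unlike read(), guarantees all bytes or throws EOFException
            System.out.println("tag=" + Integer.toHexString(tag) + " len=" + length);
        }
    }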
From source file: org.motechproject.mobile.web.OXDFormUploadServlet.java
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods.
 *
 * @param request
 *            servlet request
 * @param response
 *            servlet response
 * @throws ServletException
 *             if a servlet-specific error occurs
 * @throws IOException
 *             if an I/O error occurs
 */
@RequestMapping(method = RequestMethod.POST)
public void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    long startTime = System.currentTimeMillis();

    IMPService impService = (IMPService) appCtx.getBean("impService");
    StudyProcessor studyProcessor = (StudyProcessor) appCtx.getBean("studyProcessor");

    InputStream input = request.getInputStream();
    OutputStream output = response.getOutputStream();
    ZOutputStream zOutput = null;

    // Wrap the streams for compression
    // Wrap the streams for logical types
    DataInputStream dataInput = null;
    DataOutputStream dataOutput = null;

    // Set the MIME type so clients don't misinterpret
    response.setContentType("application/octet-stream");

    try {
        zOutput = new ZOutputStream(output, JZlib.Z_BEST_COMPRESSION);
        dataInput = new DataInputStream(input);
        dataOutput = new DataOutputStream(zOutput);

        if (rawUploadLog.isInfoEnabled()) {
            byte[] rawPayload = IOUtils.toByteArray(dataInput);
            String hexEncodedPayload = Hex.encodeHexString(rawPayload);
            rawUploadLog.info(hexEncodedPayload);
            // Replace the original input stream with one using read payload
            dataInput.close();
            dataInput = new DataInputStream(new ByteArrayInputStream(rawPayload));
        }

        String name = dataInput.readUTF();
        String password = dataInput.readUTF();
        String serializer = dataInput.readUTF();
        String locale = dataInput.readUTF();
        byte action = dataInput.readByte();

        // TODO Authentication of username and password. Possible M6 enhancement
        log.info("uploading: name=" + name + ", password=" + password + ", serializer=" + serializer
                + ", locale=" + locale + ", action=" + action);

        EpihandyXformSerializer serObj = new EpihandyXformSerializer();
        serObj.addDeserializationListener(studyProcessor);
        try {
            Map<Integer, String> formVersionMap = formService.getXForms();
            serObj.deserializeStudiesWithEvents(dataInput, formVersionMap);
        } catch (FormNotFoundException fne) {
            String msg = "failed to deserialize forms: ";
            log.error(msg + fne.getMessage());
            dataOutput.writeByte(ResponseHeader.STATUS_FORMS_STALE);
            response.setStatus(HttpServletResponse.SC_OK);
            return;
        } catch (Exception e) {
            String msg = "failed to deserialize forms";
            log.error(msg, e);
            dataOutput.writeByte(ResponseHeader.STATUS_ERROR);
            response.setStatus(HttpServletResponse.SC_OK);
            return;
        }

        String[][] studyForms = studyProcessor.getConvertedStudies();
        int numForms = studyProcessor.getNumForms();
        log.debug("upload contains: studies=" + studyForms.length + ", forms=" + numForms);

        // Starting processing here, only process until we run out of time
        int processedForms = 0;
        int faultyForms = 0;
        if (studyForms != null && numForms > 0) {
            formprocessing: for (int i = 0; i < studyForms.length; i++) {
                for (int j = 0; j < studyForms[i].length; j++, processedForms++) {
                    if (maxProcessingTime > 0 && System.currentTimeMillis() - startTime > maxProcessingTime)
                        break formprocessing;
                    try {
                        studyForms[i][j] = impService.processXForm(studyForms[i][j]);
                    } catch (Exception ex) {
                        log.error("processing form failed", ex);
                        studyForms[i][j] = ex.getMessage();
                    }
                    if (!impService.getFormProcessSuccess().equalsIgnoreCase(studyForms[i][j])) {
                        faultyForms++;
                    }
                }
            }
        }

        // Write out the usual upload response
        dataOutput.writeByte(ResponseHeader.STATUS_SUCCESS);
        dataOutput.writeInt(processedForms);
        dataOutput.writeInt(faultyForms);
        for (int s = 0; s < studyForms.length; s++) {
            for (int f = 0; f < studyForms[s].length; f++) {
                if (!impService.getFormProcessSuccess().equalsIgnoreCase(studyForms[s][f])) {
                    dataOutput.writeByte((byte) s);
                    dataOutput.writeShort((short) f);
                    dataOutput.writeUTF(studyForms[s][f]);
                }
            }
        }
        response.setStatus(HttpServletResponse.SC_OK);
    } catch (Exception e) {
        log.error("failure during upload", e);
    } finally {
        if (dataOutput != null)
            dataOutput.flush();
        if (zOutput != null)
            zOutput.finish();
        response.flushBuffer();
    }
}