List of usage examples for java.util.LinkedList.size()
public int size()
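Before the project examples below, here is a minimal, self-contained sketch of calling size() on a LinkedList; the class name, variable names, and values are illustrative only and are not taken from any of the projects that follow:

import java.util.LinkedList;

public class LinkedListSizeDemo {
    public static void main(String[] args) {
        // A newly created list is empty, so size() is 0.
        LinkedList<String> log = new LinkedList<String>();
        System.out.println(log.size()); // prints 0

        // size() grows with every element added...
        log.addLast("first entry");
        log.addLast("second entry");
        System.out.println(log.size()); // prints 2

        // ...and shrinks with every element removed.
        log.removeFirst();
        System.out.println(log.size()); // prints 1
    }
}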
From source file:com.mirth.connect.plugins.dashboardstatus.DashboardConnectorEventListener.java
public synchronized LinkedList<String[]> getChannelLog(Object object, String sessionId) {
    String channelName;
    LinkedList<String[]> channelLog;

    if (object == null) {
        /*
         * object is null - no channel is selected. return the latest entire log entries of all
         * channels combined. ONLY new entries.
         */
        channelName = "No Channel Selected";
        channelLog = entireConnectorInfoLogs;
    } else {
        // object is not null - a channel is selected. return the latest
        // (LOG_SIZE) of that particular channel.
        channelName = object.toString();
        // return only the newly added log entries for the client with
        // matching sessionId.
        channelLog = connectorInfoLogs.get(channelName);
        if (channelLog == null) {
            channelLog = new LinkedList<String[]>();
            connectorInfoLogs.put(channelName, channelLog);
        }
    }

    Map<String, Long> lastDisplayedLogIdByChannel;

    if (lastDisplayedLogIndexBySessionId.containsKey(sessionId)) {
        // client exist with the sessionId.
        lastDisplayedLogIdByChannel = lastDisplayedLogIndexBySessionId.get(sessionId);

        if (lastDisplayedLogIdByChannel.containsKey(channelName)) {
            // existing channel on an already open client.
            // -> only display new log entries.
            long lastDisplayedLogId = lastDisplayedLogIdByChannel.get(channelName);
            LinkedList<String[]> newChannelLogEntries = new LinkedList<String[]>();

            // FYI, channelLog.size() will never be larger than LOG_SIZE = 1000.
            for (String[] aChannelLog : channelLog) {
                if (lastDisplayedLogId < Long.parseLong(aChannelLog[0])) {
                    newChannelLogEntries.addLast(aChannelLog);
                }
            }

            if (newChannelLogEntries.size() > 0) {
                /*
                 * put the lastDisplayedLogId into the HashMap. index 0 is the most recent
                 * entry, and index 0 of that entry contains the logId.
                 */
                lastDisplayedLogIdByChannel.put(channelName,
                        Long.parseLong(newChannelLogEntries.get(0)[0]));
                lastDisplayedLogIndexBySessionId.put(sessionId, lastDisplayedLogIdByChannel);
            }

            try {
                return SerializationUtils.clone(newChannelLogEntries);
            } catch (SerializationException e) {
                logger.error(e);
            }
        } else {
            /*
             * new channel viewing on an already open client. -> all log entries are new.
             * display them all. put the lastDisplayedLogId into the HashMap. index 0 is the most
             * recent entry, and index 0 of that entry object contains the logId.
             */
            if (channelLog.size() > 0) {
                lastDisplayedLogIdByChannel.put(channelName, Long.parseLong(channelLog.get(0)[0]));
                lastDisplayedLogIndexBySessionId.put(sessionId, lastDisplayedLogIdByChannel);
            }

            try {
                return SerializationUtils.clone(channelLog);
            } catch (SerializationException e) {
                logger.error(e);
            }
        }
    } else {
        // brand new client.
        // thus also new channel viewing.
        // -> all log entries are new. display them all.
        lastDisplayedLogIdByChannel = new HashMap<String, Long>();

        if (channelLog.size() > 0) {
            lastDisplayedLogIdByChannel.put(channelName, Long.parseLong(channelLog.get(0)[0]));
        } else {
            // no log exist at all. put the currentLogId-1, which is the
            // very latest logId.
            lastDisplayedLogIdByChannel.put(channelName, logId - 1);
        }

        lastDisplayedLogIndexBySessionId.put(sessionId, lastDisplayedLogIdByChannel);

        try {
            return SerializationUtils.clone(channelLog);
        } catch (SerializationException e) {
            logger.error(e);
        }
    }

    return null;
}
From source file:com.espertech.esper.regression.pattern.TestCronParameter.java
private void checkResults(String eventId) {
    log.debug(".checkResults Checking results for event " + eventId);

    String expressionText = patternStmt.getText();

    LinkedHashMap<String, LinkedList<EventDescriptor>> allExpectedResults = testCase.getExpectedResults();
    EventBean[] receivedResults = listener.getLastNewData();

    // If nothing at all was expected for this event, make sure nothing was received
    if (!(allExpectedResults.containsKey(eventId))) {
        if ((receivedResults != null) && (receivedResults.length > 0)) {
            log.debug(".checkResults Incorrect result for expression : " + expressionText);
            log.debug(".checkResults Expected no results for event " + eventId + ", but received "
                    + receivedResults.length + " events");
            log.debug(".checkResults Received, have " + receivedResults.length + " entries");
            printList(receivedResults);
            TestCase.assertFalse(true);
        }
    }

    LinkedList<EventDescriptor> expectedResults = allExpectedResults.get(eventId);

    // Compare the result lists, not caring about the order of the elements
    if (!(compareLists(receivedResults, expectedResults))) {
        log.debug(".checkResults Incorrect result for expression : " + expressionText);
        log.debug(".checkResults Expected size=" + expectedResults.size() + " received size="
                + (receivedResults == null ? 0 : receivedResults.length));
        log.debug(".checkResults Expected, have " + expectedResults.size() + " entries");
        printList(expectedResults);
        log.debug(".checkResults Received, have "
                + (receivedResults == null ? 0 : receivedResults.length) + " entries");
        printList(receivedResults);
        TestCase.assertFalse(true);
    }
}
From source file:com.espertech.esper.regression.support.PatternTestHarness.java
private void checkResults(PatternTestStyle testStyle, String eventId) {
    // For each test descriptor, make sure the listener has received exactly the events expected
    int index = 0;
    log.debug(".checkResults Checking results for event " + eventId);

    for (EventExpressionCase descriptor : caseList.getResults()) {
        String expressionText = expressions[index].getText();

        LinkedHashMap<String, LinkedList<EventDescriptor>> allExpectedResults = descriptor.getExpectedResults();
        EventBean[] receivedResults = listeners[index].getLastNewData();
        index++;

        // If nothing at all was expected for this event, make sure nothing was received
        if (!(allExpectedResults.containsKey(eventId))) {
            if ((receivedResults != null) && (receivedResults.length > 0)) {
                log.debug(".checkResults Incorrect result for style " + testStyle + " expression : "
                        + expressionText);
                log.debug(".checkResults Expected no results for event " + eventId + ", but received "
                        + receivedResults.length + " events");
                log.debug(".checkResults Received, have " + receivedResults.length + " entries");
                printList(receivedResults);
                TestCase.assertFalse(true);
            }
            continue;
        }

        LinkedList<EventDescriptor> expectedResults = allExpectedResults.get(eventId);

        // Compare the result lists, not caring about the order of the elements
        try {
            if (!(compareLists(receivedResults, expectedResults))) {
                log.debug(".checkResults Incorrect result for style " + testStyle + " expression : "
                        + expressionText);
                log.debug(".checkResults Expected size=" + expectedResults.size() + " received size="
                        + (receivedResults == null ? 0 : receivedResults.length));
                log.debug(".checkResults Expected, have " + expectedResults.size() + " entries");
                printList(expectedResults);
                log.debug(".checkResults Received, have "
                        + (receivedResults == null ? 0 : receivedResults.length) + " entries");
                printList(receivedResults);
                TestCase.assertFalse(true);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
            Assert.fail("For statement '" + expressionText + "' failed to assert: " + ex.getMessage());
        }
    }
}
From source file:gsn.http.datarequest.DownloadData.java
@Override
public void outputResult(OutputStream os) {
    PrintWriter respond = new PrintWriter(os);
    Iterator<Entry<String, AbstractQuery>> iter = qbuilder.getSqlQueries().entrySet().iterator();
    Entry<String, AbstractQuery> nextSqlQuery;
    DataEnumerator de = null;
    try {
        if (ot == AllowedOutputType.xml) {
            respond.println("<result>");
        }
        while (iter.hasNext()) {
            nextSqlQuery = iter.next();
            Connection connection = null;
            connection = Main.getStorage(nextSqlQuery.getKey()).getConnection();
            de = Main.getStorage(nextSqlQuery.getKey()).streamedExecuteQuery(nextSqlQuery.getValue(), true,
                    connection);

            // get units in hash map
            HashMap<String, String> fieldToUnitMap = new HashMap<String, String>();
            VSensorConfig sensorConfig = Mappings.getVSensorConfig(nextSqlQuery.getKey());
            DataField[] dataFieldArray = sensorConfig.getOutputStructure();
            for (DataField df : dataFieldArray) {
                String unit = df.getUnit();
                if (unit == null || unit.trim().length() == 0)
                    unit = "";
                fieldToUnitMap.put(df.getName().toLowerCase(), unit);
            }

            logger.debug("Data Enumerator: " + de);
            if (ot == AllowedOutputType.csv) {
                respond.println("# vsname:" + nextSqlQuery.getKey());
                respond.println("# query:" + nextSqlQuery.getValue().getStandardQuery()
                        + (nextSqlQuery.getValue().getLimitCriterion() == null ? ""
                                : "(" + nextSqlQuery.getValue().getLimitCriterion() + ")"));
                for (KeyValue df : sensorConfig.getAddressing()) {
                    respond.println("# " + df.getKey().toString().toLowerCase() + ":" + df.getValue().toString());
                }
                respond.println("# description:" + sensorConfig.getDescription());
            } else if (ot == AllowedOutputType.xml) {
                respond.println("\t<!-- " + nextSqlQuery.getValue().getStandardQuery() + " -->");
                for (KeyValue df : sensorConfig.getAddressing()) {
                    respond.println("\t<!-- " + StringEscapeUtils.escapeXml(df.getKey().toString().toLowerCase())
                            + ":" + StringEscapeUtils.escapeXml(df.getValue().toString()) + " -->");
                }
                respond.println("\t<!-- description:"
                        + StringEscapeUtils.escapeXml(sensorConfig.getDescription()) + " -->");
                respond.println("\t<data vsname=\"" + nextSqlQuery.getKey() + "\">");
            }

            FieldsCollection fc = qbuilder.getVsnamesAndStreams().get(nextSqlQuery.getKey());
            boolean wantTimed = true;
            boolean firstLine = true;

            LinkedList<StreamElement> streamElements = new LinkedList<StreamElement>();
            while (de.hasMoreElements()) {
                streamElements.add(de.nextElement());
            }

            double valsPerVS = MAX_SAMPLE_VALUES / numberOfFieldsInRequest();
            if (requestParameters.containsKey("sample")
                    && "true".equalsIgnoreCase(requestParameters.get("sample")[0])
                    && streamElements.size() > valsPerVS) {
                // sampling
                int numOfVals = streamElements.size();
                int left = (int) valsPerVS;
                int valsForAvg = (int) Math.ceil(numOfVals / valsPerVS);
                if (requestParameters.containsKey("sampling_percentage")) {
                    try {
                        String percentageString = requestParameters.get("sampling_percentage")[0];
                        int percentage = Integer.parseInt(percentageString);
                        if (percentage > 0 && percentage <= 100 && numOfVals * percentage > 100) {
                            left = numOfVals * percentage / 100;
                            valsForAvg = (int) Math.ceil(numOfVals / left);
                        }
                    } catch (Exception e) {
                    }
                }
                while (!streamElements.isEmpty()) {
                    StreamElement se = null;
                    if (numOfVals > left) {
                        StreamElement[] seForSampling = new StreamElement[valsForAvg];
                        for (int i = 0; i < valsForAvg; i++) {
                            seForSampling[i] = streamElements.removeLast();
                        }
                        numOfVals -= valsForAvg;
                        left--;
                        se = sampleSkip(seForSampling);
                    } else {
                        se = streamElements.removeLast();
                    }
                    if (ot == AllowedOutputType.csv) {
                        formatCSVElement(respond, se, wantTimed, csvDelimiter, firstLine, fieldToUnitMap);
                    } else if (ot == AllowedOutputType.xml) {
                        formatXMLElement(respond, se, wantTimed, firstLine, fieldToUnitMap);
                    }
                    firstLine = false;
                }
            } else {
                while (!streamElements.isEmpty()) {
                    if (ot == AllowedOutputType.csv) {
                        formatCSVElement(respond, streamElements.removeLast(), wantTimed, csvDelimiter,
                                firstLine, fieldToUnitMap);
                    } else if (ot == AllowedOutputType.xml) {
                        formatXMLElement(respond, streamElements.removeLast(), wantTimed, firstLine,
                                fieldToUnitMap);
                    }
                    firstLine = false;
                }
            }
            if (ot == AllowedOutputType.xml)
                respond.println("\t</data>");
        }
        if (ot == AllowedOutputType.xml) {
            respond.println("</result>");
        }
    } catch (SQLException e) {
        logger.debug(e.getMessage());
    } finally {
        respond.flush();
        if (de != null)
            de.close();
    }
}
From source file:WaitSemaphore.java
protected void logRelease() {
    if (m_debug) {
        // Find a matching thread and remove info for it
        Thread thread = Thread.currentThread();
        LinkedList list = (LinkedList) m_logMap.get(thread);
        if (list != null) {
            Info info = new Info(thread, 0, "");
            if (!list.remove(info)) {
                System.err.println("LOG INFO SIZE: " + list);
                new IllegalStateException("BUG: semaphore log list does not contain required info")
                        .printStackTrace();
            }
            // If no info left, remove the mapping
            int size = list.size();
            if (size < 1) {
                m_logMap.remove(thread);
            }
        } else {
            throw new IllegalStateException("Semaphore log failed: release called without acquire");
        }
    }
}
From source file:org.nekorp.workflow.desktop.view.CobranzaView.java
@Override
public void updateModel(Object origen, String property, Object value) {
    if (!ignore.remove(value)) {
        LinkedList<PagoCobranzaVB> param = (LinkedList<PagoCobranzaVB>) value;
        LinkedList<PagoCobranzaVB> borrar = new LinkedList<>();
        for (PagoCobranzaVB obj : modelo) {
            if (!param.contains(obj)) {
                borrar.add(obj);
            }
        }
        for (PagoCobranzaVB x : borrar) {
            removePago(x);
        }
        for (int i = 0; i < param.size(); i++) {
            if (this.modelo.size() > i) {
                if (!param.get(i).equals(this.modelo.get(i))) {
                    this.modelo.add(i, param.get(i));
                    addPagoView(this.modelo.get(i), i);
                }
            } else {
                this.modelo.add(param.get(i));
                addPagoView(this.modelo.get(i), i);
            }
        }
        this.updateUI();
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                jScrollPane1.getVerticalScrollBar().setValue(jScrollPane1.getVerticalScrollBar().getMaximum());
            }
        });
    }
}
From source file:android.net.http.Connection.java
/**
 * Process requests in queue
 * pipelines requests
 */
void processRequests(Request firstRequest) {
    Request req = null;
    boolean empty;
    int error = EventHandler.OK;
    Exception exception = null;

    LinkedList<Request> pipe = new LinkedList<Request>();

    int minPipe = MIN_PIPE, maxPipe = MAX_PIPE;
    int state = SEND;

    while (state != DONE) {
        if (HttpLog.LOGV)
            HttpLog.v(states[state] + " pipe " + pipe.size());

        /* If a request was cancelled, give other cancel requests some
           time to go through so we don't uselessly restart connections */
        if (mActive == STATE_CANCEL_REQUESTED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException x) {
                /* ignore */
            }
            mActive = STATE_NORMAL;
        }

        switch (state) {
        case SEND: {
            if (pipe.size() == maxPipe) {
                state = READ;
                break;
            }
            /* get a request */
            if (firstRequest == null) {
                req = mRequestFeeder.getRequest(mHost);
            } else {
                req = firstRequest;
                firstRequest = null;
            }
            if (req == null) {
                state = DRAIN;
                break;
            }
            req.setConnection(this);

            /* Don't work on cancelled requests. */
            if (req.mCancelled) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): skipping cancelled request " + req);
                req.complete();
                break;
            }

            if (mHttpClientConnection == null || !mHttpClientConnection.isOpen()) {
                /* If this call fails, the address is bad or the net is down.
                   Punt for now.

                   FIXME: blow out entire queue here on connection failure
                   if net up? */
                if (!openHttpConnection(req)) {
                    state = DONE;
                    break;
                }
            }

            /* we have a connection, let the event handler
             * know of any associated certificate,
             * potentially none. */
            req.mEventHandler.certificate(mCertificate);

            try {
                /* FIXME: don't increment failure count if old connection?
                   There should not be a penalty for attempting to reuse
                   an old connection */
                req.sendRequest(mHttpClientConnection);
            } catch (HttpException e) {
                exception = e;
                error = EventHandler.ERROR;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure or cancelled */
                    pipe.addLast(req);
                }
                exception = null;
                state = clearPipe(pipe) ? DONE : SEND;
                minPipe = maxPipe = 1;
                break;
            }

            pipe.addLast(req);
            if (!mCanPersist)
                state = READ;
            break;
        }
        case DRAIN:
        case READ: {
            empty = !mRequestFeeder.haveRequest(mHost);
            int pipeSize = pipe.size();
            if (state != DRAIN && pipeSize < minPipe && !empty && mCanPersist) {
                state = SEND;
                break;
            } else if (pipeSize == 0) {
                /* Done if no other work to do */
                state = empty ? DONE : SEND;
                break;
            }

            req = (Request) pipe.removeFirst();
            if (HttpLog.LOGV)
                HttpLog.v("processRequests() reading " + req);

            try {
                req.readResponse(mHttpClientConnection);
            } catch (ParseException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure or cancelled */
                    req.reset();
                    pipe.addFirst(req);
                }
                exception = null;
                mCanPersist = false;
            }
            if (!mCanPersist) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): no persist, closing " + mHost);
                closeConnection();
                mHttpContext.removeAttribute(HTTP_CONNECTION);
                clearPipe(pipe);
                minPipe = maxPipe = 1;
                state = SEND;
            }
            break;
        }
        }
    }
}
From source file:com.commander4j.thread.OutboundMessageThread.java
public void run() {
    logger.debug("OutboundMessageThread running");
    setSessionID(JUnique.getUniqueID());

    JDBUser user = new JDBUser(getHostID(), getSessionID());
    user.setUserId("interface");
    user.setPassword("interface");
    user.setLoginPassword("interface");
    Common.userList.addUser(getSessionID(), user);
    Common.sd.setData(getSessionID(), "silentExceptions", "Yes", true);

    Boolean dbconnected = false;

    if (Common.hostList.getHost(hostID).isConnected(sessionID) == false) {
        dbconnected = Common.hostList.getHost(hostID).connect(sessionID, hostID);
    } else {
        dbconnected = true;
    }

    if (dbconnected) {
        JeMail mail = new JeMail(getHostID(), getSessionID());
        JDBInterfaceRequest ir = new JDBInterfaceRequest(getHostID(), getSessionID());
        JDBInterface inter = new JDBInterface(getHostID(), getSessionID());
        OutgoingProductionDeclarationConfirmation opdc = new OutgoingProductionDeclarationConfirmation(
                getHostID(), getSessionID());
        OutgoingDespatchConfirmation odc = new OutgoingDespatchConfirmation(getHostID(), getSessionID());
        OutgoingDespatchPreAdvice opa = new OutgoingDespatchPreAdvice(getHostID(), getSessionID());
        OutgoingDespatchEmail ode = new OutgoingDespatchEmail(getHostID(), getSessionID());
        OutgoingEquipmentTracking oet = new OutgoingEquipmentTracking(getHostID(), getSessionID());
        OutgoingPalletStatusChange psc = new OutgoingPalletStatusChange(getHostID(), getSessionID());
        OutgoingPalletSplit ops = new OutgoingPalletSplit(getHostID(), getSessionID());
        OutgoingPalletDelete opd = new OutgoingPalletDelete(getHostID(), getSessionID());

        LinkedList<Long> irqList = new LinkedList<Long>();
        int noOfMessages = 0;

        while (true) {
            JWait.milliSec(500);

            if (allDone) {
                if (dbconnected) {
                    Common.hostList.getHost(hostID).disconnect(getSessionID());
                }
                return;
            }

            irqList.clear();
            irqList = ir.getInterfaceRequestIDs();
            noOfMessages = irqList.size();

            if (noOfMessages > 0) {
                for (int x = 0; x < noOfMessages; x++) {
                    JWait.milliSec(100);
                    ir.setInterfaceRequestID(irqList.get(x));
                    ir.getInterfaceRequestProperties();

                    if (ir.getMode().equals("Inbound File Re-Submit")) {
                        if (inter.getInterfaceProperties(ir.getInterfaceType(), "Input") == true) {
                            String sourceFile = Common.base_dir + java.io.File.separator + "xml"
                                    + java.io.File.separator + "interface" + java.io.File.separator + "error"
                                    + java.io.File.separator + ir.getInterfaceType() + java.io.File.separator
                                    + ir.getFilename();
                            destinationFile = inter.getRealPath() + java.io.File.separator + ir.getFilename();
                            renamedDestinationFile = inter.getRealPath() + java.io.File.separator
                                    + ir.getFilename().replaceAll(".xml", ".lmx");
                            mover.move_File(sourceFile, renamedDestinationFile);
                            mover.move_File(renamedDestinationFile, destinationFile);
                            ir.delete();
                        }
                    }

                    if (ir.getMode().equals("Normal")) {
                        errorMessage = "Unknown Outbound Interface Type :" + ir.getInterfaceType();
                        messageProcessedOK = false;

                        if (ir.getInterfaceType().equals("Production Declaration")) {
                            messageProcessedOK = opdc.processMessage(ir.getTransactionRef());
                            errorMessage = opdc.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Production Declaration",
                                    messageProcessedOK.toString());
                        }

                        if (ir.getInterfaceType().equals("Pallet Status Change")) {
                            messageProcessedOK = psc.processMessage(ir.getTransactionRef());
                            errorMessage = psc.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Pallet Status Change",
                                    messageProcessedOK.toString());
                        }

                        if (ir.getInterfaceType().equals("Pallet Split")) {
                            messageProcessedOK = ops.processMessage(ir.getTransactionRef());
                            errorMessage = ops.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Pallet Split",
                                    messageProcessedOK.toString());
                        }

                        if (ir.getInterfaceType().equals("Pallet Delete")) {
                            messageProcessedOK = opd.processMessage(ir.getTransactionRef());
                            errorMessage = opd.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Pallet Delete",
                                    messageProcessedOK.toString());
                        }

                        if (ir.getInterfaceType().equals("Despatch Confirmation")) {
                            messageProcessedOK = odc.processMessage(ir.getTransactionRef());
                            errorMessage = odc.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Despatch Confirmation",
                                    messageProcessedOK.toString());
                        }

                        if (ir.getInterfaceType().equals("Despatch Pre Advice")) {
                            messageProcessedOK = opa.processMessage(ir.getTransactionRef());
                            errorMessage = opa.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Despatch Pre Advice",
                                    messageProcessedOK.toString());
                        }

                        if (ir.getInterfaceType().equals("Despatch Email")) {
                            messageProcessedOK = ode.processMessage(ir.getTransactionRef());
                            errorMessage = ode.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Despatch Email",
                                    messageProcessedOK.toString());
                        }

                        if (ir.getInterfaceType().equals("Equipment Tracking")) {
                            messageProcessedOK = oet.processMessage(ir.getTransactionRef());
                            errorMessage = oet.getErrorMessage();
                            GenericMessageHeader.updateStats("Output", "Equipment Tracking",
                                    messageProcessedOK.toString());
                        }

                        if (messageProcessedOK == true) {
                            ir.delete();
                        } else {
                            ir.update(irqList.get(x), "Error");

                            if (inter.getInterfaceProperties(ir.getInterfaceType(), "Output") == true) {
                                if (inter.getEmailError() == true) {
                                    String emailaddresses = inter.getEmailAddresses();
                                    StringConverter stringConverter = new StringConverter();
                                    ArrayConverter arrayConverter = new ArrayConverter(String[].class,
                                            stringConverter);
                                    arrayConverter.setDelimiter(';');
                                    arrayConverter.setAllowedChars(new char[] { '@' });
                                    String[] emailList = (String[]) arrayConverter.convert(String[].class,
                                            emailaddresses);

                                    if (emailList.length > 0) {
                                        try {
                                            String siteName = Common.hostList.getHost(getHostID())
                                                    .getSiteDescription();
                                            mail.postMail(emailList,
                                                    "Error Processing Outgoing " + ir.getInterfaceType()
                                                            + " for [" + siteName + "] on "
                                                            + JUtility.getClientName(),
                                                    errorMessage, "", "");
                                        } catch (MessagingException e) {
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
From source file:fi.helsinki.lib.simplerest.BundleResource.java
@Put
public Representation editBundle(InputRepresentation rep) {
    Context c = null;
    Bundle bundle = null;
    try {
        c = getAuthenticatedContext();
        bundle = Bundle.find(c, this.bundleId);
        if (bundle == null) {
            return errorNotFound(c, "Could not find the bundle.");
        }
    } catch (SQLException e) {
        return errorInternal(c, "SQLException");
    }

    DomRepresentation dom = new DomRepresentation(rep);

    Node attributesNode = dom.getNode("//dl[@id='attributes']");
    if (attributesNode == null) {
        return error(c, "Did not find dl tag with a id 'attributes'.", Status.CLIENT_ERROR_BAD_REQUEST);
    }

    int nameFound = 0;
    int primarybitstreamidFound = 0;

    NodeList nodes = attributesNode.getChildNodes();
    LinkedList<String> dtList = new LinkedList();
    LinkedList<String> ddList = new LinkedList();
    int nNodes = nodes.getLength();
    for (int i = 0; i < nNodes; i++) {
        Node node = nodes.item(i);
        String nodeName = node.getNodeName();
        if (nodeName.equals("dt")) {
            dtList.add(node.getTextContent());
        } else if (nodeName.equals("dd")) {
            ddList.add(node.getTextContent());
        }
    }

    if (dtList.size() != ddList.size()) {
        return error(c, "The number of <dt> and <dd> elements do not match.", Status.CLIENT_ERROR_BAD_REQUEST);
    }

    int size = dtList.size();
    for (int i = 0; i < size; i++) {
        String dt = dtList.get(i);
        String dd = ddList.get(i);

        if (dt.equals("name")) {
            nameFound = 1;
            bundle.setName(dd);
        } else if (dt.equals("primarybitstreamid")) {
            primarybitstreamidFound = 1;
            Integer id = Integer.parseInt(dd);
            boolean validBitstreamId = false;
            Bitstream[] bitstreams = bundle.getBitstreams();
            if (id == -1) {              // -1 means that we do not want to
                validBitstreamId = true; // specify the primary bitstream.
            } else {
                for (Bitstream bitstream : bitstreams) {
                    if (id == bitstream.getID()) {
                        validBitstreamId = true;
                        break;
                    }
                }
            }
            if (!validBitstreamId) {
                return error(c, "Invalid primarybitstreamid.", Status.CLIENT_ERROR_UNPROCESSABLE_ENTITY);
            }
            if (id == -1) {
                bundle.unsetPrimaryBitstreamID();
            } else {
                bundle.setPrimaryBitstreamID(id);
            }
        } else {
            return error(c, "Unexpected data in attributes: " + dt, Status.CLIENT_ERROR_BAD_REQUEST);
        }
    }

    // If there was data missing, report it:
    String[] problems = { "'nameFound' and 'primarybitstreamid'", "'nameFound'", "'primarybitstreamid'", "" };
    String problem = problems[primarybitstreamidFound + 2 * nameFound];
    if (!problem.equals("")) {
        return error(c, problem + " was not found from the request.", Status.CLIENT_ERROR_BAD_REQUEST);
    }

    try {
        bundle.update();
        c.complete();
    } catch (AuthorizeException ae) {
        return error(c, "Unauthorized", Status.CLIENT_ERROR_UNAUTHORIZED);
    } catch (Exception e) {
        return errorInternal(c, e.toString());
    }
    return successOk("Bundle updated.");
}