List of usage examples for java.util.List#listIterator()

ListIterator<E> listIterator();

Returns a list iterator over the elements in this list, in proper sequence.
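Before the longer real-world listings below, a minimal, self-contained sketch of the basic listIterator() contract: forward traversal with nextIndex(), then backward traversal with in-place mutation via set(). The class name and list contents are illustrative only.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class ListIteratorBasics {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(Arrays.asList("ada", "grace", "alan"));

        // Forward pass: nextIndex() reports the index of the element next() will return.
        ListIterator<String> it = names.listIterator();
        while (it.hasNext()) {
            int i = it.nextIndex();
            String name = it.next();
            System.out.println(i + ": " + name);
        }

        // The same iterator can now walk backwards from the end.
        while (it.hasPrevious()) {
            // set() replaces the element most recently returned by next()/previous().
            it.set(it.previous().toUpperCase());
        }
        System.out.println(names); // [ADA, GRACE, ALAN]
    }
}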
From source file:org.shareok.data.dspacemanager.DspaceApiHandlerImpl.java
/**
 * Load saf package information and import it into DSpace repository
 *
 * @return the mapping of the imported information to DSpace items
 */
@Override
public Map<String, List<String>> loadItemsFromSafPackage() {
  String collectionHandle = job.getCollectionId();
  String safPath = job.getFilePath();
  if (DocumentProcessorUtil.isEmptyString(dspaceApiUrl)) {
    dspaceApiUrl = RedisUtil.getServerDaoInstance().findServerById(job.getServerId()).getAddress();
  }
  Map<String, List<String>> importResults = new HashMap<>();
  if (DocumentProcessorUtil.isEmptyString(token)) {
    getTokenFromServer();
    if (DocumentProcessorUtil.isEmptyString(token)) {
      try {
        throw new NoExistingApiTokenException("Cannot get useful token!");
      } catch (NoExistingApiTokenException ex) {
        logger.error(ex.getMessage());
        return null;
      }
    }
  }
  try {
    File safFile = new File(safPath);
    if (safPath.endsWith(".zip")) {
      String newPath = FileZipper.unzipToDirectory(safPath);
      if (null == newPath) {
        throw new ErrorUnzipSafPackageException("Cannot unzip the saf package at " + safPath);
      }
      // Change the path to be the unzipped folder
      // safPath = DocumentProcessorUtil.getFileNameWithoutExtension(safPath);
      safFile = new File(newPath);
    }
    if (safFile.isDirectory()) {
      mainLoop: for (File file : safFile.listFiles()) {
        if (null != file && file.isDirectory()) {
          File[] fileList = file.listFiles();
          boolean containsContentsFile = false;
          boolean containsMetadataFile = false;
          List<File> metadataFileList = new ArrayList<>();
          File contentFile = null;
          for (File itemFile : fileList) {
            if (null == itemFile) {
              continue;
            }
            String fileName = itemFile.getName();
            if ("contents".equals(fileName)) {
              containsContentsFile = true;
              contentFile = itemFile;
            } else if (METADATA_FILE_NAMES_LIST.contains(fileName.replaceAll(".xml", ""))) {
              // Check if there are some duplicates based on doi
              if (fileName.contains("dublin")) {
                String doi = findDoiFromDublin(itemFile.getAbsolutePath());
                if (null != doi && checkDuplicatesByDoi(doi, collectionHandle, dspaceApiUrl)) {
                  logger.debug("Duplication detected: in collection " + collectionHandle
                      + " there has already been an item with doi=" + doi);
                  output += "Duplication detected: in collection " + collectionHandle
                      + " there has already been an item with doi=" + doi + ".\n\n";
                  continue mainLoop;
                }
              }
              containsMetadataFile = true;
              metadataFileList.add(itemFile);
            }
          }
          if (containsContentsFile && containsMetadataFile) {
            // Create the new item now:
            Map newItemInfo = createEmptyItem(getObjectIdByHandler(collectionHandle, dspaceApiUrl));
            String newItemId = String.valueOf(newItemInfo.get("id"));
            String newItemHandle = (String) newItemInfo.get("handle");
            logger.debug("A new item with handle = " + newItemHandle + " has been added to collection "
                + collectionHandle + ".");
            output += "A new item with handle = " + newItemHandle + " has been added to collection "
                + collectionHandle + ".\n\n";
            mapping += file.getName() + " " + newItemHandle + "\n";
            // Add the metadata to the new item:
            String[] paths = new String[metadataFileList.size()];
            ListIterator metadataIt = metadataFileList.listIterator();
            while (metadataIt.hasNext()) {
              File metadataFile = (File) metadataIt.next();
              // nextIndex() is one past the element just returned by next()
              paths[metadataIt.nextIndex() - 1] = metadataFile.getAbsolutePath();
            }
            Map<String, String> metadataStrings = getMetadataFromXmlFiles(paths);
            for (String path : metadataStrings.keySet()) {
              String metadata = metadataStrings.get(path);
              logger.debug(" adding metadata file " + metadata + " now with file name : " + file.getName());
              output += " adding metadata file " + metadata + " now with file name : " + file.getName() + "\n\n";
              try {
                String metadataInfo = addItemMetadata(newItemId, metadata);
                if (null == metadataInfo || metadataInfo.equals("")) {
                  // Record the failure under "metadata-unimported" so the retry pass
                  // below can pick it up (the original listing filed it under
                  // "metadata-imported", which the retry never reads).
                  if (null == importResults.get("metadata-unimported")) {
                    importResults.put("metadata-unimported", new ArrayList<String>());
                  }
                  List metadataUnimportedList = (ArrayList) importResults.get("metadata-unimported");
                  metadataUnimportedList.add(newItemId + "---" + path);
                  logger.debug("Failed to add the metadata into item " + newItemHandle + ".\n");
                  output += "Failed to add the metadata into item " + newItemHandle + ".\n\n";
                } else {
                  if (null == importResults.get("metadata-imported")) {
                    importResults.put("metadata-imported", new ArrayList<String>());
                  }
                  List metadataImportedList = (ArrayList) importResults.get("metadata-imported");
                  metadataImportedList.add(newItemId + "---" + path);
                  logger.debug("A new set of metadata entries have been added to the item " + newItemHandle + ". \n");
                  output += "A new set of metadata entries have been added to the item " + newItemHandle + ". \n\n";
                }
              } catch (Exception ex) {
                if (null == importResults.get("metadata-unimported")) {
                  importResults.put("metadata-unimported", new ArrayList<String>());
                }
                List metadataUnimportedList = (ArrayList) importResults.get("metadata-unimported");
                metadataUnimportedList.add(newItemId + "---" + path);
                logger.debug("Failed to add metadata into item " + newItemHandle + "\n" + ex.getMessage());
                output += "Failed to add metadata into item " + newItemHandle + "\n" + ex.getMessage() + "\n\n";
              }
            }
            List bitstreamFileList = DocumentProcessorUtil.readTextFileIntoList(contentFile.getAbsolutePath());
            ListIterator it = bitstreamFileList.listIterator();
            while (it.hasNext()) {
              String bitstreamFileName = (String) it.next();
              File bitstreamFile = new File(file.getAbsoluteFile() + File.separator + bitstreamFileName);
              if (!bitstreamFile.exists()) {
                logger.debug("The bitstream file " + bitstreamFileName + " does not exist in the saf package "
                    + safPath + "!\n");
                output += "The bitstream file " + bitstreamFileName + " does not exist in the saf package "
                    + safPath + "!\n\n";
              } else {
                String newName = bitstreamFile.getName().replace(" ", "_");
                try {
                  Map bitstreamInfo = addItemBitstream(newItemId, bitstreamFile.getAbsolutePath(), newName, newName);
                  if (null != bitstreamInfo) {
                    if (null == importResults.get("bitstream-imported")) {
                      importResults.put("bitstream-imported", new ArrayList<String>());
                    }
                    List bitstreamImportedList = (ArrayList) importResults.get("bitstream-imported");
                    bitstreamImportedList.add(newItemId + "---" + bitstreamFile.getAbsoluteFile());
                    logger.debug("A new bitstream file " + bitstreamFileName + " with link "
                        + ((String) bitstreamInfo.get("retrieveLink")) + " has been added to the item "
                        + newItemHandle + ". \n");
                    output += "A new bitstream file " + bitstreamFileName + " with link "
                        + ((String) bitstreamInfo.get("retrieveLink")) + " has been added to the item "
                        + newItemHandle + ". \n\n";
                  } else {
                    if (null == importResults.get("bitstream-unimported")) {
                      importResults.put("bitstream-unimported", new ArrayList<String>());
                    }
                    List bitstreamUnimportedList = (ArrayList) importResults.get("bitstream-unimported");
                    bitstreamUnimportedList.add(newItemId + "---" + bitstreamFile.getAbsoluteFile());
                    logger.debug("Failed to add the bitstream file " + bitstreamFileName + " into item "
                        + newItemHandle + ".\n");
                    output += "Failed to add the bitstream file " + bitstreamFileName + " into item "
                        + newItemHandle + ".\n\n";
                  }
                } catch (Exception ex) {
                  if (null == importResults.get("bitstream-unimported")) {
                    importResults.put("bitstream-unimported", new ArrayList<String>());
                  }
                  List bitstreamUnimportedList = (ArrayList) importResults.get("bitstream-unimported");
                  bitstreamUnimportedList.add(newItemId + "---" + bitstreamFile.getAbsoluteFile());
                  logger.debug("Failed to add the bitstream file " + bitstreamFileName + " into item "
                      + newItemHandle + ".\n" + ex.getMessage());
                  output += "Failed to add the bitstream file " + bitstreamFileName + " into item "
                      + newItemHandle + ".\n" + ex.getMessage() + "\n\n";
                }
              }
            }
          } else {
            logger.debug("This saf package is missing either the contents file or the metadata files.\n");
            output += "This saf package is missing either the contents file or the metadata files.\n\n";
            throw new SafPackageMissingFileException("Saf package at " + safPath
                + " either the contents file or the metadata files are missing!");
          }
        }
      }
    } else {
      throw new SafPackagePathErrorException("Saf package path is not a directory");
    }
    // Due to various reasons, some metadata and/or bitstreams cannot be added and are given a second chance here:
    List metadataUnimportedList = (ArrayList) importResults.get("metadata-unimported");
    if (null != metadataUnimportedList && metadataUnimportedList.size() > 0) {
      for (Iterator<String> iterator = metadataUnimportedList.iterator(); iterator.hasNext();) {
        String[] values = ((String) iterator.next()).split("---");
        logger.debug("Second try to add metadata into " + values[0] + " with data " + values[1]);
        output += "Second try to add metadata into " + values[0] + " with data " + values[1] + "\n\n";
        try {
          String metadataInfo = addItemMetadata(values[0], values[1]);
          if (null != metadataInfo) {
            iterator.remove();
            logger.debug("Second try: successfully added metadata into item " + values[0] + " with data " + values[1]);
            output += "Second try: successfully added metadata into item " + values[0] + " with data " + values[1] + "\n\n";
          } else {
            logger.debug("Second try: failed to add metadata into item " + values[0] + " with data " + values[1]);
            output += "Second try: failed to add metadata into item " + values[0] + " with data " + values[1] + "\n\n";
          }
        } catch (Exception ex) {
          logger.debug("Second try: failed to add metadata into item " + values[0] + " with data " + values[1]
              + "\n" + ex.getMessage());
          output += "Second try: failed to add metadata into item " + values[0] + " with data " + values[1]
              + "\n" + ex.getMessage() + "\n\n";
        }
      }
    }
    List bitstreamUnimportedList = (ArrayList) importResults.get("bitstream-unimported");
    if (null != bitstreamUnimportedList && bitstreamUnimportedList.size() > 0) {
      // Iterate the bitstream list here (the original listing mistakenly iterated
      // metadataUnimportedList again).
      for (Iterator<String> iterator = bitstreamUnimportedList.iterator(); iterator.hasNext();) {
        String[] values = ((String) iterator.next()).split("---");
        String name = new File(values[1]).getName().replace(" ", "_");
        logger.debug("Second try to add bitstream into " + values[0] + " with path " + values[1]);
        output += "Second try to add bitstream into " + values[0] + " with path " + values[1] + "\n\n";
        try {
          Map bitstreamInfo = addItemBitstream(values[0], values[1], name, name);
          if (null != bitstreamInfo) {
            iterator.remove();
            logger.debug("Second try: successfully added bitstream into item " + values[0] + " with path " + values[1]);
            output += "Second try: successfully added bitstream into item " + values[0] + " with path " + values[1] + "\n\n";
          } else {
            logger.debug("Second try: failed to add bitstream into item " + values[0] + " with path " + values[1]);
            output += "Second try: failed to add bitstream into item " + values[0] + " with path " + values[1] + "\n\n";
          }
        } catch (Exception ex) {
          logger.debug("Second try: failed to add bitstream into item " + values[0] + " with path " + values[1]
              + "\n" + ex.getMessage());
          output += "Second try: failed to add bitstream into item " + values[0] + " with path " + values[1]
              + "\n" + ex.getMessage() + "\n\n";
        }
      }
    }
  } catch (SafPackagePathErrorException | SafPackageMissingFileException ex) {
    output += "Saf package is not valid!\n" + ex.getMessage() + "\n\n";
    logger.error("Cannot create new items with saf package path: " + safPath, ex);
  } catch (ErrorUnzipSafPackageException ex) {
    Logger.getLogger(DspaceApiHandlerImpl.class.getName()).log(Level.SEVERE, null, ex);
  } finally {
    DocumentProcessorUtil.outputStringToFile(mapping,
        DocumentProcessorUtil.getFileContainerPath(reportFilePath) + File.separator + "mapfile");
    DocumentProcessorUtil.outputStringToFile(output, reportFilePath);
  }
  return importResults;
}
From source file:org.apache.hadoop.hbase.coprocessor.transactional.SsccRegionEndpoint.java
/**
 * Abort the transaction.
 *
 * @param transactionId
 * @throws IOException
 * @throws UnknownTransactionException
 */
public void abortTransaction(final long transactionId) throws IOException, UnknownTransactionException {
  long txid = 0;
  if (LOG.isTraceEnabled())
    LOG.trace("SsccRegionEndpoint coprocessor: abort transactionId: " + transactionId + " "
        + m_Region.getRegionInfo().getRegionNameAsString());
  SsccTransactionState state;
  try {
    state = getTransactionState(transactionId);
  } catch (UnknownTransactionException e) {
    IOException ioe = new IOException("UnknownTransactionException");
    if (LOG.isTraceEnabled())
      LOG.trace("SsccRegionEndpoint coprocessor: Unknown transaction [" + transactionId + "] in region ["
          + m_Region.getRegionInfo().getRegionNameAsString() + "], " + ioe.toString());
    throw new IOException("UnknownTransactionException");
  }
  if (state.getStatus() == Status.ABORTED) {
    LOG.error("Transaction " + transactionId + " is already aborted, probably due to write conflict");
    return;
  }
  if (state.getStatus() == Status.COMMITED) {
    // should be a programming error in the client, like commit() then abort()
    LOG.error("Transaction " + transactionId + " is already committed, cannot perform abort anymore");
    return;
  }
  if (state.hasWrite()) {
    // get the put/delete lists: do a delete to undo each put, do nothing to undo a delete
    List<byte[]> putUndoList = ((SsccTransactionState) state).getPutRows();
    // List<Mutation> mutList = new ArrayList<Mutation>();
    for (byte[] rowkey : putUndoList) {
      long localTransId = state.getStartId();
      Delete d = new Delete(rowkey, localTransId);
      Get forColListGet = new Get(rowkey);
      forColListGet.setTimeStamp(localTransId); // get only those cells affected by the given transaction
      // perform a get first, parse the result and get all columns affected by the put
      Result r = m_Region.get(forColListGet);
      List<Cell> listCells = r.listCells();
      if (listCells != null) {
        for (Cell cell : listCells) {
          // this is the cell that needs to be deleted
          d.deleteColumn(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), localTransId);
        }
      }
      m_Region.delete(d);
      // clear the status item
      Delete statusDelete = new Delete(rowkey, localTransId);
      statusDelete.deleteColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.STATUS_COL);
      // statusDelete.deleteColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.COLUMNS_COL);
      m_Region.delete(statusDelete);
      // mutList.add(d);
    }
    // clear status
    List<Delete> deleteList = state.getDelRows();
    ListIterator<Delete> deletesIter = null;
    for (deletesIter = deleteList.listIterator(); deletesIter.hasNext();) {
      Delete di = deletesIter.next();
      long localTransId = state.getStartId();
      Delete d = new Delete(di.getRow(), localTransId);
      d.deleteColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.STATUS_COL);
      m_Region.delete(d);
    }
    /* not sure how to use batchMutate yet
    try {
      Mutation[] m = (Mutation[]) mutList.toArray();
      m_Region.batchMutate(m);
    } catch (Exception e) {
      // TODO
      throw new IOException(e.toString());
    }
    */
  }
  synchronized (commitPendingTransactions) {
    commitPendingTransactions.remove(state);
  }
  if (state.isReinstated()) {
    synchronized (indoubtTransactionsById) {
      if (LOG.isTraceEnabled())
        LOG.trace("SsccRegionEndpoint coprocessor: Trafodion Recovery: abort reinstated indoubt transactions "
            + transactionId);
      indoubtTransactionsById.remove(state.getTransactionId());
      int tmid = (int) (transactionId >> 32);
      int count = 0;
      // indoubtTransactionsCountByTmid protected by indoubtTransactionsById synchronization
      if (indoubtTransactionsCountByTmid.containsKey(tmid)) {
        count = (int) indoubtTransactionsCountByTmid.get(tmid) - 1;
        if (count > 0)
          indoubtTransactionsCountByTmid.put(tmid, count);
      }
      // if all reinstated txns are resolved from a TM, remove it and delete the associated zNode
      if (count == 0) {
        indoubtTransactionsCountByTmid.remove(tmid);
        String lv_encoded = m_Region.getRegionInfo().getEncodedName();
        try {
          if (LOG.isTraceEnabled())
            LOG.trace("SsccRegionEndpoint coprocessor: Trafodion Recovery: delete in abort recovery zNode TM "
                + tmid + " region encoded name " + lv_encoded + " for 0 in-doubt transaction");
          deleteRecoveryzNode(tmid, lv_encoded);
        } catch (IOException e) {
          LOG.error("SsccRegionEndpoint coprocessor: Trafodion Recovery: delete recovery zNode failed");
        }
      }
      if ((indoubtTransactionsById == null) || (indoubtTransactionsById.size() == 0)) {
        // change region state to STARTED, and archive the split-thlog
        if (indoubtTransactionsById == null) {
          if (LOG.isTraceEnabled())
            LOG.trace("SsccRegionEndpoint coprocessor: Trafodion Recovery: start region in abort with indoubtTransactionsById null");
        } else if (LOG.isTraceEnabled()) {
          LOG.trace("SsccRegionEndpoint coprocessor: Trafodion Recovery: start region in abort with indoubtTransactionsById size "
              + indoubtTransactionsById.size());
        }
        startRegionAfterRecovery();
      }
    }
  }
  state.setStatus(Status.ABORTED);
  retireTransaction(state);
}
From source file:com.idega.builder.business.BuilderLogic.java
private void filterForPermission(List<Integer> groupIds, PresentationObject obj,
    PresentationObjectContainer parentObject, int index, IWContext iwc) {
  if (!iwc.hasViewPermission(groupIds, obj)) {
    logger.severe(obj + ": removed");
    parentObject.getChildren().remove(index);
    parentObject.getChildren().add(index, PresentationObject.NULL_CLONE_OBJECT);
  } else if (obj.isContainer()) {
    if (obj instanceof Table) {
      Table tab = (Table) obj;
      int cols = tab.getColumns();
      int rows = tab.getRows();
      for (int x = 1; x <= cols; x++) {
        for (int y = 1; y <= rows; y++) {
          PresentationObjectContainer moc = tab.containerAt(x, y);
          if (moc != null) {
            List l = moc.getChildren();
            if (l != null) {
              ListIterator iterT = l.listIterator();
              while (iterT.hasNext()) {
                // nextIndex() before next() gives the index of the upcoming element
                int index2 = iterT.nextIndex();
                Object itemT = iterT.next();
                if (itemT instanceof PresentationObject) {
                  filterForPermission(groupIds, (PresentationObject) itemT, moc, index2, iwc);
                }
              }
            }
          }
        }
      }
    } else {
      List list = obj.getChildren();
      if (list != null) {
        ListIterator iter = list.listIterator();
        while (iter.hasNext()) {
          int index2 = iter.nextIndex();
          PresentationObject item = (PresentationObject) iter.next();
          filterForPermission(groupIds, item, (PresentationObjectContainer) obj, index2, iwc);
        }
      }
    }
  }
}
From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java
/**
 * Wrapper for {@link #getAllParentReferences(SELECTOR)}, but will add
 * a reference to this project too, if needed.
 *
 * @param sortKey the key specifying the order in which projects are returned.
 * @param addSelf if true, add a self-reference in the correct spot.
 * @return a list of all parent references, including a self-reference if
 *         addSelf is true.
 */
public List<AbstractProjectReference> getAllParentReferences(ProjectReference.PrioComparator.SELECTOR sortKey,
    boolean addSelf) {
  List<AbstractProjectReference> lst = this.getAllParentReferences(sortKey);
  if (addSelf) {
    boolean hasAddedSelf = false;
    ListIterator<AbstractProjectReference> iter = lst.listIterator();
    while (iter.hasNext()) {
      AbstractProjectReference ref = iter.next();
      int prio;
      if (ref instanceof ProjectReference) {
        prio = PrioComparator.getPriorityFor(ref, sortKey);
      } else {
        // An anonymous ref is always at priority 0
        prio = 0;
      }
      if (!hasAddedSelf && prio > 0) {
        hasAddedSelf = true;
        iter.add(new SimpleProjectReference(this.getFullName()));
      }
    }
    // Check if we were able to add a self-reference at all
    if (!hasAddedSelf) {
      lst.add(new SimpleProjectReference(this.getFullName()));
    }
  }
  return lst;
}
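The Jenkins method above relies on ListIterator.add(), which inserts immediately before the element the next call to next() would return (i.e. just after the element most recently returned), without triggering a ConcurrentModificationException. A minimal sketch of the same pattern, assuming a plain list of integers stands in for the priority-sorted references:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class InsertDuringIteration {
    public static void main(String[] args) {
        // Stand-in for the priority-sorted parent references.
        List<Integer> priorities = new ArrayList<>(Arrays.asList(-5, -2, 3, 7));
        boolean added = false;

        ListIterator<Integer> it = priorities.listIterator();
        while (it.hasNext()) {
            int prio = it.next();
            if (!added && prio > 0) {
                // add() inserts just after the element most recently returned by
                // next(); iteration then continues safely past the new element.
                it.add(0); // 0 stands in for the self-reference
                added = true;
            }
        }
        if (!added) {
            priorities.add(0); // fall back to appending, as the method above does
        }
        System.out.println(priorities); // [-5, -2, 3, 0, 7]
    }
}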
From source file:com.idega.builder.business.BuilderLogic.java
public PresentationObject getTransformedObject(Page currentPage, String pageKey, UIComponent obj, int index,
    PresentationObjectContainer parent, String parentKey, IWContext iwc) {
  IWResourceBundle iwrb = getBuilderBundle().getResourceBundle(iwc);
  XMLElement pasted = (XMLElement) iwc.getSessionAttribute(CLIPBOARD);
  boolean clipboardEmpty = (pasted == null);
  // We can either be working with pure UIComponents or PresentationObjects
  boolean isPresentationObject = obj instanceof PresentationObject;
  // Some very special cases, added the boolean to make it faster
  /*
  if (isPresentationObject && obj instanceof Image) {
    obj = transformImage(pageKey, obj, iwc);
  } else
  */
  if (isPresentationObject && ((PresentationObject) obj).isContainer()) {
    if (obj instanceof Table) {
      getTransformedTable(currentPage, pageKey, obj, iwc, clipboardEmpty);
    } else {
      List list = obj.getChildren();
      if (list != null && !list.isEmpty()) {
        ListIterator iter = list.listIterator();
        while (iter.hasNext()) {
          int index2 = iter.nextIndex();
          UIComponent item = (UIComponent) iter.next();
          // If parent is Table
          if (index == -1) {
            getTransformedObject(currentPage, pageKey, item, index2, (PresentationObjectContainer) obj,
                parentKey, iwc);
          } else {
            String newParentKey = null;
            // Ugly hack for handling the regions inside HTML template based pages. This needs to change.
            // TODO: Remove this instanceof case; to make that possible the getICObjectInstanceID
            // method needs to be changed to return String
            if (obj instanceof HtmlPageRegion) {
              HtmlPageRegion region = (HtmlPageRegion) obj;
              // newParentKey is normally an ICObjectInstanceId or -1 to mark the top page,
              // but here we make a workaround.
              newParentKey = region.getRegionId();
            } else {
              newParentKey = getInstanceId(obj);
            }
            getTransformedObject(currentPage, pageKey, item, index2, (PresentationObjectContainer) obj,
                newParentKey, iwc);
          }
        }
      }
      if (index != -1) {
        Page curr = getPageCacher().getComponentBasedPage(getCurrentIBPage(iwc)).getNewPage(iwc);
        PresentationObjectContainer container = ((PresentationObjectContainer) obj);
        String instanceId = getInstanceId(obj);
        if (instanceId == null) {
          instanceId = obj.getId();
        }
        String regionLabel = container.getLabel();
        String addModuleUri = getUriToAddModuleWindow(regionLabel);
        if (curr.getIsExtendingTemplate()) {
          if (container.getBelongsToParent()) {
            if (!container.isLocked()) {
              Layer marker = getLabelMarker(instanceId, regionLabel);
              addButtonsLayer(marker, addModuleUri, regionLabel, iwrb, marker.getId());
              container.add(marker);
            }
          } else {
            Layer marker = getLabelMarker(instanceId, regionLabel);
            Layer buttons = addButtonsLayer(marker, addModuleUri, regionLabel, iwrb, marker.getId());
            container.add(marker);
            if (curr.getIsTemplate()) {
              buttons.add(getLabelIcon(instanceId, iwc, regionLabel));
              if (container.isLocked()) {
                buttons.add(getLockedIcon(instanceId, iwc, regionLabel));
              } else {
                buttons.add(getUnlockedIcon(instanceId, iwc));
              }
            }
          }
        } else {
          Layer marker = getLabelMarker(instanceId, regionLabel);
          Layer buttons = addButtonsLayer(marker, addModuleUri, regionLabel, iwrb, marker.getId());
          container.add(marker);
          if (curr.getIsTemplate()) {
            marker.add(getLabelIcon(instanceId, iwc, regionLabel));
            if (container.isLocked()) {
              buttons.add(getLockedIcon(instanceId, iwc, regionLabel));
            } else {
              buttons.add(getUnlockedIcon(instanceId, iwc));
            }
          }
        }
      }
    }
  }
  PresentationObject transformed = null;
  if ((isPresentationObject && ((PresentationObject) obj).getUseBuilderObjectControl()) || !isPresentationObject) {
    if (index != -1) {
      boolean lastModuleInRegion = false;
      if (index >= parent.getChildCount()) {
        lastModuleInRegion = true;
      } else if (index == (parent.getChildCount() - 1)) {
        lastModuleInRegion = true;
      }
      boolean objectFromCurrentPage = true;
      try {
        IBXMLPage page = getIBXMLPage(pageKey);
        objectFromCurrentPage = getIBXMLWriter().findModule(page, getInstanceId(obj)) != null;
      } catch (Exception e) {
        e.printStackTrace();
      }
      transformed = new IBObjectControl(obj, parent, parentKey, iwc, index, lastModuleInRegion,
          objectFromCurrentPage);
      if (index < parent.getChildCount()) {
        parent.set(index, transformed);
      } else {
        parent.add(transformed);
        index++;
      }
    }
    return transformed;
  }
  if (isPresentationObject) {
    return (PresentationObject) obj;
  }
  return null;
}
From source file:org.apache.nifi.cluster.manager.impl.WebClusterManager.java
@Override
public ProcessGroupStatus getProcessGroupStatus(final String groupId) {
  final Set<Node> connectedNodes = getNodes(Node.Status.CONNECTED);
  // ensure there are some nodes in the cluster
  if (connectedNodes.isEmpty()) {
    throw new NoConnectedNodesException();
  }
  ProcessGroupStatus mergedProcessGroupStatus = null;
  for (final Node node : connectedNodes) {
    final NodeIdentifier nodeId = node.getNodeId();
    final HeartbeatPayload nodeHeartbeatPayload = node.getHeartbeatPayload();
    if (nodeHeartbeatPayload == null) {
      continue;
    }
    final ProcessGroupStatus nodeRootProcessGroupStatus = nodeHeartbeatPayload.getProcessGroupStatus();
    final ProcessGroupStatus nodeProcessGroupStatus = groupId.equals(ROOT_GROUP_ID_ALIAS)
        ? nodeRootProcessGroupStatus
        : getProcessGroupStatus(nodeRootProcessGroupStatus, groupId);
    if (nodeProcessGroupStatus == null) {
      continue;
    }
    if (mergedProcessGroupStatus == null) {
      mergedProcessGroupStatus = nodeProcessGroupStatus.clone();
      // update any issues with the node label
      if (mergedProcessGroupStatus.getRemoteProcessGroupStatus() != null) {
        for (final RemoteProcessGroupStatus remoteProcessGroupStatus : mergedProcessGroupStatus.getRemoteProcessGroupStatus()) {
          final List<String> nodeAuthorizationIssues = remoteProcessGroupStatus.getAuthorizationIssues();
          if (!nodeAuthorizationIssues.isEmpty()) {
            for (final ListIterator<String> iter = nodeAuthorizationIssues.listIterator(); iter.hasNext();) {
              final String issue = iter.next();
              // set() rewrites each issue in place with a node-identifying prefix
              iter.set("[" + nodeId.getApiAddress() + ":" + nodeId.getApiPort() + "] -- " + issue);
            }
            remoteProcessGroupStatus.setAuthorizationIssues(nodeAuthorizationIssues);
          }
        }
      }
    } else {
      final ProcessGroupStatus nodeClone = nodeProcessGroupStatus.clone();
      for (final RemoteProcessGroupStatus remoteProcessGroupStatus : nodeClone.getRemoteProcessGroupStatus()) {
        final List<String> nodeAuthorizationIssues = remoteProcessGroupStatus.getAuthorizationIssues();
        if (!nodeAuthorizationIssues.isEmpty()) {
          for (final ListIterator<String> iter = nodeAuthorizationIssues.listIterator(); iter.hasNext();) {
            final String issue = iter.next();
            iter.set("[" + nodeId.getApiAddress() + ":" + nodeId.getApiPort() + "] -- " + issue);
          }
          remoteProcessGroupStatus.setAuthorizationIssues(nodeAuthorizationIssues);
        }
      }
      ProcessGroupStatus.merge(mergedProcessGroupStatus, nodeClone);
    }
  }
  return mergedProcessGroupStatus;
}
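The NiFi code above is a textbook use of ListIterator.set(): prefixing every string in a list in place, without building a second list. A distilled sketch (the issue texts and node label are placeholders):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class PrefixInPlace {
    public static void main(String[] args) {
        List<String> issues = new ArrayList<>(Arrays.asList("missing certificate", "port unreachable"));
        String nodeLabel = "[10.0.0.5:8080]"; // placeholder for apiAddress:apiPort

        for (ListIterator<String> iter = issues.listIterator(); iter.hasNext();) {
            String issue = iter.next();
            // set() overwrites the element just returned by next().
            iter.set(nodeLabel + " -- " + issue);
        }
        System.out.println(issues);
    }
}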
From source file:oscar.form.study.hsfo2.pageUtil.ManageHSFOAction.java
public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request,
    HttpServletResponse response) throws Exception {
  logger.info("ContextPath: " + request.getContextPath());
  logger.info("pathInfo: " + request.getPathInfo());
  Map<String, String[]> params = request.getParameterMap();
  Hsfo2Visit latestHsfo2Visit = new Hsfo2Visit();
  PatientList historyList = new PatientList();
  // RecordList record = new RecordList();
  List recordList = new LinkedList();
  String patientId = (String) request.getAttribute("demographic_no");
  if (patientId == null) {
    patientId = request.getParameter("demographic_no");
  }
  String isfirstrecord = "";
  boolean isFirstRecord = false;
  String user = (String) request.getSession().getAttribute("user");
  HSFODAO hsfoDAO = new HSFODAO();
  isFirstRecord = hsfoDAO.isFirstRecord(patientId);
  DemographicData demoData = new DemographicData();
  // DemographicData.Demographic de = demoData.getDemographic(patientId);
  Demographic de = demoData.getDemographic(patientId);
  boolean isDisplayGraphs = "displayGraphs".equalsIgnoreCase(request.getParameter("operation"));
  boolean isFromRegistrationForm = false;
  if ("true".equalsIgnoreCase(request.getParameter("isFromRegistrationForm"))) {
    // true means the request is from the registration form, so it should go to the followup form
    isFromRegistrationForm = true;
  }
  FORM forwardToForm = null;
  int patientHistorySize = 0;
  boolean isFirstVisitRecordForThePatient = false;
  // boolean isFromRegistrationForm = false;
  Integer formId = getFormIdFromRequest(request);
  Hsfo2Visit formHsfo2Visit = null;
  if (formId != null)
    formHsfo2Visit = hsfoDAO.retrieveSelectedRecord(formId);
  boolean isHistoryForm = !isFromRegistrationForm && (formId != null && formHsfo2Visit != null);
  if (formId != null)
    isFirstVisitRecordForThePatient = hsfoDAO.isFirstVisitRecordForThePatient(patientId, formId);
  boolean isRegistForm = !isDisplayGraphs && !isFromRegistrationForm
      && (isFirstRecord || isFirstVisitRecordForThePatient);
  // prepare data
  Hsfo2Patient hsfo2Patient = hsfoDAO.retrievePatientRecord(patientId);
  if (hsfo2Patient == null)
    hsfo2Patient = new Hsfo2Patient();
  List patientHistory = hsfoDAO.retrieveVisitRecord(patientId);
  // save only or submit; this is for the registration form and stays in that form
  boolean isSaveOnly = "Save".equalsIgnoreCase(request.getParameter("Save"));
  if (!isSaveOnly && !isFirstRecord) {
    isSaveOnly = !hsfo2Patient.isSubmitted();
  }
  if (isSaveOnly) {
    // stay in the registration form and treat as history
    isRegistForm = true;
    isHistoryForm = true;
    if (patientHistory.size() > 0)
      formHsfo2Visit = (Hsfo2Visit) patientHistory.get(patientHistory.size() - 1);
  }
  if (isHistoryForm) {
    latestHsfo2Visit = formHsfo2Visit;
  } else {
    // create new form
    patientHistorySize = patientHistory.size();
    if (patientHistorySize >= 1) {
      latestHsfo2Visit = (Hsfo2Visit) patientHistory.get(patientHistorySize - 1);
      latestHsfo2Visit.setVisitDateIdToday();
      latestHsfo2Visit.setId(hsfoDAO.getMaxVisitId() + 1);
      cleanNonePrefilledData(latestHsfo2Visit);
      getLabWork(latestHsfo2Visit, hsfo2Patient, ConvertUtil.toInt(patientId));
      // If it's a followup form, BP should not be prepopulated. Clean again.
      latestHsfo2Visit.setSBP(0);
      latestHsfo2Visit.setDBP(0);
    } else {
      latestHsfo2Visit = new Hsfo2Visit();
      latestHsfo2Visit.setVisitDateIdToday();
      getLabWork(latestHsfo2Visit, hsfo2Patient, ConvertUtil.toInt(patientId));
    }
  }
  if (isRegistForm) {
    // registration, get data from DemographicData
    isfirstrecord = "true";
    hsfo2Patient.setPatient_Id(patientId);
    if (!isHistoryForm) {
      hsfo2Patient.setFName(de.getFirstName());
      hsfo2Patient.setLName(de.getLastName());
      hsfo2Patient.setBirthDate(oscar.util.DateUtils.toDate(de.getFormattedDob()));
      hsfo2Patient.setSex(de.getSex());
      hsfo2Patient.setPostalCode(de.getPostal());
      hsfo2Patient.setRegistrationId(HsfoUtil.getRegistrationId());
      latestHsfo2Visit.setVisitDateIdToday();
    }
    request.setAttribute("EmrHCPId1", user);
    request.setAttribute("EmrHCPId2", de.getProviderNo()); // TODO: may need to convert to provider name
    forwardToForm = FORM.registration;
  } else {
    // populate graph data for the followup form; latestHsfo2Visit already holds the information of the last visit
    isfirstrecord = "false";
    if (!isDisplayGraphs)
      forwardToForm = FORM.flowsheet;
    else {
      // If patientHistory is greater than 1 then fill the graphing arrays
      TimeSeries sbpSeries = new TimeSeries("Systolic Blood Pressure", Day.class);
      TimeSeries dbpSeries = new TimeSeries("Diastolic Blood Pressure", Day.class);
      TimeSeries bmiSeries = new TimeSeries("BMI", Day.class);
      TimeSeries waistSeries = new TimeSeries("Waist", Day.class);
      TimeSeries ldlSeries = new TimeSeries("LDL", Day.class);
      TimeSeries tcHdlSeries = new TimeSeries("TC/HDL", Day.class);
      TimeSeries importanceSeries = new TimeSeries("Importance", Day.class);
      TimeSeries confidenceSeries = new TimeSeries("Confidence", Day.class);
      Map<GraphDesc, TimeSeries> graphDescSeriesMap = new HashMap<GraphDesc, TimeSeries>();
      graphDescSeriesMap.put(new GraphDesc("Systolic Blood Pressure", "Dates", "SBP(mmHg)"), sbpSeries);
      graphDescSeriesMap.put(new GraphDesc("Diastolic Blood Pressure", "Dates", "DBP(mmHg)"), dbpSeries);
      graphDescSeriesMap.put(new GraphDesc("BMI", "Dates", "BMI(kg/m2)"), bmiSeries);
      graphDescSeriesMap.put(new GraphDesc("Waist", "Dates", "Waist(cm)"), waistSeries);
      graphDescSeriesMap.put(new GraphDesc("LDL", "Dates", "LDL(mmol/L)"), ldlSeries);
      {
        GraphDesc tcHdlDesc = new GraphDesc("TC/HDL", "Dates", "TC/HDL(ratio)");
        tcHdlDesc.setFileName("TC_HDL");
        graphDescSeriesMap.put(tcHdlDesc, tcHdlSeries);
      }
      graphDescSeriesMap.put(new GraphDesc("Importance", "Dates", "Importance(1-10)"), importanceSeries);
      graphDescSeriesMap.put(new GraphDesc("Confidence", "Dates", "Confidence(1-10)"), confidenceSeries);
      if (patientHistorySize >= 1) {
        ListIterator patientHistoryIt = patientHistory.listIterator();
        while (patientHistoryIt.hasNext()) {
          Hsfo2Visit visit = (Hsfo2Visit) patientHistoryIt.next();
          final Date visitDate = visit.getVisitDate_Id();
          if (visitDate != null) {
            final Day visitDay = new Day(visitDate);
            if (visit.getSBP() != 0) {
              sbpSeries.addOrUpdate(visitDay, visit.getSBP());
            }
            if (visit.getDBP() != 0) {
              dbpSeries.addOrUpdate(visitDay, visit.getDBP());
            }
            if (visit.getWeight() != 0) {
              Double bmi = getBmi(visit, hsfo2Patient);
              if (bmi > 0)
                bmiSeries.addOrUpdate(visitDay, bmi);
            }
            // modified by victor for the waist_unit null bug, 2007
            if (visit.getWaist() != 0 && visit.getWaist_unit() != null) {
              double waistv = visit.getWaist();
              String waistunit = visit.getWaist_unit();
              double waist = 0.0;
              if (waistunit.equals("cm")) {
                waist = waistv;
              } else {
                // 1 inch = 2.54 cm
                waist = waistv * 2.54;
              }
              waistSeries.addOrUpdate(visitDay, waist);
            }
            if (visit.getChange_importance() != 0) {
              importanceSeries.addOrUpdate(visitDay, visit.getChange_importance());
            }
            if (visit.getChange_confidence() != 0) {
              confidenceSeries.addOrUpdate(visitDay, visit.getChange_confidence());
            }
          }
          final Date labResultDate = visit.getTC_HDL_LabresultsDate();
          if (labResultDate != null) {
            final Day labResultDay = new Day(labResultDate);
            if (visit.getTC_HDL() != 0) {
              tcHdlSeries.addOrUpdate(labResultDay, visit.getTC_HDL());
            }
            if (visit.getLDL() != 0) {
              ldlSeries.addOrUpdate(labResultDay, visit.getLDL());
            }
          }
        }
      }
      // generate the graphs and export them as pictures
      generateGraphs(request, response, graphDescSeriesMap);
      forwardToForm = FORM.graphs;
    }
  }
  historyList.setPatientHistory(patientHistory);
  // set request attributes to forward to jsp
  request.setAttribute("siteId", OscarProperties.getInstance().getProperty("hsfo2.loginSiteCode", "xxx"));
  request.setAttribute("Hsfo2Patient", hsfo2Patient);
  request.setAttribute("historyList", historyList);
  request.setAttribute("Hsfo2Visit", latestHsfo2Visit); // getDay() is day of week
  request.setAttribute("isFirstRecord", isfirstrecord);
  return mapping.findForward(forwardToForm.name());
}
From source file:org.apache.hadoop.hbase.coprocessor.transactional.SsccRegionEndpoint.java
/**
 * Commits the transaction.
 *
 * @param state the SsccTransactionState
 * @throws IOException
 */
private void commit(final SsccTransactionState state) throws IOException {
  long txid = 0;
  // if state is in ABORTED or COMMITED status, do nothing
  if (state.getStatus() == Status.ABORTED || state.getStatus() == Status.COMMITED)
    return;
  if (LOG.isTraceEnabled())
    LOG.trace("SsccRegionEndpoint coprocessor: Committing transaction: " + state.toString() + " to "
        + m_Region.getRegionInfo().getRegionNameAsString());
  long inTransactionId = state.getTransactionId();
  long startId = state.getStartId();
  if (state.isReinstated()) {
    if (LOG.isTraceEnabled())
      LOG.trace("SsccRegionEndpoint coprocessor: commit Trafodion Recovery: commit reinstated indoubt transactions "
          + inTransactionId + " in region " + m_Region.getRegionInfo().getRegionNameAsString());
    if (false) { // something wrong
      state.setStatus(Status.ABORTED);
      retireTransaction(state);
    }
  } // reinstated transactions
  else {
    // get a commit ID; this commitId must be comparable with the startId
    if (LOG.isTraceEnabled())
      LOG.trace("SsccRegionEndpoint coprocessor: commit : try to update the status and version ");
    // long commitId = nextSsccSequenceId.getAndIncrement();
    long commitId = state.getCommitId();
    // update the put list
    List<byte[]> putToDoList = state.getPutRows();
    List<Delete> delToDoList = state.getDelRows();
    // List<Mutation> mutList = new ArrayList<Mutation>();
    try {
      for (byte[] rowkey : putToDoList) {
        // delete the status item from the status column for this transaction
        Delete statusDelete = new Delete(rowkey, startId);
        statusDelete.deleteColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.STATUS_COL);
        // statusDelete.deleteColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.COLUMNS_COL);
        m_Region.delete(statusDelete);
        // mutList.add(statusDelete);
        // insert a new item into the version column
        // Put verPut = new Put(rowkey, commitId.val);
        Put verPut = new Put(rowkey, commitId);
        verPut.add(DtmConst.TRANSACTION_META_FAMILY, SsccConst.VERSION_COL,
            SsccConst.generateVersionValue(startId, false));
        m_Region.put(verPut);
        // mutList.add(verPut);
      }
      ListIterator<Delete> deletesIter = null;
      for (deletesIter = delToDoList.listIterator(); deletesIter.hasNext();) {
        Delete d = deletesIter.next();
        // mutList.add(d);
        // insert a new item into the version column and
        // delete the status item from the status column for this transaction
        byte[] dKey = d.getRow();
        Delete statusDelete = new Delete(dKey, startId);
        statusDelete.deleteColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.STATUS_COL);
        // statusDelete.deleteColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.COLUMNS_COL);
        m_Region.delete(statusDelete);
        // Put verPut = new Put(dKey, commitId.val);
        Put verPut = new Put(dKey, commitId);
        verPut.add(DtmConst.TRANSACTION_META_FAMILY, SsccConst.VERSION_COL,
            SsccConst.generateVersionValue(startId, true));
        m_Region.put(verPut);
        // m_Region.delete(d);
      }
      // Do a batch mutation:
      // Mutation[] m = (Mutation[]) mutList.toArray();
      // m_Region.batchMutate(m);
      // set the commitId of the transaction:
      // state.setCommitId(commitId.val);
      // state.setCommitId(commitId);
    } catch (Exception e) { // something wrong
      LOG.error("SsccRegionEndpoint Commit get exception " + e.toString());
      state.setStatus(Status.ABORTED);
      retireTransaction(state);
      throw new IOException(e.toString());
    }
  } // normal transactions
  // Now the transactional writes live in the core WAL, we can write a commit to the log
  // so we don't have to recover it from the transactional WAL.
  if (state.hasWrite() || state.isReinstated()) {
    // comment out for now
    if (LOG.isTraceEnabled())
      LOG.trace("write commit edit to HLOG");
    if (LOG.isTraceEnabled())
      LOG.trace("BBB write commit edit to HLOG after appendNoSync");
    if (LOG.isTraceEnabled())
      LOG.trace("SsccRegionEndpoint coprocessor:commit -- EXIT txId: " + inTransactionId + " HLog seq " + txid);
    if (!editGenerated)
      editGenerated = true;
  }
  state.setStatus(Status.COMMITED);
  /*
  if (state.hasWrite() || state.isReinstated()) {
    synchronized (commitPendingTransactions) {
      if (!commitPendingTransactions.remove(state)) {
        LOG.fatal("SsccRegionEndpoint coprocessor: commit Committing a non-query transaction that is not in commitPendingTransactions");
        // synchronized statements are cleared for a throw
        throw new IOException("commit failure");
      }
    }
  }
  */
  if (LOG.isTraceEnabled())
    LOG.trace("SsccRegionEndpoint coprocessor: commit(tstate) -- EXIT SsccTransactionState: " + state.toString());
  if (state.isReinstated()) {
    synchronized (indoubtTransactionsById) {
      indoubtTransactionsById.remove(state.getTransactionId());
      int tmid = (int) (inTransactionId >> 32);
      int count = 0;
      if (indoubtTransactionsCountByTmid.containsKey(tmid)) {
        count = (int) indoubtTransactionsCountByTmid.get(tmid) - 1;
        if (count > 0)
          indoubtTransactionsCountByTmid.put(tmid, count);
      }
      if (count == 0) {
        indoubtTransactionsCountByTmid.remove(tmid);
        String lv_encoded = m_Region.getRegionInfo().getEncodedName();
        try {
          if (LOG.isTraceEnabled())
            LOG.trace("SsccRegionEndpoint coprocessor: commit Trafodion Recovery: delete in commit recovery zNode TM "
                + tmid + " region encoded name " + lv_encoded + " for 0 in-doubt transaction");
          deleteRecoveryzNode(tmid, lv_encoded);
        } catch (IOException e) {
          LOG.error("Trafodion Recovery: delete recovery zNode failed");
        }
      }
      if ((indoubtTransactionsById == null) || (indoubtTransactionsById.size() == 0)) {
        if (indoubtTransactionsById == null) {
          if (LOG.isTraceEnabled())
            LOG.trace("SsccRegionEndpoint coprocessor: commit Trafodion Recovery: start region in commit with indoubtTransactionsById null");
        } else if (LOG.isTraceEnabled()) {
          LOG.trace("SsccRegionEndpoint coprocessor: commit Trafodion Recovery: start region in commit with indoubtTransactionsById size "
              + indoubtTransactionsById.size());
        }
        startRegionAfterRecovery();
      }
    }
  }
  retireTransaction(state);
}
From source file:org.shareok.data.dspacemanager.DspaceApiHandlerImpl.java
@Override
public Map<String, List<String>> loadItemsFromSafPackage(String safPath, String collectionHandle,
    String dspaceApiUrl) {
  Map<String, List<String>> importResults = new HashMap<>();
  this.dspaceApiUrl = dspaceApiUrl;
  BufferedWriter loggingForUserWriter = null;
  BufferedWriter mapWriter = null;
  String message;
  if (DocumentProcessorUtil.isEmptyString(token)) {
    getTokenFromServer();
    if (DocumentProcessorUtil.isEmptyString(token)) {
      try {
        throw new NoExistingApiTokenException("Cannot get useful token!");
      } catch (NoExistingApiTokenException ex) {
        logger.error(ex.getMessage());
        return null;
      }
    }
  }
  try {
    try {
      loggingForUserWriter = DataHandlersUtil.getWriterLoggingForUserFile(DataHandlersUtil.CURRENT_TASK_ID,
          DataHandlersUtil.CURRENT_TASK_TYPE);
    } catch (IOException ex) {
      logger.error("Cannot get the output file writer", ex);
      return null;
    }
    loggingForUserWriter.write(
        "\n\n==================================================\n\nStart to import the items in the SAF package.\n\n\n");
    try {
      File mapFile = new File(DocumentProcessorUtil.getFileContainerPath(reportFilePath) + File.separator + "mapfile");
      if (!mapFile.exists()) {
        mapFile.createNewFile();
      }
      mapWriter = new BufferedWriter(new FileWriter(mapFile));
    } catch (IOException ex) {
      logger.error("Cannot get the output file writer", ex);
      loggingForUserWriter.write("Cannot generate the map file, task dismissed. Please contact the relative personnel.\n");
      return null;
    }
    File safFile = new File(safPath);
    if (safPath.endsWith(".zip")) {
      String newPath = FileZipper.unzipToDirectory(safPath);
      if (null == newPath) {
        loggingForUserWriter.write("The SAF package has a problem, please contact the relative personnel.\n");
        throw new ErrorUnzipSafPackageException("Cannot unzip the saf package at " + safPath);
      }
      // Change the path to be the unzipped folder
      // safPath = DocumentProcessorUtil.getFileNameWithoutExtension(safPath);
      safFile = new File(newPath);
    }
    if (safFile.isDirectory()) {
      mainLoop: for (File file : safFile.listFiles()) {
        if (null != file && file.isDirectory()) {
          File[] fileList = file.listFiles();
          boolean containsContentsFile = false;
          boolean containsMetadataFile = false;
          List<File> metadataFileList = new ArrayList<>();
          File contentFile = null;
          String doi = "";
          for (File itemFile : fileList) {
            if (null == itemFile) {
              continue;
            }
            String fileName = itemFile.getName();
            if ("contents".equals(fileName)) {
              containsContentsFile = true;
              contentFile = itemFile;
            } else if (METADATA_FILE_NAMES_LIST.contains(fileName.replaceAll(".xml", ""))) {
              // Check if there are some duplicates based on doi
              if (fileName.contains("dublin")) {
                doi = findDoiFromDublin(itemFile.getAbsolutePath());
                if (null != doi && checkDuplicatesByDoi(doi, collectionHandle, dspaceApiUrl)) {
                  message = "Duplication detected: collection " + collectionHandle
                      + " has already had an item with doi=" + doi;
                  logger.debug(message);
                  System.out.println(message + ".\n\n\n");
                  loggingForUserWriter.write(message + ".\n\n\n");
                  loggingForUserWriter.write("Item with doi:" + doi
                      + " has been processed.\n\n==================================================\n\n");
                  continue mainLoop;
                }
              }
              containsMetadataFile = true;
              metadataFileList.add(itemFile);
            }
          }
          if (containsContentsFile && containsMetadataFile) {
            String newItemId;
            String newItemHandle;
            // Create the new item now:
            try {
              String collectionId = getObjectIdByHandler(collectionHandle, dspaceApiUrl);
              if (DocumentProcessorUtil.isEmptyString(collectionId)) {
                throw new Exception("Cannot get collection ID from handler " + collectionHandle + "!");
              }
              Map newItemInfo = createEmptyItem(getObjectIdByHandler(collectionHandle, dspaceApiUrl));
              newItemId = String.valueOf(newItemInfo.get("id"));
              newItemHandle = (String) newItemInfo.get("handle");
              message = "A new item with handle = " + newItemHandle + " has been added to collection "
                  + collectionHandle + ".\n\n";
              logger.debug(message);
              System.out.println(message);
              loggingForUserWriter.write(message);
              mapWriter.write(file.getName() + " " + newItemHandle + "\n");
            } catch (Exception ex) {
              message = "Cannot create the new item with collection handler " + collectionHandle
                  + ".\nItem with doi=" + doi + " is not added.";
              logger.error(message, ex);
              System.out.println(message + "\n\n\n");
              loggingForUserWriter.write(message + "\n\n\n");
              continue;
            }
            // Add the metadata to the new item:
            String[] paths = new String[metadataFileList.size()];
            ListIterator metadataIt = metadataFileList.listIterator();
            while (metadataIt.hasNext()) {
              File metadataFile = (File) metadataIt.next();
              paths[metadataIt.nextIndex() - 1] = metadataFile.getAbsolutePath();
            }
            Map<String, String> metadataStrings = getMetadataFromXmlFiles(paths);
            for (String path : metadataStrings.keySet()) {
              String metadata = metadataStrings.get(path);
              message = "Adding metadata file " + metadata + " now with file name : " + file.getName();
              logger.debug(message);
              System.out.println(message + "\n\n");
              loggingForUserWriter.write("Start to add metadata for item doi:" + doi + "\n");
              try {
                String metadataInfo = addItemMetadata(newItemId, metadata);
                if (null == metadataInfo || metadataInfo.equals("")) {
                  // File the failure under "metadata-unimported" so the retry pass
                  // below can find it (the original listing used "metadata-imported").
                  if (null == importResults.get("metadata-unimported")) {
                    importResults.put("metadata-unimported", new ArrayList<String>());
                  }
                  List metadataUnimportedList = (ArrayList) importResults.get("metadata-unimported");
                  metadataUnimportedList.add(newItemId + "---" + path);
                  message = "Failed to add the metadata into item " + newItemHandle + ".\n\n";
                  logger.debug(message);
                  System.out.println(message);
                  loggingForUserWriter.write(message);
                } else {
                  if (null == importResults.get("metadata-imported")) {
                    importResults.put("metadata-imported", new ArrayList<String>());
                  }
                  List metadataImportedList = (ArrayList) importResults.get("metadata-imported");
                  metadataImportedList.add(newItemId + "---" + path);
                  message = "A new set of metadata entries have been added to the item " + newItemHandle + ". \n\n";
                  logger.debug(message);
                  System.out.println(message);
                  loggingForUserWriter.write(message);
                }
              } catch (Exception ex) {
                if (null == importResults.get("metadata-unimported")) {
                  importResults.put("metadata-unimported", new ArrayList<String>());
                }
                List metadataUnimportedList = (ArrayList) importResults.get("metadata-unimported");
                metadataUnimportedList.add(newItemId + "---" + path);
                message = "Failed to add metadata into item " + newItemHandle + "\n" + ex.getMessage();
                logger.debug(message);
                System.out.println(message + "\n\n");
                loggingForUserWriter.write(message + "\n\n");
              }
            }
            List bitstreamFileList = DocumentProcessorUtil.readTextFileIntoList(contentFile.getAbsolutePath());
            ListIterator it = bitstreamFileList.listIterator();
            while (it.hasNext()) {
              String bitstreamFileName = (String) it.next();
              File bitstreamFile = new File(file.getAbsoluteFile() + File.separator + bitstreamFileName);
              if (!bitstreamFile.exists()) {
                message = "The bitstream file " + bitstreamFileName + " does not exist in the saf package "
                    + safPath + "!\n";
                logger.debug(message);
                System.out.println(message + "!\n\n");
                loggingForUserWriter.write(message + "!\n\n");
              } else {
                String newName = bitstreamFile.getName().replace(" ", "_");
                try {
                  Map bitstreamInfo = addItemBitstream(newItemId, bitstreamFile.getAbsolutePath(), newName, newName);
                  if (null != bitstreamInfo) {
                    if (null == importResults.get("bitstream-imported")) {
                      importResults.put("bitstream-imported", new ArrayList<String>());
                    }
                    List bitstreamImportedList = (ArrayList) importResults.get("bitstream-imported");
                    bitstreamImportedList.add(newItemId + "---" + bitstreamFile.getAbsoluteFile());
                    message = "A new bitstream file " + bitstreamFileName + " has been added to the item "
                        + newItemHandle + ".\n\n";
                    logger.debug(message);
                    System.out.println(message);
                    loggingForUserWriter.write(message);
                  } else {
                    if (null == importResults.get("bitstream-unimported")) {
                      importResults.put("bitstream-unimported", new ArrayList<String>());
                    }
                    List bitstreamUnimportedList = (ArrayList) importResults.get("bitstream-unimported");
                    bitstreamUnimportedList.add(newItemId + "---" + bitstreamFile.getAbsoluteFile());
                    message = "Cannot add the bitstream!";
                    throw new Exception(message);
                  }
                } catch (Exception ex) {
                  if (null == importResults.get("bitstream-unimported")) {
                    importResults.put("bitstream-unimported", new ArrayList<String>());
                  }
                  List bitstreamUnimportedList = (ArrayList) importResults.get("bitstream-unimported");
                  bitstreamUnimportedList.add(newItemId + "---" + bitstreamFile.getAbsoluteFile());
                  message = "Failed to add the bitstream file " + bitstreamFileName + " into item "
                      + newItemHandle + ".\n";
                  logger.debug(message, ex);
                  ex.printStackTrace();
                  loggingForUserWriter.write(message);
                }
              }
            }
          } else {
            message = "This saf package is missing either the contents file or the metadata files.\n\n";
            logger.debug(message);
            System.out.println(message);
            loggingForUserWriter.write(message);
            // throw new SafPackageMissingFileException("Saf package at " + safPath + " either the contents file or the metadata files are missing!");
          }
          loggingForUserWriter.write("Item with doi:" + doi
              + " has been processed.\n\n==================================================\n\n");
        }
      }
    } else {
      message = "This SAF package is not a directory.\n\n\n";
      logger.debug(message);
      System.out.println(message);
      loggingForUserWriter.write(message);
      throw new SafPackagePathErrorException(message);
    }
    // Due to various reasons, some metadata and/or bitstreams cannot be added and are given a second chance here:
    List metadataUnimportedList = (ArrayList) importResults.get("metadata-unimported");
    if (null != metadataUnimportedList && metadataUnimportedList.size() > 0) {
      for (Iterator<String> iterator = metadataUnimportedList.iterator(); iterator.hasNext();) {
        String[] values = ((String) iterator.next()).split("---");
        message = "Second try to add metadata into " + values[0] + " with data " + values[1];
        logger.debug(message);
        System.out.println(message + "\n\n");
        loggingForUserWriter.write(message + "\n\n");
        try {
          String metadataInfo = addItemMetadata(values[0], values[1]);
          if (null != metadataInfo) {
            iterator.remove();
            message = "Second try: successfully added metadata into item " + values[0] + " with data " + values[1];
            logger.debug(message);
            System.out.println(message + "\n\n");
            loggingForUserWriter.write(message + "\n\n");
          } else {
            throw new Exception();
          }
        } catch (Exception ex) {
          message = "Second try: failed to add metadata into item " + values[0] + " with data " + values[1]
              + "\n" + ex.getMessage();
          logger.debug(message);
          System.out.println(message + "\n\n");
          loggingForUserWriter.write(message + "\n\n");
        }
      }
    }
    List bitstreamUnimportedList = (ArrayList) importResults.get("bitstream-unimported");
    if (null != bitstreamUnimportedList && bitstreamUnimportedList.size() > 0) {
      for (Iterator<String> iterator = bitstreamUnimportedList.iterator(); iterator.hasNext();) {
        String[] values = ((String) iterator.next()).split("---");
        String name = new File(values[1]).getName().replace(" ", "_");
        message = "Second try to add bitstream into " + values[0] + " with path " + values[1];
        logger.debug(message);
        System.out.println(message + "\n\n");
        loggingForUserWriter.write(message + "\n\n");
        try {
          Map bitstreamInfo = addItemBitstream(values[0], values[1], name, name);
          if (null != bitstreamInfo) {
            iterator.remove();
            message = "Second try: successfully added bitstream into item " + values[0] + " with path " + values[1];
            logger.debug(message);
            System.out.println(message + "\n\n");
            loggingForUserWriter.write(message + "\n\n");
          } else {
            message = "Second try: failed to add bitstream into item " + values[0] + " with path " + values[1];
            logger.debug(message);
            System.out.println(message + "\n\n");
            loggingForUserWriter.write(message + "\n\n");
            throw new Exception(message);
          }
        } catch (Exception ex) {
          logger.error(ex);
        }
      }
    }
  } catch (SafPackagePathErrorException ex) {
    message = "Saf package is not valid!\n" + ex.getMessage();
    System.out.println(message + "\n\n");
    logger.error("Cannot create new items with saf package path: " + safPath, ex);
  } catch (NullPointerException | ErrorUnzipSafPackageException | IOException ex) {
    logger.error(ex);
  } finally {
    if (null != loggingForUserWriter) {
      try {
        loggingForUserWriter.flush();
      } catch (IOException ex) {
        logger.error("Cannot close the output file writer!");
      }
    }
    if (null != mapWriter) {
      try {
        mapWriter.flush();
        mapWriter.close();
      } catch (IOException ex) {
        logger.error("Cannot close the map file writer!");
      }
    }
  }
  return importResults;
}
From source file:org.apache.hadoop.hbase.coprocessor.transactional.SsccRegionEndpoint.java
@Override
public void performScan(RpcController controller, SsccPerformScanRequest request,
    RpcCallback<SsccPerformScanResponse> done) {
  boolean hasMore = true;
  RegionScanner scanner = null;
  Throwable t = null;
  ScannerTimeoutException ste = null;
  OutOfOrderScannerNextException ooo = null;
  WrongRegionException wre = null;
  Exception ne = null;
  Scan scan = null;
  List<Cell> cellResults = new ArrayList<Cell>();
  List<Result> results = new ArrayList<Result>();
  List<Cell> validResults = new ArrayList<Cell>();
  org.apache.hadoop.hbase.client.Result result = null;
  long transId = request.getTransactionId();
  long startId = request.getStartId();
  long scannerId = request.getScannerId();
  int numberOfRows = request.getNumberOfRows();
  boolean closeScanner = request.getCloseScanner();
  long nextCallSeq = request.getNextCallSeq();
  long count = 0L;
  boolean shouldContinue = true;
  if (LOG.isTraceEnabled())
    LOG.trace("SsccRegionEndpoint coprocessor: performScan - scannerId " + scannerId + ", numberOfRows "
        + numberOfRows + ", nextCallSeq " + nextCallSeq);
  // This may be wrong, check later
  Map<String, Cell> tempBuf = new TreeMap<String, Cell>();
  try {
    scanner = getScanner(scannerId, nextCallSeq);
    if (LOG.isTraceEnabled())
      // parenthesized so the ternary applies to the null check, not the concatenation
      LOG.trace("SsccRegionEndpoint coprocessor: performScan - scanner is: "
          + (scanner == null ? "NULL" : "NOT NULL"));
    SsccTransactionState state = this.beginTransIfNotExist(transId, startId);
    Set<byte[]> visitCols = new HashSet<byte[]>();
    if (scanner != null) {
      if (LOG.isTraceEnabled())
        LOG.trace("SsccRegionEndpoint coprocessor: performScan - id " + scannerId + ", scanner is not null");
      boolean firstCell = true;
      while (shouldContinue) {
        hasMore = scanner.next(cellResults);
        if (LOG.isTraceEnabled())
          LOG.trace("SsccRegionEndpoint coprocessor: performScan hasMore is: " + hasMore);
        firstCell = true;
        Result verResult = null;
        Result statusResult = null;
        Result colResult = null;
        tempBuf.clear();
        ListIterator<Cell> cellIter = null;
        for (cellIter = cellResults.listIterator(); cellIter.hasNext();) {
          Cell c = cellIter.next();
          if (firstCell == true) {
            if (CellUtil.cloneFamily(c) != DtmConst.TRANSACTION_META_FAMILY) {
              // get the statusList
              Get statusGet = new Get(c.getRow()); // TODO: deprecated API
              // statusGet.setTimeStamp(startId);
              statusGet.addColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.STATUS_COL);
              statusGet.setMaxVersions(DtmConst.MAX_VERSION);
              statusResult = m_Region.get(statusGet);
              // get the colList
              Get colGet = new Get(c.getRow()); // TODO: deprecated API
              // colGet.setTimeStamp(startId);
              colGet.addColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.COLUMNS_COL);
              colGet.setMaxVersions(DtmConst.MAX_VERSION);
              colResult = m_Region.get(colGet);
              // get the versionList
              Get verGet = new Get(c.getRow()); // TODO: deprecated API
              // verGet.setTimeStamp(startId);
              verGet.addColumn(DtmConst.TRANSACTION_META_FAMILY, SsccConst.VERSION_COL);
              verGet.setMaxVersions(DtmConst.MAX_VERSION);
              verResult = m_Region.get(verGet);
              firstCell = false;
            }
          }
          // long kvStartId = getStartIdFromTs(thisTs);
          if (firstCell == false) {
            // long thisTs = c.getTimestamp();
            if (state.handleResult(c, statusResult.listCells(), verResult.listCells(), colResult.listCells(),
                transId) == true) {
              byte[] keyt = CellUtil.cloneQualifier(c);
              String keys = new String(keyt);
              if (tempBuf.containsKey(keys) == false) // only keep the first one; assume the first is the latest
                tempBuf.put(keys, c);
            }
          }
        }
        for (String j : tempBuf.keySet()) {
          Cell kv = tempBuf.get(j);
          validResults.add(kv);
        }
        result = Result.create(validResults);
        cellResults.clear();
        validResults.clear();
        if (!result.isEmpty()) {
          results.add(result);
          count++;
        }
        if (count == numberOfRows || !hasMore)
          shouldContinue = false;
        if (LOG.isTraceEnabled())
          LOG.trace("SsccRegionEndpoint coprocessor: performScan - id " + scannerId + ", count is " + count
              + ", hasMore is " + hasMore + ", result " + result.isEmpty() + ", row " + result.getRow());
      }
    } else {
      if (LOG.isTraceEnabled())
        LOG.trace("SsccRegionEndpoint coprocessor: performScan - id " + scannerId + ", scanner is null");
    }
  } catch (OutOfOrderScannerNextException ooone) {
    if (LOG.isTraceEnabled())
      LOG.trace("SsccRegionEndpoint coprocessor: performScan - id " + scannerId
          + " Caught OutOfOrderScannerNextException " + ooone.getMessage() + " " + stackTraceToString(ooone));
    ooo = ooone;
  } catch (ScannerTimeoutException cste) {
    if (LOG.isTraceEnabled())
      LOG.trace("SsccRegionEndpoint coprocessor: performScan - id " + scannerId
          + " Caught ScannerTimeoutException " + cste.getMessage() + " " + stackTraceToString(cste));
    ste = cste;
  } catch (Throwable e) {
    if (LOG.isTraceEnabled())
      LOG.trace("SsccRegionEndpoint coprocessor: performScan - id " + scannerId + " Caught exception "
          + e.getMessage() + " " + stackTraceToString(e));
    t = e;
  } finally {
    if (scanner != null) {
      try {
        if (closeScanner) {
          scanner.close();
          /*
          try {
            scannerLeases.cancelLease(getScannerLeaseId(scannerId));
          } catch (LeaseException le) {
            // ignore
            if (LOG.isTraceEnabled())
              LOG.trace("SsccRegionEndpoint coprocessor: performScan failed to get a lease " + scannerId);
          }
          */
        }
      } catch (Exception e) {
        if (LOG.isTraceEnabled())
          LOG.trace("SsccRegionEndpoint coprocessor: performScan caught exception " + e.getMessage() + ""
              + stackTraceToString(e));
        ne = e;
      }
    }
  }
  TransactionalRegionScannerHolder rsh = scanners.get(scannerId);
  nextCallSeq++;
  rsh.nextCallSeq = nextCallSeq;
  if (LOG.isTraceEnabled())
    LOG.trace("SsccRegionEndpoint coprocessor: performScan - id " + transId + ", regionName "
        + regionInfo.getRegionNameAsString() + ", scannerId " + scannerId + ", nextCallSeq " + nextCallSeq
        + ", rsh.nextCallSeq " + rsh.nextCallSeq);
  org.apache.hadoop.hbase.coprocessor.transactional.generated.SsccRegionProtos.SsccPerformScanResponse.Builder performResponseBuilder = SsccPerformScanResponse.newBuilder();
  performResponseBuilder.setHasMore(hasMore);
  performResponseBuilder.setNextCallSeq(nextCallSeq);
  performResponseBuilder.setCount(count);
  performResponseBuilder.setHasException(false);
  if (results != null) {
    if (!results.isEmpty()) {
      ListIterator<Result> resultIter = null;
      for (resultIter = results.listIterator(); resultIter.hasNext();) {
        Result r = resultIter.next();
        performResponseBuilder.addResult(ProtobufUtil.toResult(r));
      }
    }
  }
  if (t != null) {
    performResponseBuilder.setHasException(true);
    performResponseBuilder.setException(t.toString());
  }
  if (ste != null) {
    performResponseBuilder.setHasException(true);
    performResponseBuilder.setException(ste.toString());
  }
  if (wre != null) {
    performResponseBuilder.setHasException(true);
    performResponseBuilder.setException(wre.toString());
  }
  if (ne != null) {
    performResponseBuilder.setHasException(true);
    performResponseBuilder.setException(ne.toString());
  }
  if (ooo != null) {
    performResponseBuilder.setHasException(true);
    performResponseBuilder.setException(ooo.toString());
  }
  SsccPerformScanResponse presponse = performResponseBuilder.build();
  done.run(presponse);
}
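A closing note on the read-only traversals above (the result loop in performScan, the HSFO history walk): when a loop never calls set(), add(), or remove() and never needs an index, listIterator() buys nothing over the enhanced for loop. A minimal sketch of the simpler equivalent (the list contents are placeholders):

import java.util.Arrays;
import java.util.List;

public class ReadOnlyTraversal {
    public static void main(String[] args) {
        List<String> rows = Arrays.asList("row-1", "row-2", "row-3");
        // No structural modification and no index needed: the enhanced for loop
        // is the idiomatic equivalent of a read-only listIterator() walk.
        for (String row : rows) {
            System.out.println(row);
        }
    }
}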