List of usage examples for the java.util.HashMap.isEmpty() method
public boolean isEmpty()
From source file:com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper.java
/**
 * Saves and deletes the objects given using one or more calls to the
 * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API.
 *
 * @param objectsToWrite
 *            A list of objects to save to DynamoDB. <b>No version checks
 *            are performed</b>, as required by the
 *            {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)}
 *            API.
 * @param objectsToDelete
 *            A list of objects to delete from DynamoDB. <b>No version
 *            checks are performed</b>, as required by the
 *            {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)}
 *            API.
 * @param config
 *            Only {@link DynamoDBMapperConfig#getTableNameOverride()} is
 *            considered; if specified, all objects in the two parameter
 *            lists will be considered to belong to the given table
 *            override. In particular, this method <b>always acts as if
 *            SaveBehavior.CLOBBER was specified</b> regardless of the
 *            value of the config parameter.
 * @return A list of failed batches which includes the unprocessed items and
 *         the exceptions causing the failure.
 */
public List<FailedBatch> batchWrite(List<? extends Object> objectsToWrite,
        List<? extends Object> objectsToDelete, DynamoDBMapperConfig config) {
    config = mergeConfig(config);

    List<FailedBatch> totalFailedBatches = new LinkedList<FailedBatch>();

    // Pending write requests, grouped by table name. Entries are only ever
    // created together with at least one request, so no list here is empty.
    HashMap<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

    // Auto-generated key values to write back into the saved objects once
    // the whole batch has been sent (batchWriteItem returns no item data).
    List<ValueUpdate> inMemoryUpdates = new LinkedList<ValueUpdate>();

    // Build a PutRequest for every object to save.
    for (Object toWrite : objectsToWrite) {
        Class<?> clazz = toWrite.getClass();
        String tableName = getTableName(clazz, config);

        Map<String, AttributeValue> attributeValues = new HashMap<String, AttributeValue>();

        // Look at every getter and construct a value object for it
        for (Method method : reflector.getRelevantGetters(clazz)) {
            Object getterResult = safeInvoke(method, toWrite);
            String attributeName = reflector.getAttributeName(method);

            AttributeValue currentValue = null;
            if (getterResult == null && reflector.isAssignableKey(method)) {
                // Null key with an auto-generatable type: generate a value
                // now and remember to apply it to the object afterwards.
                currentValue = getAutoGeneratedKeyAttributeValue(method, getterResult);
                inMemoryUpdates.add(new ValueUpdate(method, currentValue, toWrite));
            } else {
                currentValue = getSimpleAttributeValue(method, getterResult);
            }

            // Null values are skipped entirely (CLOBBER semantics).
            if (currentValue != null) {
                attributeValues.put(attributeName, currentValue);
            }
        }

        if (!requestItems.containsKey(tableName)) {
            requestItems.put(tableName, new LinkedList<WriteRequest>());
        }

        AttributeTransformer.Parameters<?> parameters = toParameters(attributeValues, clazz, config);

        requestItems.get(tableName).add(
                new WriteRequest().withPutRequest(new PutRequest().withItem(transformAttributes(parameters))));
    }

    // Build a DeleteRequest (keyed by the object's primary key) for every
    // object to delete.
    for (Object toDelete : objectsToDelete) {
        Class<?> clazz = toDelete.getClass();

        String tableName = getTableName(clazz, config);
        Map<String, AttributeValue> key = getKey(toDelete);

        if (!requestItems.containsKey(tableName)) {
            requestItems.put(tableName, new LinkedList<WriteRequest>());
        }

        requestItems.get(tableName).add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(key)));
    }

    // Break into chunks of 25 items and make service requests to DynamoDB
    // (25 is the BatchWriteItem API limit per request).
    while (!requestItems.isEmpty()) {
        HashMap<String, List<WriteRequest>> batch = new HashMap<String, List<WriteRequest>>();
        int i = 0;

        // Drain up to 25 write requests into this batch, consuming them
        // from requestItems as we go. A single batch may span tables.
        Iterator<Entry<String, List<WriteRequest>>> tableIter = requestItems.entrySet().iterator();
        while (tableIter.hasNext() && i < 25) {
            Entry<String, List<WriteRequest>> tableRequest = tableIter.next();
            batch.put(tableRequest.getKey(), new LinkedList<WriteRequest>());

            // NOTE: hasNext() is checked before i++, so i only advances
            // when another request is actually available to take.
            Iterator<WriteRequest> writeRequestIter = tableRequest.getValue().iterator();
            while (writeRequestIter.hasNext() && i++ < 25) {
                WriteRequest writeRequest = writeRequestIter.next();
                batch.get(tableRequest.getKey()).add(writeRequest);
                writeRequestIter.remove();
            }

            // If we've processed all the write requests for this table,
            // remove it from the parent iterator.
            if (!writeRequestIter.hasNext()) {
                tableIter.remove();
            }
        }

        List<FailedBatch> failedBatches = writeOneBatch(batch);
        if (failedBatches != null) {
            totalFailedBatches.addAll(failedBatches);

            // If contains throttling exception, we do a backoff
            if (containsThrottlingException(failedBatches)) {
                try {
                    Thread.sleep(1000 * 2);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag before surfacing the error.
                    Thread.currentThread().interrupt();
                    throw new AmazonClientException(e.getMessage(), e);
                }
            }
        }
    }

    // Once the entire batch is processed, update assigned keys in memory
    for (ValueUpdate update : inMemoryUpdates) {
        update.apply();
    }

    return totalFailedBatches;
}
From source file:com.amazonaws.mobileconnectors.dynamodbv2.dynamodbmapper.DynamoDBMapper.java
/**
 * Saves and deletes the objects given using one or more calls to the
 * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API.
 * <p>
 * This method fails to save the batch if the size of an individual object
 * in the batch exceeds 400 KB. For more information on batch restrictions
 * see, http://docs.aws.amazon
 * .com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
 * </p>
 *
 * @param objectsToWrite A list of objects to save to DynamoDB. <b>No
 *            version checks are performed</b>, as required by the
 *            {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)}
 *            API.
 * @param objectsToDelete A list of objects to delete from DynamoDB. <b>No
 *            version checks are performed</b>, as required by the
 *            {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)}
 *            API.
 * @param config Only {@link DynamoDBMapperConfig#getTableNameOverride()} is
 *            considered; if specified, all objects in the two parameter
 *            lists will be considered to belong to the given table
 *            override. In particular, this method <b>always acts as if
 *            SaveBehavior.CLOBBER was specified</b> regardless of the value
 *            of the config parameter.
 * @return A list of failed batches which includes the unprocessed items and
 *         the exceptions causing the failure.
 */
public List<FailedBatch> batchWrite(List<? extends Object> objectsToWrite,
        List<? extends Object> objectsToDelete, DynamoDBMapperConfig config) {
    config = mergeConfig(config);

    final List<FailedBatch> totalFailedBatches = new LinkedList<FailedBatch>();

    // Pending write requests, grouped by table name. Entries are only ever
    // created together with at least one request, so no list here is empty.
    final HashMap<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

    final ItemConverter converter = getConverter(config);

    // Auto-generated key values to write back into the saved objects once
    // the whole batch has been sent (batchWriteItem returns no item data).
    final List<ValueUpdate> inMemoryUpdates = new LinkedList<ValueUpdate>();

    // Build a PutRequest for every object to save.
    for (final Object toWrite : objectsToWrite) {
        final Class<?> clazz = toWrite.getClass();
        final String tableName = getTableName(clazz, toWrite, config);

        final Map<String, AttributeValue> attributeValues = new HashMap<String, AttributeValue>();

        // Look at every getter and construct a value object for it
        for (final Method method : reflector.getRelevantGetters(clazz)) {
            final Object getterResult = ReflectionUtils.safeInvoke(method, toWrite);
            final String attributeName = reflector.getAttributeName(method);

            AttributeValue currentValue = null;
            if (getterResult == null && reflector.isAssignableKey(method)) {
                // Null key with an auto-generatable type: generate a value
                // now and remember to apply it to the object afterwards.
                currentValue = getAutoGeneratedKeyAttributeValue(converter, method);
                inMemoryUpdates.add(new ValueUpdate(method, currentValue, toWrite, converter));
            } else {
                currentValue = converter.convert(method, getterResult);
            }

            // Null values are skipped entirely (CLOBBER semantics).
            if (currentValue != null) {
                attributeValues.put(attributeName, currentValue);
            }
        }

        if (!requestItems.containsKey(tableName)) {
            requestItems.put(tableName, new LinkedList<WriteRequest>());
        }

        final AttributeTransformer.Parameters<?> parameters = toParameters(attributeValues, clazz, tableName,
                config);

        requestItems.get(tableName).add(
                new WriteRequest().withPutRequest(new PutRequest().withItem(transformAttributes(parameters))));
    }

    // Build a DeleteRequest (keyed by the object's primary key) for every
    // object to delete.
    for (final Object toDelete : objectsToDelete) {
        final Class<?> clazz = toDelete.getClass();

        final String tableName = getTableName(clazz, toDelete, config);
        final Map<String, AttributeValue> key = getKey(converter, toDelete);

        if (!requestItems.containsKey(tableName)) {
            requestItems.put(tableName, new LinkedList<WriteRequest>());
        }

        requestItems.get(tableName).add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(key)));
    }

    // Break into chunks of 25 items and make service requests to DynamoDB
    // (MAX_ITEMS_PER_BATCH mirrors the BatchWriteItem API limit per request).
    while (!requestItems.isEmpty()) {
        final HashMap<String, List<WriteRequest>> batch = new HashMap<String, List<WriteRequest>>();
        int i = 0;

        // Drain up to MAX_ITEMS_PER_BATCH write requests into this batch,
        // consuming them from requestItems as we go. A single batch may
        // span tables.
        final Iterator<Entry<String, List<WriteRequest>>> tableIter = requestItems.entrySet().iterator();
        while (tableIter.hasNext() && i < MAX_ITEMS_PER_BATCH) {
            final Entry<String, List<WriteRequest>> tableRequest = tableIter.next();
            batch.put(tableRequest.getKey(), new LinkedList<WriteRequest>());

            // NOTE: hasNext() is checked before i++, so i only advances
            // when another request is actually available to take.
            final Iterator<WriteRequest> writeRequestIter = tableRequest.getValue().iterator();
            while (writeRequestIter.hasNext() && i++ < MAX_ITEMS_PER_BATCH) {
                final WriteRequest writeRequest = writeRequestIter.next();
                batch.get(tableRequest.getKey()).add(writeRequest);
                writeRequestIter.remove();
            }

            // If we've processed all the write requests for this table,
            // remove it from the parent iterator.
            if (!writeRequestIter.hasNext()) {
                tableIter.remove();
            }
        }

        final List<FailedBatch> failedBatches = writeOneBatch(batch);
        if (failedBatches != null) {
            totalFailedBatches.addAll(failedBatches);

            // If contains throttling exception, we do a backoff
            if (containsThrottlingException(failedBatches)) {
                try {
                    Thread.sleep(THREAD_SLEEP_TWO_SECONDS);
                } catch (final InterruptedException e) {
                    // Restore the interrupt flag before surfacing the error.
                    Thread.currentThread().interrupt();
                    throw new AmazonClientException(e.getMessage(), e);
                }
            }
        }
    }

    // Once the entire batch is processed, update assigned keys in memory
    for (final ValueUpdate update : inMemoryUpdates) {
        update.apply();
    }

    return totalFailedBatches;
}
From source file:org.akaza.openclinica.control.submit.CreateDiscrepancyNoteServlet.java
@Override protected void processRequest() throws Exception { FormProcessor fp = new FormProcessor(request); DiscrepancyNoteDAO dndao = new DiscrepancyNoteDAO(sm.getDataSource()); List<DiscrepancyNoteType> types = DiscrepancyNoteType.list; request.setAttribute(DIS_TYPES, types); request.setAttribute(RES_STATUSES, ResolutionStatus.list); boolean writeToDB = fp.getBoolean(WRITE_TO_DB, true); //this should be set based on a new property of DisplayItemBean boolean isReasonForChange = fp.getBoolean(IS_REASON_FOR_CHANGE); int entityId = fp.getInt(ENTITY_ID); // subjectId has to be added to the database when disc notes area saved // as entity_type 'subject' int subjectId = fp.getInt(SUBJECT_ID); int itemId = fp.getInt(ITEM_ID); String entityType = fp.getString(ENTITY_TYPE); String field = fp.getString(ENTITY_FIELD); String column = fp.getString(ENTITY_COLUMN); int parentId = fp.getInt(PARENT_ID); //patch for repeating groups and RFC on empty fields int isGroup = fp.getInt(IS_GROUP_ITEM); // request.setAttribute(IS_GROUP_ITEM, new Integer(isGroup)); int eventCRFId = fp.getInt(EVENT_CRF_ID); request.setAttribute(EVENT_CRF_ID, new Integer(eventCRFId)); int rowCount = fp.getInt(PARENT_ROW_COUNT); // run only once: try to recalculate writeToDB if (!StringUtils.isBlank(entityType) && "itemData".equalsIgnoreCase(entityType) && isGroup != 0 && eventCRFId != 0) { // request.setAttribute(PARENT_ROW_COUNT, new Integer(eventCRFId)); int ordinal_for_repeating_group_field = calculateOrdinal(isGroup, field, eventCRFId, rowCount); int writeToDBStatus = isWriteToDB(isGroup, field, entityId, itemId, ordinal_for_repeating_group_field, eventCRFId);/*from w w w .jav a 2 s .c o m*/ writeToDB = (writeToDBStatus == -1) ? false : ((writeToDBStatus == 1) ? true : writeToDB); } boolean isInError = fp.getBoolean(ERROR_FLAG); boolean isNew = fp.getBoolean(NEW_NOTE); request.setAttribute(NEW_NOTE, isNew ? 
"1" : "0"); String strResStatus = fp.getString(PRESET_RES_STATUS); if (!strResStatus.equals("")) { request.setAttribute(PRESET_RES_STATUS, strResStatus); } String monitor = fp.getString("monitor"); String enterData = fp.getString("enterData"); request.setAttribute("enterData", enterData); boolean enteringData = false; if (enterData != null && "1".equalsIgnoreCase(enterData)) { // variables are not set in JSP, so not from viewing data and from // entering data request.setAttribute(CAN_MONITOR, "1"); request.setAttribute("monitor", monitor); enteringData = true; } else if ("1".equalsIgnoreCase(monitor)) {// change to allow user to // enter note for all items, // not just blank items request.setAttribute(CAN_MONITOR, "1"); request.setAttribute("monitor", monitor); } else { request.setAttribute(CAN_MONITOR, "0"); } if ("itemData".equalsIgnoreCase(entityType) && enteringData) { request.setAttribute("enterItemData", "yes"); } DateFormat dateFormatter = DateFormat.getDateInstance(DateFormat.DEFAULT, locale); int preUserId = 0; if (!StringUtils.isBlank(entityType)) { if ("itemData".equalsIgnoreCase(entityType) || "itemdata".equalsIgnoreCase(entityType)) { ItemBean item = (ItemBean) new ItemDAO(sm.getDataSource()).findByPK(itemId); ItemDataBean itemData = (ItemDataBean) new ItemDataDAO(sm.getDataSource()).findByPK(entityId); request.setAttribute("entityValue", itemData.getValue()); request.setAttribute("entityName", item.getName()); EventCRFDAO ecdao = new EventCRFDAO(sm.getDataSource()); EventCRFBean ec = (EventCRFBean) ecdao.findByPK(itemData.getEventCRFId()); preUserId = ec.getOwnerId(); } else if ("studySub".equalsIgnoreCase(entityType)) { StudySubjectBean ssub = (StudySubjectBean) new StudySubjectDAO(sm.getDataSource()) .findByPK(entityId); SubjectBean sub = (SubjectBean) new SubjectDAO(sm.getDataSource()).findByPK(ssub.getSubjectId()); preUserId = ssub.getOwnerId(); if (!StringUtils.isBlank(column)) { if ("enrollment_date".equalsIgnoreCase(column)) { if 
(ssub.getEnrollmentDate() != null) { request.setAttribute("entityValue", dateFormatter.format(ssub.getEnrollmentDate())); } else { request.setAttribute("entityValue", resword.getString("N/A")); } request.setAttribute("entityName", resword.getString("enrollment_date")); } else if ("gender".equalsIgnoreCase(column)) { request.setAttribute("entityValue", sub.getGender() + ""); request.setAttribute("entityName", resword.getString("gender")); } else if ("date_of_birth".equalsIgnoreCase(column)) { if (sub.getDateOfBirth() != null) { request.setAttribute("entityValue", dateFormatter.format(sub.getDateOfBirth())); } else { request.setAttribute("entityValue", resword.getString("N/A")); } request.setAttribute("entityName", resword.getString("date_of_birth")); } else if ("unique_identifier".equalsIgnoreCase(column)) { if (sub.getUniqueIdentifier() != null) { request.setAttribute("entityValue", sub.getUniqueIdentifier()); } request.setAttribute("entityName", resword.getString("unique_identifier")); } } } else if ("subject".equalsIgnoreCase(entityType)) { SubjectBean sub = (SubjectBean) new SubjectDAO(sm.getDataSource()).findByPK(entityId); preUserId = sub.getOwnerId(); if (!StringUtils.isBlank(column)) { if ("gender".equalsIgnoreCase(column)) { request.setAttribute("entityValue", sub.getGender() + ""); request.setAttribute("entityName", resword.getString("gender")); } else if ("date_of_birth".equalsIgnoreCase(column)) { if (sub.getDateOfBirth() != null) { request.setAttribute("entityValue", dateFormatter.format(sub.getDateOfBirth())); } request.setAttribute("entityName", resword.getString("date_of_birth")); } else if ("unique_identifier".equalsIgnoreCase(column)) { request.setAttribute("entityValue", sub.getUniqueIdentifier()); request.setAttribute("entityName", resword.getString("unique_identifier")); } } } else if ("studyEvent".equalsIgnoreCase(entityType)) { StudyEventBean se = (StudyEventBean) new StudyEventDAO(sm.getDataSource()).findByPK(entityId); preUserId = 
se.getOwnerId(); if (!StringUtils.isBlank(column)) { if ("location".equalsIgnoreCase(column)) { request.setAttribute("entityValue", se.getLocation().equals("") || se.getLocation() == null ? resword.getString("N/A") : se.getLocation()); request.setAttribute("entityName", resword.getString("location")); } else if ("start_date".equalsIgnoreCase(column)) { if (se.getDateStarted() != null) { request.setAttribute("entityValue", dateFormatter.format(se.getDateStarted())); } else { request.setAttribute("entityValue", resword.getString("N/A")); } request.setAttribute("entityName", resword.getString("start_date")); } else if ("end_date".equalsIgnoreCase(column)) { if (se.getDateEnded() != null) { request.setAttribute("entityValue", dateFormatter.format(se.getDateEnded())); } else { request.setAttribute("entityValue", resword.getString("N/A")); } request.setAttribute("entityName", resword.getString("end_date")); } } } else if ("eventCrf".equalsIgnoreCase(entityType)) { EventCRFBean ec = (EventCRFBean) new EventCRFDAO(sm.getDataSource()).findByPK(entityId); preUserId = ec.getOwnerId(); if (!StringUtils.isBlank(column)) { if ("date_interviewed".equals(column)) { if (ec.getDateInterviewed() != null) { request.setAttribute("entityValue", dateFormatter.format(ec.getDateInterviewed())); } else { request.setAttribute("entityValue", resword.getString("N/A")); } request.setAttribute("entityName", resword.getString("date_interviewed")); } else if ("interviewer_name".equals(column)) { request.setAttribute("entityValue", ec.getInterviewerName()); request.setAttribute("entityName", resword.getString("interviewer_name")); } } } } // finds all the related notes ArrayList notes = (ArrayList) dndao.findAllByEntityAndColumn(entityType, entityId, column); DiscrepancyNoteBean parent = new DiscrepancyNoteBean(); if (parentId > 0) { dndao.setFetchMapping(true); parent = (DiscrepancyNoteBean) dndao.findByPK(parentId); if (parent.isActive()) { request.setAttribute("parent", parent); } 
dndao.setFetchMapping(false); } FormDiscrepancyNotes newNotes = (FormDiscrepancyNotes) session .getAttribute(AddNewSubjectServlet.FORM_DISCREPANCY_NOTES_NAME); if (newNotes == null) { newNotes = new FormDiscrepancyNotes(); } boolean isNotesExistInSession = (!newNotes.getNotes(field).isEmpty()) ? true : (!newNotes.getNotes(eventCRFId + "_" + field).isEmpty()) ? true : false; if (!notes.isEmpty() || isNotesExistInSession) { request.setAttribute("hasNotes", "yes"); } else { request.setAttribute("hasNotes", "no"); logger.debug("has notes:" + "no"); } //only for adding a new thread if (currentRole.getRole().equals(Role.RESEARCHASSISTANT) || currentRole.getRole().equals(Role.RESEARCHASSISTANT2) || currentRole.getRole().equals(Role.INVESTIGATOR)) { ArrayList<ResolutionStatus> resStatuses = new ArrayList<ResolutionStatus>(); resStatuses.add(ResolutionStatus.OPEN); resStatuses.add(ResolutionStatus.RESOLVED); request.setAttribute(RES_STATUSES, resStatuses); List<DiscrepancyNoteType> types2 = new ArrayList<DiscrepancyNoteType>(DiscrepancyNoteType.list); types2.remove(DiscrepancyNoteType.QUERY); request.setAttribute(DIS_TYPES, types2); request.setAttribute(WHICH_RES_STATUSES, "22"); } else if (currentRole.getRole().equals(Role.MONITOR)) { ArrayList<ResolutionStatus> resStatuses = new ArrayList(); resStatuses.add(ResolutionStatus.OPEN); resStatuses.add(ResolutionStatus.UPDATED); resStatuses.add(ResolutionStatus.CLOSED); request.setAttribute(RES_STATUSES, resStatuses); request.setAttribute(WHICH_RES_STATUSES, "1"); ArrayList<DiscrepancyNoteType> types2 = new ArrayList<DiscrepancyNoteType>(); types2.add(DiscrepancyNoteType.QUERY); request.setAttribute(DIS_TYPES, types2); } else {//Role.STUDYDIRECTOR Role.COORDINATOR List<ResolutionStatus> resStatuses = new ArrayList<ResolutionStatus>(ResolutionStatus.list); resStatuses.remove(ResolutionStatus.NOT_APPLICABLE); request.setAttribute(RES_STATUSES, resStatuses); ; request.setAttribute(WHICH_RES_STATUSES, "2"); } if (!fp.isSubmitted()) 
{ DiscrepancyNoteBean dnb = new DiscrepancyNoteBean(); if (subjectId > 0) { // BWP: this doesn't seem correct, because the SubjectId should // be the id for // the SubjectBean, different from StudySubjectBean StudySubjectDAO ssdao = new StudySubjectDAO(sm.getDataSource()); StudySubjectBean ssub = (StudySubjectBean) ssdao.findByPK(subjectId); dnb.setSubjectName(ssub.getName()); dnb.setSubjectId(ssub.getId()); dnb.setStudySub(ssub); StudyDAO studyDAO = new StudyDAO(sm.getDataSource()); int parentStudyForSubject = 0; StudyBean studyBeanSub = (StudyBean) studyDAO.findByPK(ssub.getStudyId()); if (null != studyBeanSub) { parentStudyForSubject = studyBeanSub.getParentStudyId(); } if (ssub.getStudyId() != currentStudy.getId() && currentStudy.getId() != parentStudyForSubject) { addPageMessage(noAccessMessage); throw new InsufficientPermissionException(Page.MENU_SERVLET, exceptionName, "1"); } } if (itemId > 0) { ItemBean item = (ItemBean) new ItemDAO(sm.getDataSource()).findByPK(itemId); dnb.setEntityName(item.getName()); request.setAttribute("item", item); } dnb.setEntityType(entityType); dnb.setColumn(column); dnb.setEntityId(entityId); dnb.setField(field); dnb.setParentDnId(parent.getId()); dnb.setCreatedDate(new Date()); // When a user is performing Data Entry, Initial Data Entry or // Double Data Entry and // have not received any validation warnings or messages for a // particular item, // they will see Annotation as the default type in the Add // Discrepancy note window. // When a user is performing Data Entry, Initial Data Entry or // Double Data Entry and they // have received a validation warning or message for a particular // item, // they can click on the flag and the default type should be Failed // Validation Check // When a user is viewing a CRF and they click on the flag icon, the // default type should be query. 
// when the type is query, we should also get the user id for the // person who completed data entry /* Mantis issue: tbh 08/31/2009 * 0004092: CRCs and Investigators allowed to close a Query CRCs and Investigators are allowed to choose Closed for a Query. they are also allowed to choose New. They should only be allowed to choose Updated or Resolution Proposed. */ // above extra business rule here, tbh if (parent.getId() == 0 || isNew) {// no parent, new note thread if (enteringData) { if (isInError) { dnb.setDiscrepancyNoteTypeId(DiscrepancyNoteType.FAILEDVAL.getId()); } else { dnb.setDiscrepancyNoteTypeId(DiscrepancyNoteType.ANNOTATION.getId()); dnb.setResolutionStatusId(ResolutionStatus.NOT_APPLICABLE.getId()); // >> tbh WHO bug: set an assigned user for the parent // note // dnb.setAssignedUser(ub); // dnb.setAssignedUserId(ub.getId()); // << tbh 08/2009 } if (isReasonForChange) { dnb.setDiscrepancyNoteTypeId(DiscrepancyNoteType.REASON_FOR_CHANGE.getId()); dnb.setResolutionStatusId(ResolutionStatus.NOT_APPLICABLE.getId()); } // << tbh 02/2010, trumps failed evaluation error checks // can we put this in admin editing request.setAttribute("autoView", "0"); // above set to automatically open up the user panel } else { // when the user is a CRC and is adding a note to the thread // it should default to Resolution Proposed, // and the assigned should be the user who logged the query, // NOT the one who is proposing the solution, tbh 02/2009 // if (currentRole.getRole().equals(Role.COORDINATOR)) { // dnb.setDiscrepancyNoteTypeId(DiscrepancyNoteType. 
// REASON_FOR_CHANGE.getId()); // request.setAttribute("autoView", "1"); // // above set to automatically open up the user panel // } else { dnb.setDiscrepancyNoteTypeId(DiscrepancyNoteType.QUERY.getId()); // remove this option for CRCs and Investigators //if (currentRole.getRole().equals(Role.RESEARCHASSISTANT) && currentStudy.getId() != currentStudy.getParentStudyId() if (currentRole.getRole().equals(Role.RESEARCHASSISTANT) || currentRole.getRole().equals(Role.RESEARCHASSISTANT2) || currentRole.getRole().equals(Role.INVESTIGATOR)) { request.setAttribute("autoView", "0"); } else { request.setAttribute("autoView", "1"); dnb.setAssignedUserId(preUserId); } // above set to automatically open up the user panel // } } } else if (parent.getDiscrepancyNoteTypeId() > 0) { dnb.setDiscrepancyNoteTypeId(parent.getDiscrepancyNoteTypeId()); // if it is a CRC then we should automatically propose a // solution, tbh // adding second rule here, tbh 08/2009 if ((currentRole.getRole().equals(Role.RESEARCHASSISTANT) || currentRole.getRole().equals(Role.RESEARCHASSISTANT2)) && currentStudy.getId() != currentStudy.getParentStudyId()) { dnb.setResolutionStatusId(ResolutionStatus.RESOLVED.getId()); request.setAttribute("autoView", "0"); // hide the panel, tbh } else { dnb.setResolutionStatusId(ResolutionStatus.UPDATED.getId()); } } dnb.setOwnerId(parent.getOwnerId()); String detailedDes = fp.getString("strErrMsg"); if (detailedDes != null) { dnb.setDetailedNotes(detailedDes); logger.debug("found strErrMsg: " + fp.getString("strErrMsg")); } // #4346 TBH 10/2009 //If the data entry form has not been saved yet, collecting info from parent page. 
dnb = getNoteInfo(dnb);// populate note infos if (dnb.getEventName() == null || dnb.getEventName().equals("")) { dnb.setEventName(fp.getString("eventName")); } if (dnb.getEventStart() == null) { dnb.setEventStart(fp.getDate("eventDate")); } if (dnb.getCrfName() == null || dnb.getCrfName().equals("")) { dnb.setCrfName(fp.getString("crfName")); } // // #4346 TBH 10/2009 request.setAttribute(DIS_NOTE, dnb); request.setAttribute("unlock", "0"); request.setAttribute(WRITE_TO_DB, writeToDB ? "1" : "0");//this should go from UI & here ArrayList userAccounts = this.generateUserAccounts(ub.getActiveStudyId(), subjectId); request.setAttribute(USER_ACCOUNTS, userAccounts); // ideally should be only two cases if ((currentRole.getRole().equals(Role.RESEARCHASSISTANT) || currentRole.getRole().equals(Role.RESEARCHASSISTANT2)) && currentStudy.getId() != currentStudy.getParentStudyId()) { // assigning back to OP, tbh request.setAttribute(USER_ACCOUNT_ID, Integer.valueOf(parent.getOwnerId()).toString()); logger.debug("assigned owner id: " + parent.getOwnerId()); } else if (dnb.getEventCRFId() > 0) { logger.debug("found a event crf id: " + dnb.getEventCRFId()); EventCRFDAO eventCrfDAO = new EventCRFDAO(sm.getDataSource()); EventCRFBean eventCrfBean = new EventCRFBean(); eventCrfBean = (EventCRFBean) eventCrfDAO.findByPK(dnb.getEventCRFId()); request.setAttribute(USER_ACCOUNT_ID, Integer.valueOf(eventCrfBean.getOwnerId()).toString()); logger.debug("assigned owner id: " + eventCrfBean.getOwnerId()); } else { // the end case } // set the user account id for the user who completed data entry forwardPage(Page.ADD_DISCREPANCY_NOTE); } else { FormDiscrepancyNotes noteTree = (FormDiscrepancyNotes) session .getAttribute(AddNewSubjectServlet.FORM_DISCREPANCY_NOTES_NAME); FormDiscrepancyNotes noteTree_RFC_REPEAT = (FormDiscrepancyNotes) session .getAttribute(FLAG_DISCREPANCY_RFC); ; if (noteTree_RFC_REPEAT == null) noteTree_RFC_REPEAT = new FormDiscrepancyNotes(); if (noteTree == null) { 
noteTree = new FormDiscrepancyNotes(); logger.debug("No note tree initailized in session"); } Validator v = new Validator(request); String description = fp.getString("description"); int typeId = fp.getInt("typeId"); int assignedUserAccountId = fp.getInt(SUBMITTED_USER_ACCOUNT_ID); int resStatusId = fp.getInt(RES_STATUS_ID); String detailedDes = fp.getString("detailedDes"); int sectionId = fp.getInt("sectionId"); DiscrepancyNoteBean note = new DiscrepancyNoteBean(); v.addValidation("description", Validator.NO_BLANKS); v.addValidation("description", Validator.LENGTH_NUMERIC_COMPARISON, NumericComparisonOperator.LESS_THAN_OR_EQUAL_TO, 255); v.addValidation("detailedDes", Validator.LENGTH_NUMERIC_COMPARISON, NumericComparisonOperator.LESS_THAN_OR_EQUAL_TO, 1000); v.addValidation("typeId", Validator.NO_BLANKS); HashMap errors = v.validate(); note.setDescription(description); note.setDetailedNotes(detailedDes); note.setOwner(ub); note.setOwnerId(ub.getId()); note.setCreatedDate(new Date()); note.setResolutionStatusId(resStatusId); note.setDiscrepancyNoteTypeId(typeId); note.setParentDnId(parent.getId()); if (typeId != DiscrepancyNoteType.ANNOTATION.getId() && typeId != DiscrepancyNoteType.FAILEDVAL.getId() && typeId != DiscrepancyNoteType.REASON_FOR_CHANGE.getId()) { if (assignedUserAccountId > 0) { note.setAssignedUserId(assignedUserAccountId); logger.debug("^^^ found assigned user id: " + assignedUserAccountId); } else { // a little bit of a workaround, should ideally be always from // the form note.setAssignedUserId(parent.getOwnerId()); logger.debug("found user assigned id, in the PARENT OWNER ID: " + parent.getOwnerId() + " note that user assgined id did not work: " + assignedUserAccountId); } } note.setField(field); if (DiscrepancyNoteType.ANNOTATION.getId() == note.getDiscrepancyNoteTypeId()) { updateStudyEvent(entityType, entityId); updateStudySubjectStatus(entityType, entityId); } if (DiscrepancyNoteType.ANNOTATION.getId() == note.getDiscrepancyNoteTypeId() || 
DiscrepancyNoteType.REASON_FOR_CHANGE.getId() == note.getDiscrepancyNoteTypeId()) { note.setResStatus(ResolutionStatus.NOT_APPLICABLE); note.setResolutionStatusId(ResolutionStatus.NOT_APPLICABLE.getId()); } if (DiscrepancyNoteType.FAILEDVAL.getId() == note.getDiscrepancyNoteTypeId() || DiscrepancyNoteType.QUERY.getId() == note.getDiscrepancyNoteTypeId()) { if (ResolutionStatus.NOT_APPLICABLE.getId() == note.getResolutionStatusId()) { Validator.addError(errors, RES_STATUS_ID, restext.getString("not_valid_res_status")); } } if (!parent.isActive()) { note.setEntityId(entityId); note.setEntityType(entityType); note.setColumn(column); } else { note.setEntityId(parent.getEntityId()); note.setEntityType(parent.getEntityType()); if (!StringUtils.isBlank(parent.getColumn())) { note.setColumn(parent.getColumn()); } else { note.setColumn(column); } note.setParentDnId(parent.getId()); } note.setStudyId(currentStudy.getId()); note = getNoteInfo(note);// populate note infos request.setAttribute(DIS_NOTE, note); request.setAttribute(WRITE_TO_DB, writeToDB ? 
"1" : "0");//this should go from UI & here ArrayList userAccounts = this.generateUserAccounts(ub.getActiveStudyId(), subjectId); request.setAttribute(USER_ACCOUNT_ID, Integer.valueOf(note.getAssignedUserId()).toString()); // formality more than anything else, we should go to say the note // is done Role r = currentRole.getRole(); if (r.equals(Role.MONITOR) || r.equals(Role.INVESTIGATOR) || r.equals(Role.RESEARCHASSISTANT) || r.equals(Role.RESEARCHASSISTANT2) || r.equals(Role.COORDINATOR)) { // investigator request.setAttribute("unlock", "1"); logger.debug("set UNLOCK to ONE"); } else { request.setAttribute("unlock", "0"); logger.debug("set UNLOCK to ZERO"); } request.setAttribute(USER_ACCOUNTS, userAccounts); if (errors.isEmpty()) { if (!writeToDB) { noteTree.addNote(field, note); noteTree.addIdNote(note.getEntityId(), field); noteTree_RFC_REPEAT.addNote(EVENT_CRF_ID + "_" + field, note); noteTree_RFC_REPEAT.addIdNote(note.getEntityId(), field); //-> catcher // FORM_DISCREPANCY_NOTES_NAME session.setAttribute(AddNewSubjectServlet.FORM_DISCREPANCY_NOTES_NAME, noteTree); session.setAttribute(FLAG_DISCREPANCY_RFC, noteTree_RFC_REPEAT); // /*Setting a marker to check later while saving administrative edited data. This is needed to make * sure the system flags error while changing data for items which already has a DiscrepanyNote*/ manageReasonForChangeState(session, eventCRFId + "_" + field); forwardPage(Page.ADD_DISCREPANCY_NOTE_DONE); } else { // if not creating a new thread(note), update exsiting notes // if necessary //if ("itemData".equalsIgnoreCase(entityType) && !isNew) { int pdnId = note != null ? 
note.getParentDnId() : 0; if (pdnId > 0) { logger.debug("Create:find parent note for item data:" + note.getEntityId()); DiscrepancyNoteBean pNote = (DiscrepancyNoteBean) dndao.findByPK(pdnId); logger.debug("setting DN owner id: " + pNote.getOwnerId()); note.setOwnerId(pNote.getOwnerId()); if (note.getDiscrepancyNoteTypeId() == pNote.getDiscrepancyNoteTypeId()) { if (note.getResolutionStatusId() != pNote.getResolutionStatusId()) { pNote.setResolutionStatusId(note.getResolutionStatusId()); dndao.update(pNote); } if (note.getAssignedUserId() != pNote.getAssignedUserId()) { pNote.setAssignedUserId(note.getAssignedUserId()); if (pNote.getAssignedUserId() > 0) { dndao.updateAssignedUser(pNote); } else { dndao.updateAssignedUserToNull(pNote); } } } } note = (DiscrepancyNoteBean) dndao.create(note); dndao.createMapping(note); request.setAttribute(DIS_NOTE, note); if (note.getParentDnId() == 0) { // see issue 2659 this is a new thread, we will create // two notes in this case, // This way one can be the parent that updates as the // status changes, but one also stays as New. note.setParentDnId(note.getId()); note = (DiscrepancyNoteBean) dndao.create(note); dndao.createMapping(note); } /*Setting a marker to check later while saving administrative edited data. 
This is needed to make * sure the system flags error while changing data for items which already has a DiscrepanyNote*/ //session.setAttribute(DataEntryServlet.NOTE_SUBMITTED, true); //session.setAttribute(DataEntryServlet.NOTE_SUBMITTED, true); // String field_id_for_RFC_hash = buildDiscrepancyNoteIdForRFCHash(eventCRFId,entityId, isGroup, field, ordinal_for_repeating_group_field); String field_id_for_RFC_hash = eventCRFId + "_" + field; manageReasonForChangeState(session, field_id_for_RFC_hash); logger.debug("found resolution status: " + note.getResolutionStatusId()); String email = fp.getString(EMAIL_USER_ACCOUNT); logger.debug("found email: " + email); if (note.getAssignedUserId() > 0 && "1".equals(email.trim()) && DiscrepancyNoteType.QUERY.getId() == note.getDiscrepancyNoteTypeId()) { logger.debug("++++++ found our way here: " + note.getDiscrepancyNoteTypeId() + " id number and " + note.getDisType().getName()); // generate email for user here StringBuffer message = new StringBuffer(); // generate message here UserAccountDAO userAccountDAO = new UserAccountDAO(sm.getDataSource()); ItemDAO itemDAO = new ItemDAO(sm.getDataSource()); ItemDataDAO iddao = new ItemDataDAO(sm.getDataSource()); ItemBean item = new ItemBean(); ItemDataBean itemData = new ItemDataBean(); SectionBean section = new SectionBean(); StudyDAO studyDAO = new StudyDAO(sm.getDataSource()); UserAccountBean assignedUser = (UserAccountBean) userAccountDAO .findByPK(note.getAssignedUserId()); String alertEmail = assignedUser.getEmail(); message.append(MessageFormat.format(respage.getString("mailDNHeader"), assignedUser.getFirstName(), assignedUser.getLastName())); message.append("<A HREF='" + SQLInitServlet.getField("sysURL.base") + "ViewNotes?module=submit&listNotes_f_discrepancyNoteBean.user=" + assignedUser.getName() + "&listNotes_f_entityName=" + note.getEntityName() + "'>" + SQLInitServlet.getField("sysURL.base") + "</A><BR/>"); message.append(respage.getString("you_received_this_from")); 
StudyBean study = (StudyBean) studyDAO.findByPK(note.getStudyId()); SectionDAO sectionDAO = new SectionDAO(sm.getDataSource()); if ("itemData".equalsIgnoreCase(entityType)) { itemData = (ItemDataBean) iddao.findByPK(note.getEntityId()); item = (ItemBean) itemDAO.findByPK(itemData.getItemId()); if (sectionId > 0) { section = (SectionBean) sectionDAO.findByPK(sectionId); } else { //Todo section should be initialized when sectionId = 0 } } message.append(respage.getString("email_body_separator")); message.append(respage.getString("disc_note_info")); message.append(respage.getString("email_body_separator")); message.append(MessageFormat.format(respage.getString("mailDNParameters1"), note.getDescription(), note.getDetailedNotes(), ub.getName())); message.append(respage.getString("email_body_separator")); message.append(respage.getString("entity_information")); message.append(respage.getString("email_body_separator")); message.append(MessageFormat.format(respage.getString("mailDNParameters2"), study.getName(), note.getSubjectName())); if (!("studySub".equalsIgnoreCase(entityType) || "subject".equalsIgnoreCase(entityType))) { message.append(MessageFormat.format(respage.getString("mailDNParameters3"), note.getEventName())); if (!"studyEvent".equalsIgnoreCase(note.getEntityType())) { message.append(MessageFormat.format(respage.getString("mailDNParameters4"), note.getCrfName())); if (!"eventCrf".equalsIgnoreCase(note.getEntityType())) { if (sectionId > 0) { message.append(MessageFormat.format(respage.getString("mailDNParameters5"), section.getName())); } message.append(MessageFormat.format(respage.getString("mailDNParameters6"), item.getName())); } } } message.append(respage.getString("email_body_separator")); message.append(MessageFormat.format(respage.getString("mailDNThanks"), study.getName())); message.append(respage.getString("email_body_separator")); message.append(respage.getString("disclaimer")); message.append(respage.getString("email_body_separator")); 
message.append(respage.getString("email_footer")); String emailBodyString = message.toString(); sendEmail(alertEmail.trim(), EmailEngine.getAdminEmail(), MessageFormat .format(respage.getString("mailDNSubject"), study.getName(), note.getEntityName()), emailBodyString, true, null, null, true); } else { logger.debug("did not send email, but did save DN"); } // addPageMessage( // "Your discrepancy note has been saved into database."); addPageMessage(respage.getString("note_saved_into_db")); addPageMessage(respage.getString("page_close_automatically")); forwardPage(Page.ADD_DISCREPANCY_NOTE_SAVE_DONE); } } else { if (parentId > 0) { if (note.getResolutionStatusId() == ResolutionStatus.NOT_APPLICABLE.getId()) { request.setAttribute("autoView", "0"); } } else { if (note.getDiscrepancyNoteTypeId() == DiscrepancyNoteType.QUERY.getId()) { request.setAttribute("autoView", "1"); } else { request.setAttribute("autoView", "0"); } } setInputMessages(errors); forwardPage(Page.ADD_DISCREPANCY_NOTE); } } }
From source file:org.telegram.messenger.MessagesController.java
/**
 * Fetches the update "difference" from the server — everything that happened since the locally
 * stored sync state (pts/date/qts) — and applies it: users/chats are cached, encrypted messages
 * are decrypted, new messages are grouped per dialog and pushed to the UI, and the advanced
 * sync state is persisted.
 *
 * <p>No-ops if a difference request is already in flight. Falls back to
 * {@code loadCurrentState()} when no local date state exists yet (first sync) or when the RPC
 * fails. If the server returns a {@code TL_updates_differenceSlice} (more data remains), this
 * method calls itself again to fetch the next slice.</p>
 *
 * <p>NOTE(review): work is handed off across several threads/queues (RPC callback, UI thread,
 * storage queue, stage queue); the nesting order below appears deliberate so that user/chat
 * caches and message-id corrections are committed before the new messages are processed —
 * confirm before reordering anything.</p>
 */
public void getDifference() {
    registerForPush(UserConfig.pushString);
    if (MessagesStorage.lastDateValue == 0) {
        // No stored sync state yet — bootstrap the full state instead of diffing.
        loadCurrentState();
        return;
    }
    if (gettingDifference) {
        // A difference request is already in flight; don't issue a second one.
        return;
    }
    if (!firstGettingTask) {
        getNewDeleteTask(null);
        firstGettingTask = true;
    }
    gettingDifference = true;
    // Ask the server for everything since the last known pts/date/qts.
    TLRPC.TL_updates_getDifference req = new TLRPC.TL_updates_getDifference();
    req.pts = MessagesStorage.lastPtsValue;
    req.date = MessagesStorage.lastDateValue;
    req.qts = MessagesStorage.lastQtsValue;
    FileLog.e("tmessages", "start getDifference with date = " + MessagesStorage.lastDateValue + " pts = "
            + MessagesStorage.lastPtsValue + " seq = " + MessagesStorage.lastSeqValue);
    if (ConnectionsManager.Instance.connectionState == 0) {
        // Flip the connection state to 3 and broadcast the change on the UI thread.
        // NOTE(review): 703 is presumably the connection-state-changed notification id — confirm
        // against NotificationCenter's other usages.
        ConnectionsManager.Instance.connectionState = 3;
        final int stateCopy = ConnectionsManager.Instance.connectionState;
        Utilities.RunOnUIThread(new Runnable() {
            @Override
            public void run() {
                NotificationCenter.Instance.postNotificationName(703, stateCopy);
            }
        });
    }
    ConnectionsManager.Instance.performRpc(req, new RPCRequest.RPCRequestDelegate() {
        @Override
        public void run(TLObject response, TLRPC.TL_error error) {
            gettingDifferenceAgain = false;
            if (error == null) {
                final TLRPC.updates_Difference res = (TLRPC.updates_Difference) response;
                // A "slice" means the server has more updates than fit in one response.
                gettingDifferenceAgain = res instanceof TLRPC.TL_updates_differenceSlice;
                // Index the returned users by id for message construction below.
                final HashMap<Integer, TLRPC.User> usersDict = new HashMap<Integer, TLRPC.User>();
                for (TLRPC.User user : res.users) {
                    usersDict.put(user.id, user);
                }
                // Pull message-id corrections out of other_updates; they are handled separately
                // on the storage queue. Removal while iterating uses the index/a-- idiom.
                final ArrayList<TLRPC.TL_updateMessageID> msgUpdates = new ArrayList<TLRPC.TL_updateMessageID>();
                if (!res.other_updates.isEmpty()) {
                    for (int a = 0; a < res.other_updates.size(); a++) {
                        TLRPC.Update upd = res.other_updates.get(a);
                        if (upd instanceof TLRPC.TL_updateMessageID) {
                            msgUpdates.add((TLRPC.TL_updateMessageID) upd);
                            res.other_updates.remove(a);
                            a--;
                        }
                    }
                }
                // Update the in-memory user/chat caches on the UI thread.
                Utilities.RunOnUIThread(new Runnable() {
                    @Override
                    public void run() {
                        for (TLRPC.User user : res.users) {
                            users.put(user.id, user);
                            if (user.id == UserConfig.clientUserId) {
                                UserConfig.currentUser = user;
                            }
                        }
                        for (TLRPC.Chat chat : res.chats) {
                            chats.put(chat.id, chat);
                        }
                    }
                });
                MessagesStorage.Instance.storageQueue.postRunnable(new Runnable() {
                    @Override
                    public void run() {
                        if (!msgUpdates.isEmpty()) {
                            // Map provisional (random) ids to the server-assigned ids, then tell
                            // the UI which in-flight messages were acknowledged.
                            final HashMap<Integer, Integer> corrected = new HashMap<Integer, Integer>();
                            for (TLRPC.TL_updateMessageID update : msgUpdates) {
                                Integer oldId = MessagesStorage.Instance
                                        .updateMessageStateAndId(update.random_id, null, update.id, 0, false);
                                if (oldId != null) {
                                    corrected.put(oldId, update.id);
                                }
                            }
                            if (!corrected.isEmpty()) {
                                Utilities.RunOnUIThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        for (HashMap.Entry<Integer, Integer> entry : corrected.entrySet()) {
                                            Integer oldId = entry.getKey();
                                            sendingMessages.remove(oldId);
                                            Integer newId = entry.getValue();
                                            NotificationCenter.Instance.postNotificationName(
                                                    messageReceivedByServer, oldId, newId);
                                        }
                                    }
                                });
                            }
                        }
                        Utilities.stageQueue.postRunnable(new Runnable() {
                            @Override
                            public void run() {
                                if (!res.new_messages.isEmpty() || !res.new_encrypted_messages.isEmpty()) {
                                    // Group new messages by dialog id for the UI update.
                                    final HashMap<Long, ArrayList<MessageObject>> messages = new HashMap<Long, ArrayList<MessageObject>>();
                                    // Decrypt secret-chat messages and fold them into the plain list.
                                    for (TLRPC.EncryptedMessage encryptedMessage : res.new_encrypted_messages) {
                                        TLRPC.Message message = decryptMessage(encryptedMessage);
                                        if (message != null) {
                                            res.new_messages.add(message);
                                        }
                                    }
                                    MessageObject lastMessage = null;
                                    for (TLRPC.Message message : res.new_messages) {
                                        MessageObject obj = new MessageObject(message, usersDict);
                                        // Dialog id convention: negative = chat, positive = user.
                                        long dialog_id = obj.messageOwner.dialog_id;
                                        if (dialog_id == 0) {
                                            if (obj.messageOwner.to_id.chat_id != 0) {
                                                dialog_id = -obj.messageOwner.to_id.chat_id;
                                            } else {
                                                dialog_id = obj.messageOwner.to_id.user_id;
                                            }
                                        }
                                        // Track the newest unread incoming message (used for an
                                        // in-app notification) — but only for the final, non-slice
                                        // response, and never for the dialog currently open.
                                        if (!(res instanceof TLRPC.TL_updates_differenceSlice)) {
                                            if ((dialog_id != openned_dialog_id
                                                    || ApplicationLoader.lastPauseTime != 0)
                                                    && !obj.messageOwner.out && obj.messageOwner.unread
                                                    && (lastMessage == null
                                                            || lastMessage.messageOwner.date < obj.messageOwner.date)) {
                                                lastMessage = obj;
                                            }
                                        }
                                        long uid;
                                        if (message.dialog_id != 0) {
                                            uid = message.dialog_id;
                                        } else {
                                            if (message.to_id.chat_id != 0) {
                                                uid = -message.to_id.chat_id;
                                            } else {
                                                // Messages we sent to ourselves land under the peer's id.
                                                if (message.to_id.user_id == UserConfig.clientUserId) {
                                                    message.to_id.user_id = message.from_id;
                                                }
                                                uid = message.to_id.user_id;
                                            }
                                        }
                                        ArrayList<MessageObject> arr = messages.get(uid);
                                        if (arr == null) {
                                            arr = new ArrayList<MessageObject>();
                                            messages.put(uid, arr);
                                        }
                                        arr.add(obj);
                                    }
                                    final MessageObject object = lastMessage;
                                    // Push the grouped messages into the UI and maybe notify.
                                    Utilities.RunOnUIThread(new Runnable() {
                                        @Override
                                        public void run() {
                                            for (HashMap.Entry<Long, ArrayList<MessageObject>> pair : messages
                                                    .entrySet()) {
                                                Long key = pair.getKey();
                                                ArrayList<MessageObject> value = pair.getValue();
                                                updateInterfaceWithMessages(key, value);
                                            }
                                            NotificationCenter.Instance.postNotificationName(dialogsNeedReload);
                                            if (object != null) {
                                                showInAppNotification(object);
                                            }
                                        }
                                    });
                                    // Persist messages, users and chats in one transaction.
                                    MessagesStorage.Instance.storageQueue.postRunnable(new Runnable() {
                                        @Override
                                        public void run() {
                                            MessagesStorage.Instance.startTransaction(false);
                                            MessagesStorage.Instance.putMessages(res.new_messages, false, false);
                                            MessagesStorage.Instance.putUsersAndChats(res.users, res.chats, false,
                                                    false);
                                            MessagesStorage.Instance.commitTransaction(false);
                                        }
                                    });
                                }
                                // Remaining (non-TL_updateMessageID) updates go through the
                                // regular update pipeline.
                                if (res != null && !res.other_updates.isEmpty()) {
                                    processUpdateArray(res.other_updates, res.users, res.chats);
                                }
                                gettingDifference = false;
                                if (res instanceof TLRPC.TL_updates_difference) {
                                    // Final response: adopt the new state and drain any queued updates.
                                    MessagesStorage.lastSeqValue = res.state.seq;
                                    MessagesStorage.lastDateValue = res.state.date;
                                    MessagesStorage.lastPtsValue = res.state.pts;
                                    MessagesStorage.lastQtsValue = res.state.qts;
                                    ConnectionsManager.Instance.connectionState = 0;
                                    processUpdatesQueue(true);
                                } else if (res instanceof TLRPC.TL_updates_differenceSlice) {
                                    // Partial response: adopt the intermediate state and fetch more.
                                    MessagesStorage.lastSeqValue = res.intermediate_state.seq;
                                    MessagesStorage.lastDateValue = res.intermediate_state.date;
                                    MessagesStorage.lastPtsValue = res.intermediate_state.pts;
                                    MessagesStorage.lastQtsValue = res.intermediate_state.qts;
                                    gettingDifferenceAgain = true;
                                    getDifference();
                                } else if (res instanceof TLRPC.TL_updates_differenceEmpty) {
                                    // Nothing happened server-side; only seq/date are provided.
                                    MessagesStorage.lastSeqValue = res.seq;
                                    MessagesStorage.lastDateValue = res.date;
                                    ConnectionsManager.Instance.connectionState = 0;
                                    processUpdatesQueue(true);
                                }
                                MessagesStorage.Instance.saveDiffParams(MessagesStorage.lastSeqValue,
                                        MessagesStorage.lastPtsValue, MessagesStorage.lastDateValue,
                                        MessagesStorage.lastQtsValue);
                                FileLog.e("tmessages",
                                        "received difference with date = " + MessagesStorage.lastDateValue + " pts = "
                                                + MessagesStorage.lastPtsValue + " seq = "
                                                + MessagesStorage.lastSeqValue);
                                FileLog.e("tmessages",
                                        "messages = " + res.new_messages.size() + " users = " + res.users.size()
                                                + " chats = " + res.chats.size() + " other updates = "
                                                + res.other_updates.size());
                            }
                        });
                    }
                });
            } else {
                // On error, give up on diffing and reload the full current state instead.
                gettingDifference = false;
                loadCurrentState();
                FileLog.e("tmessages", "get difference error, don't know what to do :(");
            }
        }
    }, null, true, RPCRequest.RPCRequestClassGeneric);
}
From source file:org.kuali.rice.krad.uif.element.GroupValidationMessages.java
/** * Adds dataAttributes that are appropriate for group level validationMessages data. * * <p>This data is used by the validation framework clientside. Some special handling at this level includes * retrieving the groups and fields generated by different collection layouts and handling page and fieldGroup * scenarios slightly differently due to the nature of how they are built out in the js.</p> * * @param parent component that is the parent of the validation messages *///from ww w.ja v a 2s. c o m protected void addValidationMessageDataAttributes(Component parent) { HashMap<String, Object> validationMessagesDataAttributes = new HashMap<String, Object>(); Map<String, Object> parentContext = parent.getContext(); Object parentContainer = parentContext == null ? null : parentContext.get(UifConstants.ContextVariableNames.PARENT); List<? extends Component> items = ((Container) parent).getItems(); boolean skipSections = false; boolean isTableCollection = false; // Handle the special CollectionGroup case by getting the StackedGroups and DataFields generated by them if (parent instanceof CollectionGroup && ((CollectionGroup) parent).getLayoutManager() instanceof StackedLayoutManager) { items = ((StackedLayoutManager) ((CollectionGroup) parent).getLayoutManager()).getStackedGroups(); } else if ((parent instanceof CollectionGroup && ((CollectionGroup) parent).getLayoutManager() instanceof TableLayoutManager) || parent instanceof LightTable) { // order is not needed so null items items = null; skipSections = true; isTableCollection = true; } List<String> sectionIds = new ArrayList<String>(); List<String> fieldOrder = new ArrayList<String>(); collectIdsFromItems(items, sectionIds, fieldOrder, skipSections); boolean pageLevel = false; boolean forceShow = false; boolean showPageSummaryHeader = true; if (parent instanceof PageGroup) { pageLevel = true; forceShow = true; parent.addDataAttribute(UifConstants.DataAttributes.SERVER_MESSAGES, 
Boolean.toString(GlobalVariables.getMessageMap().hasMessages())); if (this instanceof PageValidationMessages) { showPageSummaryHeader = ((PageValidationMessages) this).isShowPageSummaryHeader(); } } else if (parentContainer instanceof FieldGroup) { Map<String, String> parentFieldGroupDataAttributes = ((FieldGroup) parentContainer).getDataAttributes(); String role = parentFieldGroupDataAttributes == null ? null : parentFieldGroupDataAttributes.get(UifConstants.DataAttributes.ROLE); if (StringUtils.isNotBlank(role) && role.equals("detailsFieldGroup")) { forceShow = false; } else { //note this means container of the parent is a FieldGroup forceShow = true; } } boolean hasMessages = false; if (!this.getErrors().isEmpty() || !this.getWarnings().isEmpty() || !this.getInfos().isEmpty()) { hasMessages = true; } Map<String, String> dataDefaults = (Map<String, String>) (KRADServiceLocatorWeb.getDataDictionaryService() .getDictionaryBean("Uif-GroupValidationMessages-DataDefaults")); //add necessary data attributes to map //display related this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.SUMMARIZE, true); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.DISPLAY_MESSAGES, this.isDisplayMessages()); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.CLOSEABLE, this.isCloseable()); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.COLLAPSE_FIELD_MESSAGES, collapseAdditionalFieldLinkMessages); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.SHOW_PAGE_SUMMARY_HEADER, showPageSummaryHeader); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.DISPLAY_LABEL, displayFieldLabelWithMessages); 
this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.DISPLAY_HEADER_SUMMARY, displayHeaderMessageSummary); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.IS_TABLE_COLLECTION, isTableCollection); //options this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.HAS_OWN_MESSAGES, hasMessages); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.PAGE_LEVEL, pageLevel); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.FORCE_SHOW, forceShow); //order related this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.SECTIONS, sectionIds); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.ORDER, fieldOrder); //server messages this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.SERVER_ERRORS, ScriptUtils.escapeHtml(this.getErrors())); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.SERVER_WARNINGS, ScriptUtils.escapeHtml(this.getWarnings())); this.addValidationDataSettingsValue(validationMessagesDataAttributes, dataDefaults, UifConstants.DataAttributes.SERVER_INFO, ScriptUtils.escapeHtml(this.getInfos())); if (!validationMessagesDataAttributes.isEmpty()) { parent.addScriptDataAttribute(UifConstants.DataAttributes.VALIDATION_MESSAGES, ScriptUtils.translateValue(validationMessagesDataAttributes)); } }
From source file:net.daboross.bukkitdev.skywars.config.RandomChestConfiguration.java
/**
 * Loads and validates chests.yml into {@code levels}.
 *
 * <p>Steps: copy the bundled default file if none exists on disk; reject configs with a
 * version greater than 1; if either the {@code levels} or {@code items} section is missing or
 * empty, splice in the corresponding section from the bundled default config and save the file
 * back; then parse each level's item-value/chance and its item list, cross-checking that every
 * level has both a levels entry and an items entry.</p>
 *
 * @throws IOException               if reading/writing chests.yml fails
 * @throws InvalidConfigurationException if the file isn't valid YAML or declares a future version
 * @throws SkyConfigurationException for any semantic problem in the config (missing keys,
 *                                   malformed items, unmatched levels)
 */
private void load() throws IOException, InvalidConfigurationException, SkyConfigurationException {
    SkyStatic.debug("[RandomChests] Loading chests.yml");
    Path chestsFile = plugin.getDataFolder().toPath().resolve("chests.yml");
    if (!Files.exists(chestsFile)) {
        // First run: write the bundled default chests.yml to the data folder.
        plugin.saveResource("chests.yml", true);
    }
    YamlConfiguration config = new YamlConfiguration();
    config.load(chestsFile.toFile());
    int version = config.getInt("version", 1);
    if (version > 1) {
        throw new InvalidConfigurationException("Future version in chests.yml!");
    }
    config.set("version", 1);
    levels.clear();
    // Levels parsed from the "levels" section that have not yet been matched with an "items" list.
    HashMap<String, ChestLevel> incompleteLevels = new HashMap<>();
    ConfigurationSection levelsSection = config.getConfigurationSection("levels");
    ConfigurationSection itemsSection = config.getConfigurationSection("items");
    if (levelsSection == null || itemsSection == null || levelsSection.getKeys(false).isEmpty()
            || itemsSection.getKeys(false).isEmpty()) {
        // One or both sections are absent/empty: pull them from the bundled default resource
        // and write the repaired config back to disk.
        YamlConfiguration defaultConfig = new YamlConfiguration();
        try (InputStream stream = plugin.getResourceAsStream("chests.yml");
                Reader reader = new InputStreamReader(stream);
                BufferedReader bufferedReader = new BufferedReader(reader)) {
            defaultConfig.load(bufferedReader);
        }
        if (levelsSection == null || levelsSection.getKeys(false).isEmpty()) {
            levelsSection = defaultConfig.getConfigurationSection("levels");
            config.set("levels", levelsSection);
        }
        if (itemsSection == null || itemsSection.getKeys(false).isEmpty()) {
            itemsSection = defaultConfig.getConfigurationSection("items");
            config.set("items", itemsSection);
        }
        config.options().header(defaultConfig.options().header());
        config.save(chestsFile.toFile());
    }
    // If even the bundled default lacked a section, bail out with a warning instead of failing.
    if (levelsSection == null) {
        plugin.getLogger().log(Level.WARNING, "Not loading chests.yml: no levels section found");
        return;
    }
    if (itemsSection == null) {
        plugin.getLogger().log(Level.WARNING, "Not loading chests.yml: no items section found");
        return;
    }
    // Pass 1: read each level's item-value and chance; the item list is filled in by pass 2.
    for (String key : levelsSection.getKeys(false)) {
        if (levelsSection.isConfigurationSection(key)) {
            // Re-fetching the subsection (rather than walking values) lets us use typed
            // getters and report accurate paths in error messages.
            ConfigurationSection levelSection = levelsSection.getConfigurationSection(key);
            if (!levelSection.isInt("item-value")) {
                throw new SkyConfigurationException(
                        "Invalid chests.yml: level `" + key + "` is missing item-value!");
            }
            if (!levelSection.isInt("chance")) {
                throw new SkyConfigurationException(
                        "Invalid chests.yml: level `" + key + "` is missing chance!");
            }
            int itemValue = levelSection.getInt("item-value");
            int chance = levelSection.getInt("chance");
            // Item list deliberately null here — completed in the items pass below.
            incompleteLevels.put(key, new ChestLevel(key, itemValue, chance, null));
        } else {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: non-map thing in levels: " + levelsSection.get(key));
        }
    }
    // Pass 2: parse each level's item list and pair it with the level parsed above.
    for (String key : itemsSection.getKeys(false)) {
        if (itemsSection.isList(key)) {
            // remove() doubles as the pairing check: whatever is left afterwards had no items.
            ChestLevel incompleteLevel = incompleteLevels.remove(key);
            if (incompleteLevel == null) {
                throw new SkyConfigurationException("Invalid chests.yml: level `" + key
                        + "` has a section under items, but no section under levels!");
            }
            List<?> objectList = itemsSection.getList(key);
            List<SkyKitItem> itemList = new ArrayList<>(objectList.size());
            for (Object o : objectList) {
                if (o instanceof Map) {
                    // Full item definition as a map — delegate to the kit decoder.
                    @SuppressWarnings("unchecked")
                    SkyKitItem item = SkyKitDecoder.decodeItem((Map<String, Object>) o);
                    itemList.add(item);
                } else if (o instanceof String) {
                    // Shorthand form: "MATERIAL" or "MATERIAL,amount".
                    String string = o.toString();
                    String materialString;
                    int amount;
                    if (!string.contains(",")) {
                        materialString = string;
                        amount = 1;
                    } else {
                        String[] split = string.split(",", 2);
                        materialString = split[0];
                        try {
                            amount = Integer.parseInt(split[1]);
                        } catch (NumberFormatException ex) {
                            throw new SkyConfigurationException(
                                    "Invalid amount number for item in chests.yml: not an integer: " + split[1]);
                        }
                    }
                    Material material = Material.matchMaterial(materialString);
                    if (material == null) {
                        throw new SkyConfigurationException("Error in chests.yml: the type string '" + materialString
                                + "' is invalid. Check https://dabo.guru/projects/skywars/configuring-kits for a list of valid material names (at the bottom of the page).");
                    }
                    itemList.add(new SkyKitItemConfig(material, amount, null, null));
                } else {
                    throw new SkyConfigurationException(
                            "Invalid thing in items list for level in chests.yml: " + o);
                }
            }
            if (itemList.isEmpty()) {
                throw new SkyConfigurationException(
                        "Invalid chests.yml: level `" + key + "` items list is empty!");
            }
            levels.add(new ChestLevel(key, incompleteLevel.itemValue, incompleteLevel.chance, itemList));
        } else {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: non-list thing in items: " + itemsSection.get(key));
        }
    }
    // Any level still unmatched here appeared under "levels" but never under "items".
    if (!incompleteLevels.isEmpty()) {
        if (incompleteLevels.size() == 1) {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: level " + incompleteLevels.keySet().iterator().next()
                            + " has a section under levels, but no section under items!");
        } else {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: multiple levels (" + new ArrayList<>(incompleteLevels.keySet())
                            + ") have sections under levels but no sections under items!");
        }
    }
}
From source file:com.cloud.hypervisor.vmware.resource.VmwareResource.java
protected StartAnswer execute(StartCommand cmd) { if (s_logger.isInfoEnabled()) { s_logger.info("Executing resource StartCommand: " + _gson.toJson(cmd)); }// ww w .j a v a 2 s . co m VirtualMachineTO vmSpec = cmd.getVirtualMachine(); boolean vmAlreadyExistsInVcenter = false; String existingVmName = null; VirtualMachineFileInfo existingVmFileInfo = null; VirtualMachineFileLayoutEx existingVmFileLayout = null; Pair<String, String> names = composeVmNames(vmSpec); String vmInternalCSName = names.first(); String vmNameOnVcenter = names.second(); String dataDiskController = vmSpec.getDetails().get(VmDetailConstants.DATA_DISK_CONTROLLER); String rootDiskController = vmSpec.getDetails().get(VmDetailConstants.ROOT_DISK_CONTROLLER); // If root disk controller is scsi, then data disk controller would also be scsi instead of using 'osdefault' // This helps avoid mix of different scsi subtype controllers in instance. if (DiskControllerType.lsilogic == DiskControllerType.getType(rootDiskController)) { dataDiskController = DiskControllerType.scsi.toString(); } // Validate the controller types dataDiskController = DiskControllerType.getType(dataDiskController).toString(); rootDiskController = DiskControllerType.getType(rootDiskController).toString(); if (DiskControllerType.getType(rootDiskController) == DiskControllerType.none) { throw new CloudRuntimeException("Invalid root disk controller detected : " + rootDiskController); } if (DiskControllerType.getType(dataDiskController) == DiskControllerType.none) { throw new CloudRuntimeException("Invalid data disk controller detected : " + dataDiskController); } Pair<String, String> controllerInfo = new Pair<String, String>(rootDiskController, dataDiskController); Boolean systemVm = vmSpec.getType().isUsedBySystem(); // Thus, vmInternalCSName always holds i-x-y, the cloudstack generated internal VM name. 
VmwareContext context = getServiceContext(); DatacenterMO dcMo = null; try { VmwareManager mgr = context.getStockObject(VmwareManager.CONTEXT_STOCK_NAME); VmwareHypervisorHost hyperHost = getHyperHost(context); dcMo = new DatacenterMO(hyperHost.getContext(), hyperHost.getHyperHostDatacenter()); // Validate VM name is unique in Datacenter VirtualMachineMO vmInVcenter = dcMo.checkIfVmAlreadyExistsInVcenter(vmNameOnVcenter, vmInternalCSName); if (vmInVcenter != null) { vmAlreadyExistsInVcenter = true; String msg = "VM with name: " + vmNameOnVcenter + " already exists in vCenter."; s_logger.error(msg); throw new Exception(msg); } String guestOsId = translateGuestOsIdentifier(vmSpec.getArch(), vmSpec.getOs(), vmSpec.getPlatformEmulator()).value(); DiskTO[] disks = validateDisks(vmSpec.getDisks()); assert (disks.length > 0); NicTO[] nics = vmSpec.getNics(); HashMap<String, Pair<ManagedObjectReference, DatastoreMO>> dataStoresDetails = inferDatastoreDetailsFromDiskInfo( hyperHost, context, disks, cmd); if ((dataStoresDetails == null) || (dataStoresDetails.isEmpty())) { String msg = "Unable to locate datastore details of the volumes to be attached"; s_logger.error(msg); throw new Exception(msg); } DatastoreMO dsRootVolumeIsOn = getDatastoreThatRootDiskIsOn(dataStoresDetails, disks); if (dsRootVolumeIsOn == null) { String msg = "Unable to locate datastore details of root volume"; s_logger.error(msg); throw new Exception(msg); } VirtualMachineDiskInfoBuilder diskInfoBuilder = null; VirtualMachineMO vmMo = hyperHost.findVmOnHyperHost(vmInternalCSName); DiskControllerType systemVmScsiControllerType = DiskControllerType.lsilogic; int firstScsiControllerBusNum = 0; int numScsiControllerForSystemVm = 1; boolean hasSnapshot = false; if (vmMo != null) { s_logger.info("VM " + vmInternalCSName + " already exists, tear down devices for reconfiguration"); if (getVmPowerState(vmMo) != PowerState.PowerOff) vmMo.safePowerOff(_shutdownWaitMs); // retrieve disk information before we tear 
down diskInfoBuilder = vmMo.getDiskInfoBuilder(); hasSnapshot = vmMo.hasSnapshot(); if (!hasSnapshot) vmMo.tearDownDevices(new Class<?>[] { VirtualDisk.class, VirtualEthernetCard.class }); else vmMo.tearDownDevices(new Class<?>[] { VirtualEthernetCard.class }); if (systemVm) { ensureScsiDiskControllers(vmMo, systemVmScsiControllerType.toString(), numScsiControllerForSystemVm, firstScsiControllerBusNum); } else { ensureDiskControllers(vmMo, controllerInfo); } } else { ManagedObjectReference morDc = hyperHost.getHyperHostDatacenter(); assert (morDc != null); vmMo = hyperHost.findVmOnPeerHyperHost(vmInternalCSName); if (vmMo != null) { if (s_logger.isInfoEnabled()) { s_logger.info("Found vm " + vmInternalCSName + " at other host, relocate to " + hyperHost.getHyperHostName()); } takeVmFromOtherHyperHost(hyperHost, vmInternalCSName); if (getVmPowerState(vmMo) != PowerState.PowerOff) vmMo.safePowerOff(_shutdownWaitMs); diskInfoBuilder = vmMo.getDiskInfoBuilder(); hasSnapshot = vmMo.hasSnapshot(); if (!hasSnapshot) vmMo.tearDownDevices(new Class<?>[] { VirtualDisk.class, VirtualEthernetCard.class }); else vmMo.tearDownDevices(new Class<?>[] { VirtualEthernetCard.class }); if (systemVm) { // System volumes doesn't require more than 1 SCSI controller as there is no requirement for data volumes. ensureScsiDiskControllers(vmMo, systemVmScsiControllerType.toString(), numScsiControllerForSystemVm, firstScsiControllerBusNum); } else { ensureDiskControllers(vmMo, controllerInfo); } } else { // If a VM with the same name is found in a different cluster in the DC, unregister the old VM and configure a new VM (cold-migration). VirtualMachineMO existingVmInDc = dcMo.findVm(vmInternalCSName); if (existingVmInDc != null) { s_logger.debug("Found VM: " + vmInternalCSName + " on a host in a different cluster. 
Unregistering the exisitng VM."); existingVmName = existingVmInDc.getName(); existingVmFileInfo = existingVmInDc.getFileInfo(); existingVmFileLayout = existingVmInDc.getFileLayout(); existingVmInDc.unregisterVm(); } Pair<ManagedObjectReference, DatastoreMO> rootDiskDataStoreDetails = null; for (DiskTO vol : disks) { if (vol.getType() == Volume.Type.ROOT) { Map<String, String> details = vol.getDetails(); boolean managed = false; if (details != null) { managed = Boolean.parseBoolean(details.get(DiskTO.MANAGED)); } if (managed) { String datastoreName = VmwareResource.getDatastoreName(details.get(DiskTO.IQN)); rootDiskDataStoreDetails = dataStoresDetails.get(datastoreName); } else { DataStoreTO primaryStore = vol.getData().getDataStore(); rootDiskDataStoreDetails = dataStoresDetails.get(primaryStore.getUuid()); } } } assert (vmSpec.getMinSpeed() != null) && (rootDiskDataStoreDetails != null); boolean vmFolderExists = rootDiskDataStoreDetails.second().folderExists( String.format("[%s]", rootDiskDataStoreDetails.second().getName()), vmNameOnVcenter); String vmxFileFullPath = dsRootVolumeIsOn.searchFileInSubFolders(vmNameOnVcenter + ".vmx", false); if (vmFolderExists && vmxFileFullPath != null) { // VM can be registered only if .vmx is present. registerVm(vmNameOnVcenter, dsRootVolumeIsOn); vmMo = hyperHost.findVmOnHyperHost(vmInternalCSName); tearDownVm(vmMo); } else if (!hyperHost.createBlankVm(vmNameOnVcenter, vmInternalCSName, vmSpec.getCpus(), vmSpec.getMaxSpeed().intValue(), getReservedCpuMHZ(vmSpec), vmSpec.getLimitCpuUse(), (int) (vmSpec.getMaxRam() / (1024 * 1024)), getReservedMemoryMb(vmSpec), guestOsId, rootDiskDataStoreDetails.first(), false, controllerInfo, systemVm)) { throw new Exception("Failed to create VM. vmName: " + vmInternalCSName); } } vmMo = hyperHost.findVmOnHyperHost(vmInternalCSName); if (vmMo == null) { throw new Exception( "Failed to find the newly create or relocated VM. 
vmName: " + vmInternalCSName); } } int totalChangeDevices = disks.length + nics.length; DiskTO volIso = null; if (vmSpec.getType() != VirtualMachine.Type.User) { // system VM needs a patch ISO totalChangeDevices++; } else { volIso = getIsoDiskTO(disks); if (volIso == null) totalChangeDevices++; } VirtualMachineConfigSpec vmConfigSpec = new VirtualMachineConfigSpec(); VmwareHelper.setBasicVmConfig(vmConfigSpec, vmSpec.getCpus(), vmSpec.getMaxSpeed(), getReservedCpuMHZ(vmSpec), (int) (vmSpec.getMaxRam() / (1024 * 1024)), getReservedMemoryMb(vmSpec), guestOsId, vmSpec.getLimitCpuUse()); // Check for multi-cores per socket settings int numCoresPerSocket = 1; String coresPerSocket = vmSpec.getDetails().get("cpu.corespersocket"); if (coresPerSocket != null) { String apiVersion = HypervisorHostHelper.getVcenterApiVersion(vmMo.getContext()); // Property 'numCoresPerSocket' is supported since vSphere API 5.0 if (apiVersion.compareTo("5.0") >= 0) { numCoresPerSocket = NumbersUtil.parseInt(coresPerSocket, 1); vmConfigSpec.setNumCoresPerSocket(numCoresPerSocket); } } // Check for hotadd settings vmConfigSpec.setMemoryHotAddEnabled(vmMo.isMemoryHotAddSupported(guestOsId)); String hostApiVersion = ((HostMO) hyperHost).getHostAboutInfo().getApiVersion(); if (numCoresPerSocket > 1 && hostApiVersion.compareTo("5.0") < 0) { s_logger.warn( "Dynamic scaling of CPU is not supported for Virtual Machines with multi-core vCPUs in case of ESXi hosts 4.1 and prior. 
Hence CpuHotAdd will not be" + " enabled for Virtual Machine: " + vmInternalCSName); vmConfigSpec.setCpuHotAddEnabled(false); } else { vmConfigSpec.setCpuHotAddEnabled(vmMo.isCpuHotAddSupported(guestOsId)); } configNestedHVSupport(vmMo, vmSpec, vmConfigSpec); VirtualDeviceConfigSpec[] deviceConfigSpecArray = new VirtualDeviceConfigSpec[totalChangeDevices]; int i = 0; int ideUnitNumber = 0; int scsiUnitNumber = 0; int nicUnitNumber = 0; int ideControllerKey = vmMo.getIDEDeviceControllerKey(); int scsiControllerKey = vmMo.getGenericScsiDeviceControllerKeyNoException(); int controllerKey; // // Setup ISO device // // prepare systemvm patch ISO if (vmSpec.getType() != VirtualMachine.Type.User) { // attach ISO (for patching of system VM) Pair<String, Long> secStoreUrlAndId = mgr.getSecondaryStorageStoreUrlAndId(Long.parseLong(_dcId)); String secStoreUrl = secStoreUrlAndId.first(); Long secStoreId = secStoreUrlAndId.second(); if (secStoreUrl == null) { String msg = "secondary storage for dc " + _dcId + " is not ready yet?"; throw new Exception(msg); } mgr.prepareSecondaryStorageStore(secStoreUrl, secStoreId); ManagedObjectReference morSecDs = prepareSecondaryDatastoreOnHost(secStoreUrl); if (morSecDs == null) { String msg = "Failed to prepare secondary storage on host, secondary store url: " + secStoreUrl; throw new Exception(msg); } DatastoreMO secDsMo = new DatastoreMO(hyperHost.getContext(), morSecDs); deviceConfigSpecArray[i] = new VirtualDeviceConfigSpec(); Pair<VirtualDevice, Boolean> isoInfo = VmwareHelper.prepareIsoDevice(vmMo, String.format("[%s] systemvm/%s", secDsMo.getName(), mgr.getSystemVMIsoFileNameOnDatastore()), secDsMo.getMor(), true, true, ideUnitNumber++, i + 1); deviceConfigSpecArray[i].setDevice(isoInfo.first()); if (isoInfo.second()) { if (s_logger.isDebugEnabled()) s_logger.debug("Prepare ISO volume at new device " + _gson.toJson(isoInfo.first())); deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.ADD); } else { if 
(s_logger.isDebugEnabled()) s_logger.debug("Prepare ISO volume at existing device " + _gson.toJson(isoInfo.first())); deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.EDIT); } } else { // Note: we will always plug a CDROM device if (volIso != null) { TemplateObjectTO iso = (TemplateObjectTO) volIso.getData(); if (iso.getPath() != null && !iso.getPath().isEmpty()) { DataStoreTO imageStore = iso.getDataStore(); if (!(imageStore instanceof NfsTO)) { s_logger.debug("unsupported protocol"); throw new Exception("unsupported protocol"); } NfsTO nfsImageStore = (NfsTO) imageStore; String isoPath = nfsImageStore.getUrl() + File.separator + iso.getPath(); Pair<String, ManagedObjectReference> isoDatastoreInfo = getIsoDatastoreInfo(hyperHost, isoPath); assert (isoDatastoreInfo != null); assert (isoDatastoreInfo.second() != null); deviceConfigSpecArray[i] = new VirtualDeviceConfigSpec(); Pair<VirtualDevice, Boolean> isoInfo = VmwareHelper.prepareIsoDevice(vmMo, isoDatastoreInfo.first(), isoDatastoreInfo.second(), true, true, ideUnitNumber++, i + 1); deviceConfigSpecArray[i].setDevice(isoInfo.first()); if (isoInfo.second()) { if (s_logger.isDebugEnabled()) s_logger.debug("Prepare ISO volume at new device " + _gson.toJson(isoInfo.first())); deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.ADD); } else { if (s_logger.isDebugEnabled()) s_logger.debug( "Prepare ISO volume at existing device " + _gson.toJson(isoInfo.first())); deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.EDIT); } } } else { deviceConfigSpecArray[i] = new VirtualDeviceConfigSpec(); Pair<VirtualDevice, Boolean> isoInfo = VmwareHelper.prepareIsoDevice(vmMo, null, null, true, true, ideUnitNumber++, i + 1); deviceConfigSpecArray[i].setDevice(isoInfo.first()); if (isoInfo.second()) { if (s_logger.isDebugEnabled()) s_logger.debug( "Prepare ISO volume at existing device " + _gson.toJson(isoInfo.first())); 
deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.ADD); } else { if (s_logger.isDebugEnabled()) s_logger.debug( "Prepare ISO volume at existing device " + _gson.toJson(isoInfo.first())); deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.EDIT); } } } i++; // // Setup ROOT/DATA disk devices // DiskTO[] sortedDisks = sortVolumesByDeviceId(disks); for (DiskTO vol : sortedDisks) { if (vol.getType() == Volume.Type.ISO) continue; VirtualMachineDiskInfo matchingExistingDisk = getMatchingExistingDisk(diskInfoBuilder, vol, hyperHost, context); controllerKey = getDiskController(matchingExistingDisk, vol, vmSpec, ideControllerKey, scsiControllerKey); String diskController = getDiskController(vmMo, matchingExistingDisk, vol, new Pair<String, String>(rootDiskController, dataDiskController)); if (DiskControllerType.getType(diskController) == DiskControllerType.osdefault) { diskController = vmMo.getRecommendedDiskController(null); } if (DiskControllerType.getType(diskController) == DiskControllerType.ide) { controllerKey = vmMo.getIDEControllerKey(ideUnitNumber); if (vol.getType() == Volume.Type.DATADISK) { // Could be result of flip due to user configured setting or "osdefault" for data disks // Ensure maximum of 2 data volumes over IDE controller, 3 includeing root volume if (vmMo.getNumberOfVirtualDisks() > 3) { throw new CloudRuntimeException("Found more than 3 virtual disks attached to this VM [" + vmMo.getVmName() + "]. Unable to implement the disks over " + diskController + " controller, as maximum number of devices supported over IDE controller is 4 includeing CDROM device."); } } } else { controllerKey = vmMo.getScsiDiskControllerKeyNoException(diskController); if (controllerKey == -1) { // This may happen for ROOT legacy VMs which doesn't have recommended disk controller when global configuration parameter 'vmware.root.disk.controller' is set to "osdefault" // Retrieve existing controller and use. 
Ternary<Integer, Integer, DiskControllerType> vmScsiControllerInfo = vmMo .getScsiControllerInfo(); DiskControllerType existingControllerType = vmScsiControllerInfo.third(); controllerKey = vmMo.getScsiDiskControllerKeyNoException(existingControllerType.toString()); } } if (!hasSnapshot) { deviceConfigSpecArray[i] = new VirtualDeviceConfigSpec(); VolumeObjectTO volumeTO = (VolumeObjectTO) vol.getData(); DataStoreTO primaryStore = volumeTO.getDataStore(); Map<String, String> details = vol.getDetails(); boolean managed = false; String iScsiName = null; if (details != null) { managed = Boolean.parseBoolean(details.get(DiskTO.MANAGED)); iScsiName = details.get(DiskTO.IQN); } // if the storage is managed, iScsiName should not be null String datastoreName = managed ? VmwareResource.getDatastoreName(iScsiName) : primaryStore.getUuid(); Pair<ManagedObjectReference, DatastoreMO> volumeDsDetails = dataStoresDetails .get(datastoreName); assert (volumeDsDetails != null); String[] diskChain = syncDiskChain(dcMo, vmMo, vmSpec, vol, matchingExistingDisk, dataStoresDetails); if (controllerKey == scsiControllerKey && VmwareHelper.isReservedScsiDeviceNumber(scsiUnitNumber)) scsiUnitNumber++; VirtualDevice device = VmwareHelper.prepareDiskDevice(vmMo, null, controllerKey, diskChain, volumeDsDetails.first(), (controllerKey == vmMo.getIDEControllerKey(ideUnitNumber)) ? 
((ideUnitNumber++) % VmwareHelper.MAX_IDE_CONTROLLER_COUNT) : scsiUnitNumber++, i + 1); deviceConfigSpecArray[i].setDevice(device); deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.ADD); if (s_logger.isDebugEnabled()) s_logger.debug("Prepare volume at new device " + _gson.toJson(device)); i++; } else { if (controllerKey == scsiControllerKey && VmwareHelper.isReservedScsiDeviceNumber(scsiUnitNumber)) scsiUnitNumber++; if (controllerKey == vmMo.getIDEControllerKey(ideUnitNumber)) ideUnitNumber++; else scsiUnitNumber++; } } // // Setup NIC devices // VirtualDevice nic; int nicMask = 0; int nicCount = 0; VirtualEthernetCardType nicDeviceType = VirtualEthernetCardType .valueOf(vmSpec.getDetails().get(VmDetailConstants.NIC_ADAPTER)); if (s_logger.isDebugEnabled()) s_logger.debug( "VM " + vmInternalCSName + " will be started with NIC device type: " + nicDeviceType); NiciraNvpApiVersion.logNiciraApiVersion(); Map<String, String> nicUuidToDvSwitchUuid = new HashMap<String, String>(); for (NicTO nicTo : sortNicsByDeviceId(nics)) { s_logger.info("Prepare NIC device based on NicTO: " + _gson.toJson(nicTo)); boolean configureVServiceInNexus = (nicTo.getType() == TrafficType.Guest) && (vmSpec.getDetails().containsKey("ConfigureVServiceInNexus")); VirtualMachine.Type vmType = cmd.getVirtualMachine().getType(); Pair<ManagedObjectReference, String> networkInfo = prepareNetworkFromNicInfo(vmMo.getRunningHost(), nicTo, configureVServiceInNexus, vmType); if ((nicTo.getBroadcastType() != BroadcastDomainType.Lswitch) || (nicTo.getBroadcastType() == BroadcastDomainType.Lswitch && NiciraNvpApiVersion.isApiVersionLowerThan("4.2"))) { if (VmwareHelper.isDvPortGroup(networkInfo.first())) { String dvSwitchUuid; ManagedObjectReference dcMor = hyperHost.getHyperHostDatacenter(); DatacenterMO dataCenterMo = new DatacenterMO(context, dcMor); ManagedObjectReference dvsMor = dataCenterMo.getDvSwitchMor(networkInfo.first()); dvSwitchUuid = dataCenterMo.getDvSwitchUuid(dvsMor); 
s_logger.info("Preparing NIC device on dvSwitch : " + dvSwitchUuid); nic = VmwareHelper.prepareDvNicDevice(vmMo, networkInfo.first(), nicDeviceType, networkInfo.second(), dvSwitchUuid, nicTo.getMac(), nicUnitNumber++, i + 1, true, true); if (nicTo.getUuid() != null) { nicUuidToDvSwitchUuid.put(nicTo.getUuid(), dvSwitchUuid); } } else { s_logger.info("Preparing NIC device on network " + networkInfo.second()); nic = VmwareHelper.prepareNicDevice(vmMo, networkInfo.first(), nicDeviceType, networkInfo.second(), nicTo.getMac(), nicUnitNumber++, i + 1, true, true); } } else { //if NSX API VERSION >= 4.2, connect to br-int (nsx.network), do not create portgroup else previous behaviour nic = VmwareHelper.prepareNicOpaque(vmMo, nicDeviceType, networkInfo.second(), nicTo.getMac(), nicUnitNumber++, i + 1, true, true); } deviceConfigSpecArray[i] = new VirtualDeviceConfigSpec(); deviceConfigSpecArray[i].setDevice(nic); deviceConfigSpecArray[i].setOperation(VirtualDeviceConfigSpecOperation.ADD); if (s_logger.isDebugEnabled()) s_logger.debug("Prepare NIC at new device " + _gson.toJson(deviceConfigSpecArray[i])); // this is really a hacking for DomR, upon DomR startup, we will reset all the NIC allocation after eth3 if (nicCount < 3) nicMask |= (1 << nicCount); i++; nicCount++; } for (int j = 0; j < i; j++) vmConfigSpec.getDeviceChange().add(deviceConfigSpecArray[j]); // // Setup VM options // // pass boot arguments through machine.id & perform customized options to VMX ArrayList<OptionValue> extraOptions = new ArrayList<OptionValue>(); configBasicExtraOption(extraOptions, vmSpec); configNvpExtraOption(extraOptions, vmSpec, nicUuidToDvSwitchUuid); configCustomExtraOption(extraOptions, vmSpec); // config VNC String keyboardLayout = null; if (vmSpec.getDetails() != null) keyboardLayout = vmSpec.getDetails().get(VmDetailConstants.KEYBOARD); vmConfigSpec.getExtraConfig() .addAll(Arrays.asList(configureVnc(extraOptions.toArray(new OptionValue[0]), hyperHost, vmInternalCSName, 
vmSpec.getVncPassword(), keyboardLayout))); // // Configure VM // if (!vmMo.configureVm(vmConfigSpec)) { throw new Exception("Failed to configure VM before start. vmName: " + vmInternalCSName); } // // Post Configuration // vmMo.setCustomFieldValue(CustomFieldConstants.CLOUD_NIC_MASK, String.valueOf(nicMask)); postNvpConfigBeforeStart(vmMo, vmSpec); Map<String, String> iqnToPath = new HashMap<String, String>(); postDiskConfigBeforeStart(vmMo, vmSpec, sortedDisks, ideControllerKey, scsiControllerKey, iqnToPath, hyperHost, context); postVideoCardMemoryConfigBeforeStart(vmMo, vmSpec); // // Power-on VM // if (!vmMo.powerOn()) { throw new Exception( "Failed to start VM. vmName: " + vmInternalCSName + " with hostname " + vmNameOnVcenter); } StartAnswer startAnswer = new StartAnswer(cmd); startAnswer.setIqnToPath(iqnToPath); // Since VM was successfully powered-on, if there was an existing VM in a different cluster that was unregistered, delete all the files associated with it. if (existingVmName != null && existingVmFileLayout != null) { deleteUnregisteredVmFiles(existingVmFileLayout, dcMo, true); } return startAnswer; } catch (Throwable e) { if (e instanceof RemoteException) { s_logger.warn("Encounter remote exception to vCenter, invalidate VMware session context"); invalidateServiceContext(); } String msg = "StartCommand failed due to " + VmwareHelper.getExceptionMessage(e); s_logger.warn(msg, e); StartAnswer startAnswer = new StartAnswer(cmd, msg); if (vmAlreadyExistsInVcenter) { startAnswer.setContextParam("stopRetry", "true"); } // Since VM start failed, if there was an existing VM in a different cluster that was unregistered, register it back. 
if (existingVmName != null && existingVmFileInfo != null) { s_logger.debug("Since VM start failed, registering back an existing VM: " + existingVmName + " that was unregistered"); try { DatastoreFile fileInDatastore = new DatastoreFile(existingVmFileInfo.getVmPathName()); DatastoreMO existingVmDsMo = new DatastoreMO(dcMo.getContext(), dcMo.findDatastore(fileInDatastore.getDatastoreName())); registerVm(existingVmName, existingVmDsMo); } catch (Exception ex) { String message = "Failed to register an existing VM: " + existingVmName + " due to " + VmwareHelper.getExceptionMessage(ex); s_logger.warn(message, ex); } } return startAnswer; } finally { } }
From source file:cms.service.app.ServiceController.java
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { HashMap<String, String> userdata; AccessToken access = null;// w w w. j a v a 2 s.c om AccessToken regaccess = null; RequestDispatcher rd; String strNextPage = null; boolean isvalidtoken = false; boolean subscription = false; String reguser = "registration"; String regpassword = "reg$56*123"; String baseurl = request.getRequestURL().toString().split("/service")[0]; String remotehost = request.getRemoteHost(); String remoteaddress = request.getRemoteAddr(); String username = request.getParameter("username"); String password = request.getParameter("password"); String useraction = request.getParameter("useraction"); String adminuser = request.getParameter("adminuser"); String usertoken = request.getParameter("usertoken"); String servicekey = request.getParameter("servicekey"); String amount = request.getParameter("amount"); String objid = request.getParameter("refobjid"); String description = request.getParameter("description"); String leadactions = "campaign whitepapers contactus training"; String sendreference = request.getParameter("sendreference"); ; Date date = new Date(); //set remote address request.setCharacterEncoding("UTF-8"); /*if(custom!=null&&!custom.equals("")){ custom="dan@softlean.com#-1624640437#127.0.0.1"; item_name="xyx"; item_number="899"; } subscription=(custom!=null&&!custom.equals(""))?true:false; logger.info(date.toString()+":-subscription="+subscription+" custom="+custom); if(subscription){ custret=custom.split("-"); if(custret.length==6) username=custret[3]; usertoken=custret[4]; tu.updatePayment(custret); } */ //first verify remote client whether the request is from same client if (!tu.isEmptyValue(servicekey)) { try { String val = new String(Base64Util.decode(servicekey.getBytes())); logger.info("servicekey=" + val); if (!tu.isEmptyValue(val)) { String[] items = val.split(CONST.IPSEPERATOR); if (items.length >= 2) { username = 
items[0]; password = items[1]; } if (items.length == 3) { useraction = items[2]; if (useraction.equalsIgnoreCase("campaign") && tu.isEmptyValue(sendreference)) { sendreference = "2"; } } } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } //logger.info("\n"+date.toString()+":-username="+username+ " password="+password+" useraction="+useraction); if (CONST.GENERATE_LOG) { logger.info("\n" + date.toString() + ":-URI=" + request.getRequestURI()); logger.info("\n" + date.toString() + ":-Request Remote address=" + remoteaddress + " Remote Host=" + remotehost); } //Identify the current page as login page if (!tu.isEmptyValue(username) && !tu.isEmptyValue(password)) { //Do not check license for hosting solution access = m_service.verifyLogin(username, password, remoteaddress); } else { regaccess = m_service.verifyLogin(reguser, regpassword, remoteaddress); } if (access != null && tu.isEmptyValue(useraction)) { logger.info("\n" + date.toString() + " Loged in User:=" + username + " " + ":-Assigned Token=" + access.getToken() + ":-Assigned Modules=" + access.getModules()); //access token is appended with Client IP in the indexpage strNextPage = "/src/index.jsp?username=" + username + "&firstname=" + access.getFirstname() + "&usertoken=" + access.getToken() + CONST.IPSEPERATOR + remoteaddress + CONST.IPSEPERATOR + username + "&baseurl=" + baseurl + "&modules=" + access.getModules(); } else if (access != null && !tu.isEmptyValue(useraction) && leadactions.toLowerCase().contains(useraction.toLowerCase())) { strNextPage = "/src/campaign/" + useraction + ".jsp?username=" + username + "&firstname=" + access.getFirstname() + "&usertoken=" + access.getToken() + CONST.IPSEPERATOR + remoteaddress + CONST.IPSEPERATOR + username + "&baseurl=" + baseurl + "&campaignid=" + objid + "&sendreference=" + sendreference; //logger.info("\n"+date.toString()+"strNextPage:="+strNextPage); //}else if(access!=null && !tu.isEmptyValue(useraction) && 
useraction.equalsIgnoreCase("whitepapers")){ //strNextPage="/src/campaign/white_paper.jsp?username="+username+"&firstname="+access.getFirstname()+"&usertoken="+access.getToken()+CONST.IPSEPERATOR+remoteaddress+CONST.IPSEPERATOR+username+"&baseurl=" // +baseurl+"&campaignid="+objid; //logger.info("\n"+date.toString()+"strNextPage:="+strNextPage); } else if (subscription) { userdata = m_service.verifyUserToken(usertoken); isvalidtoken = usertoken != null && !usertoken.equals("") && userdata != null; if (isvalidtoken) { strNextPage = "/src/index.jsp?username=" + username + "&usertoken=" + usertoken + "&baseurl=" + baseurl; } } else if (!tu.isEmptyValue(useraction) && useraction.equalsIgnoreCase("missingpassword")) { strNextPage = "/src/password.jsp?token=" + regaccess.getToken() + CONST.IPSEPERATOR + remoteaddress + CONST.IPSEPERATOR + reguser + "&baseurl=" + baseurl; } else if (!tu.isEmptyValue(useraction) && useraction.equalsIgnoreCase("demoregistration")) { strNextPage = "/src/demologin.jsp?token=" + regaccess.getToken() + CONST.IPSEPERATOR + remoteaddress + CONST.IPSEPERATOR + reguser + "&baseurl=" + baseurl; } else if (!tu.isEmptyValue(adminuser) && adminuser.equals("sa")) { strNextPage = "/src/admin.jsp?token=" + regaccess.getToken() + CONST.IPSEPERATOR + remoteaddress + CONST.IPSEPERATOR + reguser + "&baseurl=" + baseurl; } else if (!tu.isEmptyValue(amount) && !tu.isEmptyValue(objid)) { userdata = m_service.verifyUserToken(usertoken); if (userdata != null && !userdata.isEmpty()) { strNextPage = "/src/java_sim/payment.jsp?email=" + username + "&objid=" + objid + "&amount=" + amount + "&description=" + description; } } else if (regaccess != null) { strNextPage = "/src/login.jsp?token=" + regaccess.getToken() + CONST.IPSEPERATOR + remoteaddress + CONST.IPSEPERATOR + reguser + "&baseurl=" + baseurl; } else { strNextPage = "/src/error.jsp?token=null" + CONST.IPSEPERATOR + remoteaddress + CONST.IPSEPERATOR + reguser + "&baseurl=" + baseurl; } if (CONST.GENERATE_LOG) 
{ logger.info("\n" + date.toString() + ":-Mapped Filename : " + strNextPage); } if (!strNextPage.equals("")) { rd = m_autoContext.getRequestDispatcher(strNextPage); // Forward the request to the target page try { if (rd != null) { rd.forward(request, response); } } catch (Exception e) { logger.info("ControllerServlet.doPost(): error in rd.forward"); e.printStackTrace(); } } else { // This should be logged. logger.info("Next Page is null"); super.doPost(request, response); } }
From source file:pt.ua.dicoogle.server.web.management.Services.java
/** * Performs a certain action to a service or plugin. * * @param action the action to perform./*w w w.j a v a 2 s .c o m*/ * @param svcName the name of the service or plugin to perform the action * at. * @param initStart the init-start param value. * @param port the set-port param value. * @param advSettings a HashMap with all the advanced settings and their * values. * @return an int value indicating what was wrong with the call (see * Services.RES_* for values/name pairs) */ @Deprecated public int performAction(int action, String svcName, boolean initStart, int port, HashMap<String, String[]> advSettings) { if ((svcName == null) || svcName.trim().isEmpty()) { return RES_NO_SERVICE_NAME; } try { switch (action) { case SVC_START: if (svcName.equalsIgnoreCase(webServerName)) { svcs.startWebServer(); return RES_OK; } else { if (svcName.equalsIgnoreCase(webServicesName)) { svcs.startWebServices(); return RES_OK; } else { if (svcName.equalsIgnoreCase(storageName)) { svcs.startStorage(); return RES_OK; } else { if (svcName.equalsIgnoreCase(queryRetrieveName)) { svcs.startQueryRetrieve(); return RES_OK; } else { //TODO: DELETED /*for (String plug : plgs.getPluginsNames()) { if (plug.equalsIgnoreCase(svcName)) { svcs.startPlugin(svcName); return RES_OK; } }*/ return RES_INVALID_SERVICE_NAME; } } } } case SVC_STOP: if (svcName.equalsIgnoreCase(webServerName)) { svcs.stopWebServer(); return RES_OK; } else { if (svcName.equalsIgnoreCase(webServicesName)) { svcs.stopWebServices(); return RES_OK; } else { if (svcName.equalsIgnoreCase(storageName)) { svcs.stopStorage(); return RES_OK; } else { if (svcName.equalsIgnoreCase(queryRetrieveName)) { svcs.stopQueryRetrieve(); return RES_OK; } else { //TODO: DELETED /*for (String plug : plgs.getPluginsNames()) { if (svcName.equalsIgnoreCase(plug)) { svcs.stopPlugin(svcName); return RES_OK; } }*/ return RES_INVALID_SERVICE_NAME; } } } } case SVC_INIT_START: if (svcName.equalsIgnoreCase(webServerName)) { setWebServerStart(initStart); 
return RES_OK; } else { if (svcName.equalsIgnoreCase(webServicesName)) { setWebServicesStart(initStart); return RES_OK; } else { if (svcName.equalsIgnoreCase(storageName)) { setStorageStart(initStart); return RES_OK; } else { if (svcName.equalsIgnoreCase(queryRetrieveName)) { setQueryRetrieveStart(initStart); return RES_OK; } else { //TODO: DELETED /*for (String plug : plgs.getPluginsNames()) { if (svcName.equalsIgnoreCase(plug)) { cfgs.setAutoStartPlugin(svcName, initStart); return RES_OK; } }*/ return RES_INVALID_SERVICE_NAME; } } } } case SVC_SET_PORT: { if ((port < 0) || (port > 65535)) { return RES_INVALID_ACTION_PARAMETER; } if (svcName.equalsIgnoreCase(webServerName)) { setWebServerPort(port); return RES_OK; } else { if (svcName.equalsIgnoreCase(webServicesName)) { setWebServicesPort(port); return RES_OK; } else { if (svcName.equalsIgnoreCase(storageName)) { setStoragePort(port); return RES_OK; } else { if (svcName.equalsIgnoreCase(queryRetrieveName)) { setQueryRetrievePort(port); return RES_OK; } else { if (svcName.equalsIgnoreCase(remoteGUIName)) { setRemoteGUIPort(port); return RES_OK; } else { // TODO for plugins return RES_INVALID_SERVICE_NAME; } } } } } } case SVC_SET_SETTINGS: if (advSettings == null || advSettings.isEmpty()) { return RES_INVALID_ACTION_PARAMETER; } else { if (svcName.equalsIgnoreCase(storageName)) { HashMap<String, Object> settings = cfgs.getStorageSettings(); processAdvancedSettings(settings, advSettings); // try to apply the settings if (cfgs.tryStorageSettings(settings)) { cfgs.setStorageSettings(settings); saveSettings(); return RES_OK; } else { return RES_INVALID_ACTION_PARAMETER; } } else { if (svcName.equalsIgnoreCase(queryRetrieveName)) { HashMap<String, Object> settings = cfgs.getQueryRetrieveSettings(); processAdvancedSettings(settings, advSettings); // try to apply the settings if (cfgs.tryQueryRetrieveSettings(settings)) { cfgs.setQueryRetrieveSettings(settings); saveSettings(); return RES_OK; } else { return 
RES_INVALID_ACTION_PARAMETER; } } else { if (svcName.equalsIgnoreCase(remoteGUIName)) { HashMap<String, Object> settings = cfgs.getRGUISettings(); processAdvancedSettings(settings, advSettings); // try to apply the settings if (cfgs.tryRGUISettings(settings)) { cfgs.setRGUISettings(settings); saveSettings(); return RES_OK; } else { return RES_INVALID_ACTION_PARAMETER; } } else { if (plgs.hasAdvancedSettings(svcName)) { //TODO: DELETED //HashMap<String, Object> settings = plgs.getAdvancedSettings(svcName); //processAdvancedSettings(settings, advSettings); // try to apply the settings if (/*plgs.trySettings(svcName, settings)*/true) { //plgs.setSettings(svcName, settings); //plgs.saveSettings(); return RES_OK; } else { return RES_INVALID_ACTION_PARAMETER; } } return RES_INVALID_SERVICE_NAME; } } } } case SVC_NO_ACTION: default: return RES_INVALID_ACTION; } } catch (IOException ex) { return RES_WARNING; } }