List of usage examples for java.util GregorianCalendar getTime
public final Date getTime()
Returns a Date object representing this Calendar's time value (millisecond offset from the Epoch).
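Before the source-file examples below, a minimal, self-contained sketch of the call itself may help: it builds a GregorianCalendar, converts it to a java.util.Date with getTime(), and checks that the Date carries the same millisecond offset. The class and variable names here are illustrative only and are not taken from the examples that follow.

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

public class GetTimeExample {
    public static void main(String[] args) {
        // Calendar months are zero-based, so Calendar.MARCH is month 2.
        GregorianCalendar cal = new GregorianCalendar(2020, Calendar.MARCH, 1, 12, 30);

        // getTime() returns a Date representing the calendar's instant
        // as a millisecond offset from the Epoch.
        Date date = cal.getTime();
        System.out.println(date);

        // The Date and the calendar hold the same millisecond value.
        System.out.println(cal.getTimeInMillis() == date.getTime()); // true
    }
}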
From source file:edu.umm.radonc.ca_dash.model.TxInstanceFacade.java
public TreeMap<Date, SynchronizedDescriptiveStatistics> getMonthlySummaryStats(Date startDate, Date endDate,
        Long hospitalser, String filter, boolean includeWeekends, boolean ptflag, boolean scheduledFlag) {
    Calendar cal = new GregorianCalendar();
    TreeMap<Date, SynchronizedDescriptiveStatistics> retval = new TreeMap<>();
    GregorianCalendar oc = new GregorianCalendar();
    List<Object[]> events;
    events = getDailyCounts(startDate, endDate, hospitalser, filter, false, ptflag, scheduledFlag);
    cal.setTime(startDate);
    int mo = cal.get(Calendar.MONTH);
    int yr = cal.get(Calendar.YEAR);
    int currYr = yr;
    int currMo = mo;
    int prevMo = -1;
    int prevYr = -1;
    SynchronizedDescriptiveStatistics currStats = new SynchronizedDescriptiveStatistics();
    int i = 0;
    while (cal.getTime().before(endDate) && i < events.size()) {
        Object[] event = events.get(i);
        Date d = (Date) event[0];
        Long count = (Long) event[1];
        prevMo = currMo;
        prevYr = currYr;
        cal.setTime(d);
        mo = cal.get(Calendar.MONTH);
        yr = cal.get(Calendar.YEAR);
        currMo = mo;
        currYr = yr;
        if (prevMo != currMo || prevYr != currYr) {
            oc.set(Calendar.MONTH, prevMo);
            oc.set(Calendar.YEAR, prevYr);
            oc.set(Calendar.DAY_OF_MONTH, 1);
            retval.put(oc.getTime(), currStats);
            currStats = new SynchronizedDescriptiveStatistics();
        }
        currStats.addValue(count);
        i++;
    }
    oc.set(Calendar.MONTH, currMo);
    oc.set(Calendar.YEAR, currYr);
    oc.set(Calendar.DAY_OF_MONTH, 1);
    retval.put(oc.getTime(), currStats);
    return retval;
}
From source file:com.aerospike.examples.timeseries.TimeSeriesManipulator.java
private void retrieveResult(String[] ticker, Set<Long> dateList) throws ParseException { // TODO Auto-generated method stub Record[] records;//www . ja v a 2 s . c o m String pk; int size = dateList.size(); int numTickers = 0; if (ticker != null) numTickers = ticker.length; Key[] keys = new Key[size]; Long count = new Long(0); Double sum; Double startVal; Double endVal; GregorianCalendar cal = new GregorianCalendar(); // Random rand = new Random(); // long randomNum = 0; // long overallRndNum = 0 + rand.nextInt((1000000 - 0) + 1); long currTime = GregorianCalendar.getInstance().getTimeInMillis(); Key summaryKey = new Key("test", "overallsummary", currTime); String tksummKey = null; for (int j = 0; j < numTickers; j++) { boolean firstRec = false; count = new Long(0); sum = new Double(0); startVal = new Double(0); endVal = new Double(0); // randomNum = 0 + rand.nextInt((1000000 - 0) + 1); Key tsKey = null; //Key tsKey = new Key("test", "tickersummary", randomNum); for (int i = 0; i < size; i++) { Object[] dateArr = dateList.toArray(); Long token = (Long) dateArr[i]; pk = ticker[j] + token; tksummKey = ticker[j] + "Summary" + currTime; tsKey = new Key("test", "tickersummary", tksummKey); keys[i] = new Key("test", "timeseries", pk); cal.setTimeInMillis(token); String formattedDate = dateOp.dateFormatter(cal.getTime()); Record record = client.operate(wPolicy, keys[i], MapOperation.getByRank("stock", -1, MapReturnType.VALUE), MapOperation.getByRank("stock", -1, MapReturnType.INDEX), MapOperation.getByRank("stock", 0, MapReturnType.VALUE), MapOperation.getByRank("stock", 0, MapReturnType.INDEX), MapOperation.getByIndex("stock", 0, MapReturnType.VALUE), MapOperation.getByIndex("stock", -1, MapReturnType.VALUE), Operation.get("sum"), MapOperation.size("stock")); if (record != null) { ArrayList<Double> outList = (ArrayList<Double>) record.getList("stock"); sum = sum + (Double) record.getValue("sum"); Object countTemp = outList.get(6); count = count + (Long) countTemp; if (!firstRec) { startVal = outList.get(4); firstRec = true; } endVal = outList.get(5); Record recMax = client.operate(wPolicy, tsKey, MapOperation.put(mPolicy, "max", Value.get(formattedDate), Value.get(outList.get(0))), MapOperation.put(mPolicy, "min", Value.get(formattedDate), Value.get(outList.get(2)))); String maxIndex = dateOp.getTimeStamp(outList.get(1)); String minIndex = dateOp.getTimeStamp(outList.get(3)); System.out.println( "Reading Data for " + formattedDate + " with Primary Key: " + pk + "\n\t: MaxValue: " + Double.parseDouble(new DecimalFormat("##.##").format(outList.get(0))) + " Time of Day: " + maxIndex + "\n\t: MinValue: " + Double.parseDouble(new DecimalFormat("##.##").format(outList.get(2))) + " Time of Day: " + minIndex); } } summaryPrint(tsKey, sum, count, startVal, endVal, tksummKey, ticker[j]); double difference = endVal - startVal; Record recSumm = client.operate(wPolicy, summaryKey, MapOperation.put(mPolicy, "difference", Value.get(ticker[j]), Value.get(difference))); firstRec = false; } summaryPrint(count, summaryKey, currTime, numTickers); }
From source file:org.apache.manifoldcf.crawler.connectors.cmis.CmisRepositoryConnector.java
/** Process a set of documents. * This is the method that should cause each document to be fetched, processed, and the results either added * to the queue of documents for the current job, and/or entered into the incremental ingestion manager. * The document specification allows this class to filter what is done based on the job. * The connector will be connected before this method can be called. *@param documentIdentifiers is the set of document identifiers to process. *@param statuses are the currently-stored document versions for each document in the set of document identifiers * passed in above.// ww w.j a v a2 s. c o m *@param activities is the interface this method should use to queue up new document references * and ingest documents. *@param jobMode is an integer describing how the job is being run, whether continuous or once-only. *@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one. */ @Override public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec, IProcessActivity activities, int jobMode, boolean usesDefaultAuthority) throws ManifoldCFException, ServiceInterruption { // Extract what we need from the spec String cmisQuery = StringUtils.EMPTY; for (int i = 0; i < spec.getChildCount(); i++) { SpecificationNode sn = spec.getChild(i); if (sn.getType().equals(JOB_STARTPOINT_NODE_TYPE)) { cmisQuery = sn.getAttributeValue(CmisConfig.CMIS_QUERY_PARAM); break; } } for (String documentIdentifier : documentIdentifiers) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("CMIS: Processing document identifier '" + documentIdentifier + "'"); getSession(); // Load the object. If this fails, it has been deleted. CmisObject cmisObject; try { cmisObject = session.getObject(documentIdentifier); } catch (CmisObjectNotFoundException e) { cmisObject = null; } if (cmisObject == null) { //System.out.println(" doesn't exist"); activities.deleteDocument(documentIdentifier); continue; } String versionString; if (cmisObject.getBaseType().getId().equals(CMIS_DOCUMENT_BASE_TYPE)) { Document document = (Document) cmisObject; // Since documents that are not current have different node id's, we can return a constant version, // EXCEPT when the document is not the current one (in which case we delete) boolean isCurrentVersion; try { Document d = document.getObjectOfLatestVersion(false); isCurrentVersion = d.getId().equals(documentIdentifier); } catch (CmisObjectNotFoundException e) { isCurrentVersion = false; } if (isCurrentVersion) { //System.out.println(" is latest version"); versionString = documentIdentifier + ":" + cmisQuery; } else { //System.out.println(" is NOT latest vrersion"); activities.deleteDocument(documentIdentifier); continue; } } else { //a CMIS folder will always be processed //System.out.println(" is folder"); versionString = StringUtils.EMPTY; } if (versionString.length() == 0 || activities.checkDocumentNeedsReindexing(documentIdentifier, versionString)) { // Index this document String errorCode = null; String errorDesc = null; Long fileLengthLong = null; long startTime = System.currentTimeMillis(); try { String baseTypeId = cmisObject.getBaseType().getId(); if (baseTypeId.equals(CMIS_FOLDER_BASE_TYPE)) { // adding all the children for a folder Folder folder = (Folder) cmisObject; ItemIterable<CmisObject> children = folder.getChildren(); for (CmisObject child : children) { activities.addDocumentReference(child.getId(), documentIdentifier, RELATIONSHIP_CHILD); } } else 
if (baseTypeId.equals(CMIS_DOCUMENT_BASE_TYPE)) { // content ingestion Document document = (Document) cmisObject; Date createdDate = document.getCreationDate().getTime(); Date modifiedDate = document.getLastModificationDate().getTime(); long fileLength = document.getContentStreamLength(); String fileName = document.getContentStreamFileName(); String mimeType = document.getContentStreamMimeType(); //documentURI String documentURI = CmisRepositoryConnectorUtils.getDocumentURL(document, session); // Do any filtering (which will save us work) if (!activities.checkURLIndexable(documentURI)) { activities.noDocument(documentIdentifier, versionString); errorCode = activities.EXCLUDED_URL; errorDesc = "Excluding due to URL ('" + documentURI + "')"; continue; } if (!activities.checkMimeTypeIndexable(mimeType)) { activities.noDocument(documentIdentifier, versionString); errorCode = activities.EXCLUDED_MIMETYPE; errorDesc = "Excluding due to mime type (" + mimeType + ")"; continue; } if (!activities.checkLengthIndexable(fileLength)) { activities.noDocument(documentIdentifier, versionString); errorCode = activities.EXCLUDED_LENGTH; errorDesc = "Excluding due to length (" + fileLength + ")"; continue; } if (!activities.checkDateIndexable(modifiedDate)) { activities.noDocument(documentIdentifier, versionString); errorCode = activities.EXCLUDED_DATE; errorDesc = "Excluding due to date (" + modifiedDate + ")"; continue; } RepositoryDocument rd = new RepositoryDocument(); rd.setFileName(fileName); rd.setMimeType(mimeType); rd.setCreatedDate(createdDate); rd.setModifiedDate(modifiedDate); InputStream is; try { if (fileLength > 0) is = document.getContentStream().getStream(); else is = null; } catch (CmisObjectNotFoundException e) { // Document gone activities.deleteDocument(documentIdentifier); continue; } try { //binary if (is != null) { rd.setBinary(is, fileLength); } else { rd.setBinary(new NullInputStream(0), 0); } //properties List<Property<?>> properties = document.getProperties(); String id = StringUtils.EMPTY; for (Property<?> property : properties) { String propertyId = property.getId(); if (CmisRepositoryConnectorUtils.existsInSelectClause(cmisQuery, propertyId)) { if (propertyId.endsWith(Constants.PARAM_OBJECT_ID)) { id = (String) property.getValue(); if (property.getValue() != null || property.getValues() != null) { PropertyType propertyType = property.getType(); switch (propertyType) { case STRING: case ID: case URI: case HTML: if (property.isMultiValued()) { List<String> htmlPropertyValues = (List<String>) property .getValues(); for (String htmlPropertyValue : htmlPropertyValues) { rd.addField(propertyId, htmlPropertyValue); } } else { String stringValue = (String) property.getValue(); if (StringUtils.isNotEmpty(stringValue)) { rd.addField(propertyId, stringValue); } } break; case BOOLEAN: if (property.isMultiValued()) { List<Boolean> booleanPropertyValues = (List<Boolean>) property .getValues(); for (Boolean booleanPropertyValue : booleanPropertyValues) { rd.addField(propertyId, booleanPropertyValue.toString()); } } else { Boolean booleanValue = (Boolean) property.getValue(); if (booleanValue != null) { rd.addField(propertyId, booleanValue.toString()); } } break; case INTEGER: if (property.isMultiValued()) { List<BigInteger> integerPropertyValues = (List<BigInteger>) property .getValues(); for (BigInteger integerPropertyValue : integerPropertyValues) { rd.addField(propertyId, integerPropertyValue.toString()); } } else { BigInteger integerValue = (BigInteger) property.getValue(); if 
(integerValue != null) { rd.addField(propertyId, integerValue.toString()); } } break; case DECIMAL: if (property.isMultiValued()) { List<BigDecimal> decimalPropertyValues = (List<BigDecimal>) property .getValues(); for (BigDecimal decimalPropertyValue : decimalPropertyValues) { rd.addField(propertyId, decimalPropertyValue.toString()); } } else { BigDecimal decimalValue = (BigDecimal) property.getValue(); if (decimalValue != null) { rd.addField(propertyId, decimalValue.toString()); } } break; case DATETIME: if (property.isMultiValued()) { List<GregorianCalendar> datePropertyValues = (List<GregorianCalendar>) property .getValues(); for (GregorianCalendar datePropertyValue : datePropertyValues) { rd.addField(propertyId, ISO8601_DATE_FORMATTER .format(datePropertyValue.getTime())); } } else { GregorianCalendar dateValue = (GregorianCalendar) property .getValue(); if (dateValue != null) { rd.addField(propertyId, ISO8601_DATE_FORMATTER.format(dateValue.getTime())); } } break; default: break; } } } } } //ingestion try { activities.ingestDocumentWithException(documentIdentifier, versionString, documentURI, rd); fileLengthLong = new Long(fileLength); errorCode = "OK"; } catch (IOException e) { errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = e.getMessage(); handleIOException(e, "reading file input stream"); } } finally { try { if (is != null) { is.close(); } } catch (IOException e) { errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = e.getMessage(); handleIOException(e, "closing file input stream"); } } } else { // Unrecognized document type activities.noDocument(documentIdentifier, versionString); errorCode = "UNKNOWNTYPE"; errorDesc = "Document type is unrecognized: '" + baseTypeId + "'"; } } catch (ManifoldCFException e) { if (e.getErrorCode() == ManifoldCFException.INTERRUPTED) errorCode = null; throw e; } finally { if (errorCode != null) activities.recordActivity(new Long(startTime), ACTIVITY_READ, fileLengthLong, documentIdentifier, errorCode, errorDesc, null); } } } }
From source file:eu.cloudscale.showcase.generate.GenerateHibernate.java
@Override public void populateOrdersAndCC_XACTSTable() { GregorianCalendar cal; String[] credit_cards = { "VISA", "MASTERCARD", "DISCOVER", "AMEX", "DINERS" }; int num_card_types = 5; String[] ship_types = { "AIR", "UPS", "FEDEX", "SHIP", "COURIER", "MAIL" }; int num_ship_types = 6; String[] status_types = { "PROCESSING", "SHIPPED", "PENDING", "DENIED" }; int num_status_types = 4; // Order variables int O_C_ID;//from w ww . j a va 2 s . c om java.sql.Timestamp O_DATE; double O_SUB_TOTAL; double O_TAX; double O_TOTAL; String O_SHIP_TYPE; java.sql.Timestamp O_SHIP_DATE; int O_BILL_ADDR_ID, O_SHIP_ADDR_ID; String O_STATUS; String CX_TYPE; int CX_NUM; String CX_NAME; java.sql.Date CX_EXPIRY; String CX_AUTH_ID; int CX_CO_ID; System.out.println("Populating ORDERS, ORDER_LINES, CC_XACTS with " + NUM_ORDERS + " orders"); System.out.print("Complete (in 10,000's): "); Session session = sessionFactory.openSession(); Transaction tx = session.beginTransaction(); for (int i = 1; i <= NUM_ORDERS; i++) { if (i % 1000 == 0) { session.flush(); session.clear(); System.out.print(i / 1000 + " "); } if (i % 10000 == 0) { System.out.println(); } int num_items = getRandomInt(1, 5); cal = new GregorianCalendar(); cal.add(Calendar.DAY_OF_YEAR, -1 * getRandomInt(1, 60)); O_DATE = new java.sql.Timestamp(cal.getTime().getTime()); O_SUB_TOTAL = (double) getRandomInt(1000, 999999) / 100; O_TAX = O_SUB_TOTAL * 0.0825; O_TOTAL = O_SUB_TOTAL + O_TAX + 3.00 + num_items; O_SHIP_TYPE = ship_types[getRandomInt(0, num_ship_types - 1)]; cal.add(Calendar.DAY_OF_YEAR, getRandomInt(0, 7)); O_SHIP_DATE = new java.sql.Timestamp(cal.getTime().getTime()); O_STATUS = status_types[getRandomInt(0, num_status_types - 1)]; Orders order = new Orders(); ICustomer customer = this.customers.get(getRandomInt(1, this.customers.size() - 1)); IAddress billAddress = this.addresses.get(getRandomInt(1, this.addresses.size() - 1)); IAddress shipAddress = this.addresses.get(getRandomInt(1, this.addresses.size() - 1)); // int OL_I_ID = getRandomInt( 1, NUM_ITEMS ); // IItem item = itemDao.findById( OL_I_ID ); // Set parameter order.setCustomer(customer); order.setODate(new Date(O_DATE.getTime())); order.setOSubTotal(O_SUB_TOTAL); order.setOTax(O_TAX); order.setOTotal(O_TOTAL); order.setOShipType(O_SHIP_TYPE); order.setOShipDate(O_SHIP_DATE); order.setAddressByOBillAddrId(billAddress); order.setAddressByOShipAddrId(shipAddress); order.setOStatus(O_STATUS); order.setCcXactses(new HashSet<ICcXacts>()); order.setOrderLines(new HashSet<IOrderLine>()); session.save(order); for (int j = 1; j <= num_items; j++) { int OL_ID = j; int OL_O_ID = i; int OL_QTY = getRandomInt(1, 300); double OL_DISCOUNT = (double) getRandomInt(0, 30) / 100; String OL_COMMENTS = getRandomAString(20, 100); OrderLine orderLine = new OrderLine(); orderLine.setItem(this.items.get(getRandomInt(1, this.items.size() - 1))); orderLine.setOlQty(OL_QTY); orderLine.setOlDiscount(OL_DISCOUNT); orderLine.setOlComment(OL_COMMENTS); orderLine.setOrders(order); session.save(orderLine); order.getOrderLines().add(orderLine); } CX_TYPE = credit_cards[getRandomInt(0, num_card_types - 1)]; CX_NUM = getRandomNString(16); CX_NAME = getRandomAString(14, 30); cal = new GregorianCalendar(); cal.add(Calendar.DAY_OF_YEAR, getRandomInt(10, 730)); CX_EXPIRY = new java.sql.Date(cal.getTime().getTime()); CX_AUTH_ID = getRandomAString(15); CcXacts ccXacts = new CcXacts(); ccXacts.setCountry(this.countries.get(getRandomInt(1, this.countries.size() - 1))); ccXacts.setOrders(order); ccXacts.setCxType(CX_TYPE); 
ccXacts.setCxNum(CX_NUM); ccXacts.setCxName(CX_NAME); ccXacts.setCxExpiry(CX_EXPIRY); ccXacts.setCxAuthId(CX_AUTH_ID); ccXacts.setCxXactAmt(O_TOTAL); ccXacts.setCxXactDate(O_SHIP_DATE); // ccXacts.setOrders( order ); order.getCcXactses().add(ccXacts); session.save(ccXacts); } tx.commit(); session.close(); System.out.println(""); }
From source file:com.collabnet.ccf.teamforge.TFWriter.java
private PlanningFolderDO createPlanningFolder(GenericArtifact ga, String project, Connection connection) { String targetSystemTimezone = ga.getTargetSystemTimezone(); String parentId = project;/*from ww w.ja v a 2s .c o m*/ String targetParentArtifactId = ga.getDepParentTargetArtifactId(); if (targetParentArtifactId != null && !targetParentArtifactId.equals(GenericArtifact.VALUE_NONE) && !targetParentArtifactId.equals(GenericArtifact.VALUE_UNKNOWN)) { parentId = targetParentArtifactId; } String title = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.title.getFieldName(), ga); String description = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.description.getFieldName(), ga); Date startDate = null; Date endDate = null; GenericArtifactField startDateField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.startDate.getFieldName(), ga); if (startDateField != null) { GregorianCalendar gc = (GregorianCalendar) startDateField.getFieldValue(); if (gc != null) { Date dateValue = gc.getTime(); if (DateUtil.isAbsoluteDateInTimezone(dateValue, "GMT")) { startDate = DateUtil.convertGMTToTimezoneAbsoluteDate(dateValue, targetSystemTimezone); } else { startDate = dateValue; } } } GenericArtifactField endDateField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.endDate.getFieldName(), ga); if (endDateField != null) { GregorianCalendar gc = (GregorianCalendar) endDateField.getFieldValue(); if (gc != null) { Date dateValue = gc.getTime(); if (DateUtil.isAbsoluteDateInTimezone(dateValue, "GMT")) { endDate = DateUtil.convertGMTToTimezoneAbsoluteDate(dateValue, targetSystemTimezone); } else { endDate = dateValue; } } } int capacity = 0; String status = null; String releaseId = null; if (connection.supports54()) { capacity = GenericArtifactHelper .getIntMandatoryGAField(TFArtifactMetaData.TFFields.capacity.getFieldName(), ga); status = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.status.getFieldName(), ga); releaseId = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.releaseId.getFieldName(), ga); } //To support display effort in field added in TF 7.0 String trackerUnitId = null; if (connection.supports62()) { trackerUnitId = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.trackerUnitId.getFieldName(), ga); } //For points capacity field added in TF 7.1 int pointsCapacity = 0; if (connection.supports63()) { pointsCapacity = GenericArtifactHelper .getIntMandatoryGAField(TFArtifactMetaData.TFFields.pointsCapacity.getFieldName(), ga); } PlanningFolderDO planningFolder = null; try { if (translateTechnicalReleaseIds) { releaseId = TFTrackerHandler.convertReleaseIdForProject(connection, releaseId, project, isReleaseIdFieldsContainFileReleasePackageName() ? getPackageReleaseSeparatorString() : null); } String trackerUnitIdValue = !StringUtils.isEmpty(trackerUnitId) ? 
trackerUnitId : "Hours"; trackerUnitId = TFTrackerHandler.getTrackerUnitId(connection, trackerUnitIdValue, project); planningFolder = connection.getPlanningClient().createPlanningFolder(parentId, title, description, startDate, endDate, status, capacity, pointsCapacity, releaseId, trackerUnitId, null); } catch (RemoteException e) { String cause = "Could not create planning folder: " + e.getMessage(); log.error(cause, e); ga.setErrorCode(GenericArtifact.ERROR_EXTERNAL_SYSTEM_WRITE); throw new CCFRuntimeException(cause, e); } log.info("Created planning folder " + planningFolder.getId() + " in project " + project + " with parent " + parentId + " other system id: " + ga.getSourceArtifactId() + " in repository: " + ga.getSourceRepositoryId()); return planningFolder; }
From source file:org.bimserver.geometry.StreamingGeometryGenerator.java
@SuppressWarnings("unchecked") public GenerateGeometryResult generateGeometry(long uoid, final DatabaseSession databaseSession, QueryContext queryContext, long nrObjects) throws BimserverDatabaseException, GeometryGeneratingException { GenerateGeometryResult generateGeometryResult = new GenerateGeometryResult(); packageMetaData = queryContext.getPackageMetaData(); productClass = packageMetaData.getEClass("IfcProduct"); geometryFeature = productClass.getEStructuralFeature("geometry"); representationFeature = productClass.getEStructuralFeature("Representation"); representationsFeature = packageMetaData.getEClass("IfcProductDefinitionShape") .getEStructuralFeature("Representations"); itemsFeature = packageMetaData.getEClass("IfcShapeRepresentation").getEStructuralFeature("Items"); mappingSourceFeature = packageMetaData.getEClass("IfcMappedItem").getEStructuralFeature("MappingSource"); GregorianCalendar now = new GregorianCalendar(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss"); debugIdentifier = dateFormat.format(now.getTime()) + " (" + report.getOriginalIfcFileName() + ")"; long start = System.nanoTime(); String pluginName = ""; if (queryContext.getPackageMetaData().getSchema() == Schema.IFC4) { pluginName = "org.bimserver.ifc.step.serializer.Ifc4StepStreamingSerializerPlugin"; } else if (queryContext.getPackageMetaData().getSchema() == Schema.IFC2X3TC1) { pluginName = "org.bimserver.ifc.step.serializer.Ifc2x3tc1StepStreamingSerializerPlugin"; } else {/*from www. ja v a2 s.c o m*/ throw new GeometryGeneratingException( "Unknown schema " + queryContext.getPackageMetaData().getSchema()); } reuseGeometry = bimServer.getServerSettingsCache().getServerSettings().isReuseGeometry(); optimizeMappedItems = bimServer.getServerSettingsCache().getServerSettings().isOptimizeMappedItems(); report.setStart(new GregorianCalendar()); report.setIfcSchema(queryContext.getPackageMetaData().getSchema()); report.setUseMappingOptimization(optimizeMappedItems); report.setReuseGeometry(reuseGeometry); try { final StreamingSerializerPlugin ifcSerializerPlugin = (StreamingSerializerPlugin) bimServer .getPluginManager().getPlugin(pluginName, true); if (ifcSerializerPlugin == null) { throw new UserException("No IFC serializer found"); } User user = (User) databaseSession.get(uoid, org.bimserver.database.OldQuery.getDefault()); UserSettings userSettings = user.getUserSettings(); report.setUserName(user.getName()); report.setUserUserName(user.getUsername()); RenderEnginePluginConfiguration renderEngine = null; if (eoid != -1) { renderEngine = databaseSession.get(eoid, OldQuery.getDefault()); } else { renderEngine = userSettings.getDefaultRenderEngine(); } if (renderEngine == null) { throw new UserException("No default render engine has been selected for this user"); } renderEngineName = renderEngine.getName(); int availableProcessors = Runtime.getRuntime().availableProcessors(); report.setAvailableProcessors(availableProcessors); int maxSimultanousThreads = Math.min( bimServer.getServerSettingsCache().getServerSettings().getRenderEngineProcesses(), availableProcessors); if (maxSimultanousThreads < 1) { maxSimultanousThreads = 1; } final RenderEngineSettings settings = new RenderEngineSettings(); settings.setPrecision(Precision.SINGLE); settings.setIndexFormat(IndexFormat.AUTO_DETECT); settings.setGenerateNormals(true); settings.setGenerateTriangles(true); settings.setGenerateWireFrame(false); final RenderEngineFilter renderEngineFilter = new RenderEngineFilter(); RenderEnginePool renderEnginePool = 
bimServer.getRenderEnginePools().getRenderEnginePool( packageMetaData.getSchema(), renderEngine.getPluginDescriptor().getPluginClassName(), bimServer.getPluginSettingsCache().getPluginSettings(renderEngine.getOid())); report.setRenderEngineName(renderEngine.getName()); report.setRenderEnginePluginVersion( renderEngine.getPluginDescriptor().getPluginBundleVersion().getVersion()); try (RenderEngine engine = renderEnginePool.borrowObject()) { VersionInfo versionInfo = renderEnginePool.getRenderEngineFactory().getVersionInfo(); report.setRenderEngineVersion(versionInfo); applyLayerSets = engine.isApplyLayerSets(); report.setApplyLayersets(applyLayerSets); calculateQuantities = engine.isCalculateQuantities(); report.setCalculateQuantities(calculateQuantities); } // TODO reuse, pool the pools :) Or something smarter // TODO reuse queue, or try to determine a realistic size, or don't use a fixed-size queue ThreadPoolExecutor executor = new ThreadPoolExecutor(maxSimultanousThreads, maxSimultanousThreads, 24, TimeUnit.HOURS, new ArrayBlockingQueue<Runnable>(10000000)); JsonQueryObjectModelConverter jsonQueryObjectModelConverter = new JsonQueryObjectModelConverter( packageMetaData); String queryNameSpace = packageMetaData.getSchema().name().toLowerCase() + "-stdlib"; // Al references should already be direct, since this is now done in BimServer on startup, quite the hack... Include objectPlacement = jsonQueryObjectModelConverter .getDefineFromFile(queryNameSpace + ":ObjectPlacement", true); Set<EClass> classes = null; if (queryContext.getOidCounters() != null) { classes = queryContext.getOidCounters().keySet(); } else { classes = packageMetaData.getEClasses(); } float multiplierToMm = processUnits(databaseSession, queryContext); generateGeometryResult.setMultiplierToMm(multiplierToMm); // Phase 1 (mapped item detection) sometimes detects that mapped items have invalid (unsupported) RepresentationIdentifier values, this set keeps track of objects to skip in Phase 2 because of that Set<Long> toSkip = new HashSet<>(); for (EClass eClass : classes) { if (packageMetaData.getEClass("IfcProduct").isSuperTypeOf(eClass)) { int nrObjectsForType = 0; Query query2 = new Query(eClass.getName() + "Main query", packageMetaData); QueryPart queryPart2 = query2.createQueryPart(); queryPart2.addType(eClass, false); Include representationInclude = queryPart2.createInclude(); representationInclude.addType(eClass, false); representationInclude.addFieldDirect("Representation"); Include representationsInclude = representationInclude.createInclude(); representationsInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true); representationsInclude.addFieldDirect("Representations"); Include itemsInclude = representationsInclude.createInclude(); itemsInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), false); itemsInclude.addFieldDirect("Items"); itemsInclude.addFieldDirect("ContextOfItems"); Include mappingSourceInclude = itemsInclude.createInclude(); mappingSourceInclude.addType(packageMetaData.getEClass("IfcMappedItem"), false); mappingSourceInclude.addFieldDirect("MappingSource"); mappingSourceInclude.addFieldDirect("MappingTarget"); Include representationMap = mappingSourceInclude.createInclude(); representationMap.addType(packageMetaData.getEClass("IfcRepresentationMap"), false); representationMap.addFieldDirect("MappedRepresentation"); Include createInclude = representationMap.createInclude(); createInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), true); Include 
targetInclude = mappingSourceInclude.createInclude(); targetInclude.addType(packageMetaData.getEClass("IfcCartesianTransformationOperator3D"), false); targetInclude.addFieldDirect("Axis1"); targetInclude.addFieldDirect("Axis2"); targetInclude.addFieldDirect("Axis3"); targetInclude.addFieldDirect("LocalOrigin"); queryPart2.addInclude(objectPlacement); Map<Long, Map<Long, ProductDef>> representationMapToProduct = new HashMap<>(); QueryObjectProvider queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer, query2, Collections.singleton(queryContext.getRoid()), packageMetaData); HashMapVirtualObject next = queryObjectProvider2.next(); int nrProductsWithRepresentation = 0; while (next != null) { if (next.eClass() == eClass) { AbstractHashMapVirtualObject representation = next .getDirectFeature(representationFeature); if (representation != null) { Set<HashMapVirtualObject> representations = representation .getDirectListFeature(representationsFeature); if (representations != null) { boolean foundValidContext = false; for (HashMapVirtualObject representationItem : representations) { if (usableContext(representationItem)) { foundValidContext = true; } } if (foundValidContext) { nrProductsWithRepresentation++; } for (HashMapVirtualObject representationItem : representations) { if (!usableContext(representationItem) && foundValidContext) { continue; } if (hasValidRepresentationIdentifier(representationItem)) { Set<HashMapVirtualObject> items = representationItem .getDirectListFeature(itemsFeature); if (items == null || items.size() > 1) { // Only if there is just one item, we'll store this for reuse continue; } // So this next loop always results in 1 (or no) loops for (HashMapVirtualObject item : items) { report.addRepresentationItem(item.eClass().getName()); if (!packageMetaData.getEClass("IfcMappedItem") .isSuperTypeOf(item.eClass())) { nrObjectsForType++; continue; // All non IfcMappedItem objects will be done in phase 2 } AbstractHashMapVirtualObject mappingTarget = item .getDirectFeature(packageMetaData .getEReference("IfcMappedItem", "MappingTarget")); AbstractHashMapVirtualObject mappingSourceOfMappedItem = item .getDirectFeature(packageMetaData .getEReference("IfcMappedItem", "MappingSource")); if (mappingSourceOfMappedItem == null) { LOGGER.info("No mapping source"); continue; } AbstractHashMapVirtualObject mappedRepresentation = mappingSourceOfMappedItem .getDirectFeature(packageMetaData.getEReference( "IfcRepresentationMap", "MappedRepresentation")); if (!hasValidRepresentationIdentifier(mappedRepresentation)) { // Skip this mapping, we should store somewhere that this object should also be skipped in the normal way // TODO too many log statements, should log only 1 line for the complete model // LOGGER.info("Skipping because of invalid RepresentationIdentifier in mapped item (" + (String) mappedRepresentation.get("RepresentationIdentifier") + ")"); report.addSkippedBecauseOfInvalidRepresentationIdentifier( (String) mappedRepresentation .get("RepresentationIdentifier")); toSkip.add(next.getOid()); continue; } double[] mappingMatrix = Matrix.identity(); double[] productMatrix = Matrix.identity(); if (mappingTarget != null) { AbstractHashMapVirtualObject axis1 = mappingTarget .getDirectFeature(packageMetaData.getEReference( "IfcCartesianTransformationOperator", "Axis1")); AbstractHashMapVirtualObject axis2 = mappingTarget .getDirectFeature(packageMetaData.getEReference( "IfcCartesianTransformationOperator", "Axis2")); AbstractHashMapVirtualObject axis3 = mappingTarget 
.getDirectFeature(packageMetaData.getEReference( "IfcCartesianTransformationOperator", "Axis3")); AbstractHashMapVirtualObject localOrigin = mappingTarget .getDirectFeature(packageMetaData.getEReference( "IfcCartesianTransformationOperator", "LocalOrigin")); double[] a1 = null; double[] a2 = null; double[] a3 = null; if (axis3 != null) { List<Double> list = (List<Double>) axis3 .get("DirectionRatios"); a3 = new double[] { list.get(0), list.get(1), list.get(2) }; } else { a3 = new double[] { 0, 0, 1, 1 }; Vector.normalize(a3); } if (axis1 != null) { List<Double> list = (List<Double>) axis1 .get("DirectionRatios"); a1 = new double[] { list.get(0), list.get(1), list.get(2) }; Vector.normalize(a1); } else { // if (a3[0] == 1 && a3[1] == 0 && a3[2] == 0) { a1 = new double[] { 1, 0, 0, 1 }; // } else { // a1 = new double[]{0, 1, 0, 1}; // } } double[] xVec = Vector.scalarProduct(Vector.dot(a1, a3), a3); double[] xAxis = Vector.subtract(a1, xVec); Vector.normalize(xAxis); if (axis2 != null) { List<Double> list = (List<Double>) axis2 .get("DirectionRatios"); a2 = new double[] { list.get(0), list.get(1), list.get(2) }; Vector.normalize(a2); } else { a2 = new double[] { 0, 1, 0, 1 }; } double[] tmp = Vector.scalarProduct(Vector.dot(a2, a3), a3); double[] yAxis = Vector.subtract(a2, tmp); tmp = Vector.scalarProduct(Vector.dot(a2, xAxis), xAxis); yAxis = Vector.subtract(yAxis, tmp); Vector.normalize(yAxis); a2 = yAxis; a1 = xAxis; List<Double> t = (List<Double>) localOrigin.get("Coordinates"); mappingMatrix = new double[] { a1[0], a1[1], a1[2], 0, a2[0], a2[1], a2[2], 0, a3[0], a3[1], a3[2], 0, t.get(0).doubleValue(), t.get(1).doubleValue(), t.get(2).doubleValue(), 1 }; } AbstractHashMapVirtualObject placement = next .getDirectFeature(packageMetaData .getEReference("IfcProduct", "ObjectPlacement")); if (placement != null) { productMatrix = placementToMatrix(placement); } AbstractHashMapVirtualObject mappingSource = item .getDirectFeature(mappingSourceFeature); if (mappingSource != null) { Map<Long, ProductDef> map = representationMapToProduct .get(((HashMapVirtualObject) mappingSource).getOid()); if (map == null) { map = new LinkedHashMap<>(); representationMapToProduct.put( ((HashMapVirtualObject) mappingSource).getOid(), map); } ProductDef pd = new ProductDef(next.getOid()); pd.setMappedItemOid(item.getOid()); pd.setObject(next); pd.setProductMatrix(productMatrix); pd.setMappingMatrix(mappingMatrix); map.put(next.getOid(), pd); } } } } } } } next = queryObjectProvider2.next(); } Set<Long> done = new HashSet<>(); for (Long repMapId : representationMapToProduct.keySet()) { Map<Long, ProductDef> map = representationMapToProduct.get(repMapId); // When there is more than one instance using this mapping if (map.size() > 1) { Query query = new Query("Reuse query " + eClass.getName(), packageMetaData); QueryPart queryPart = query.createQueryPart(); // QueryPart queryPart3 = query.createQueryPart(); queryPart.addType(eClass, false); // queryPart3.addType(packageMetaData.getEClass("IfcMappedItem"), false); long masterOid = map.values().iterator().next().getOid(); double[] inverted = Matrix.identity(); ProductDef masterProductDef = map.get(masterOid); if (!Matrix.invertM(inverted, 0, masterProductDef.getMappingMatrix(), 0)) { LOGGER.debug("No inverse, this mapping will be skipped and processed as normal"); // This is probably because of mirroring of something funky // TODO we should however be able to squeeze out a little more reuse by finding another master... 
continue; } for (ProductDef pd : map.values()) { done.add(pd.getOid()); if (!optimizeMappedItems) { queryPart.addOid(pd.getOid()); // In theory these should be fused together during querying // queryPart3.addOid(pd.getMappedItemOid()); } else { pd.setMasterOid(masterOid); } } if (optimizeMappedItems) { queryPart.addOid(masterOid); } LOGGER.debug("Running " + map.size() + " objects in one batch because of reused geometry " + (eClass.getName())); // queryPart3.addInclude(jsonQueryObjectModelConverter.getDefineFromFile("ifc2x3tc1-stdlib:IfcMappedItem")); processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin, settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart, true, map, map.size()); } } Query query3 = new Query("Remaining " + eClass.getName(), packageMetaData); QueryPart queryPart3 = query3.createQueryPart(); queryPart3.addType(eClass, false); Include include3 = queryPart3.createInclude(); include3.addType(eClass, false); include3.addFieldDirect("Representation"); Include rInclude = include3.createInclude(); rInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true); rInclude.addFieldDirect("Representations"); Include representationsInclude2 = rInclude.createInclude(); representationsInclude2.addType(packageMetaData.getEClass("IfcShapeModel"), true); representationsInclude2.addFieldDirect("ContextOfItems"); queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer, query3, Collections.singleton(queryContext.getRoid()), packageMetaData); next = queryObjectProvider2.next(); Query query = new Query("Main " + eClass.getName(), packageMetaData); QueryPart queryPart = query.createQueryPart(); int written = 0; int maxObjectsPerFile = 0; if (nrProductsWithRepresentation <= 100) { maxObjectsPerFile = 1; } else if (nrProductsWithRepresentation < 10000) { maxObjectsPerFile = (int) (nrProductsWithRepresentation / 100); } else { maxObjectsPerFile = 100; } // LOGGER.info(report.getOriginalIfcFileName()); // LOGGER.info("Max objects per file: " + maxObjectsPerFile + " (" + eClass.getName() + ": " + nrProductsWithRepresentation + ")"); report.setMaxPerFile(maxObjectsPerFile); while (next != null) { if (next.eClass() == eClass && !done.contains(next.getOid()) && !toSkip.contains(next.getOid())) { AbstractHashMapVirtualObject representation = next .getDirectFeature(representationFeature); if (representation != null) { Set<HashMapVirtualObject> list = representation.getDirectListFeature(packageMetaData .getEReference("IfcProductRepresentation", "Representations")); boolean goForIt = goForIt(list); if (goForIt) { if (next.eClass() == eClass && !done.contains(next.getOid())) { representation = next.getDirectFeature(representationFeature); if (representation != null) { list = representation.getDirectListFeature(packageMetaData .getEReference("IfcProductRepresentation", "Representations")); boolean goForIt2 = goForIt(list); if (goForIt2) { queryPart.addOid(next.getOid()); written++; if (written >= maxObjectsPerFile) { processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin, settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart, false, null, written); query = new Query("Main " + eClass.getName(), packageMetaData); queryPart = query.createQueryPart(); written = 0; } } } } } } } next = queryObjectProvider2.next(); } if (written > 0) { processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin, settings, renderEngineFilter, 
renderEnginePool, executor, eClass, query, queryPart, false, null, written); } } } allJobsPushed = true; executor.shutdown(); executor.awaitTermination(24, TimeUnit.HOURS); // Need total bounds // float[] quantizationMatrix = createQuantizationMatrixFromBounds(boundsMm); // ByteBuffer verticesQuantized = quantizeVertices(vertices, quantizationMatrix, generateGeometryResult.getMultiplierToMm()); // geometryData.setAttribute(GeometryPackage.eINSTANCE.getGeometryData_VerticesQuantized(), verticesQuantized.array()); LOGGER.debug("Generating quantized vertices"); double[] quantizationMatrix = createQuantizationMatrixFromBounds( generateGeometryResult.getBoundsUntransformed(), multiplierToMm); for (Long id : geometryDataMap.keySet()) { Tuple<HashMapVirtualObject, ByteBuffer> tuple = geometryDataMap.get(id); HashMapVirtualObject buffer = new HashMapVirtualObject(queryContext, GeometryPackage.eINSTANCE.getBuffer()); // Buffer buffer = databaseSession.create(Buffer.class); buffer.set("data", quantizeVertices(tuple.getB().asDoubleBuffer(), quantizationMatrix, multiplierToMm) .array()); // buffer.setData(quantizeVertices(tuple.getB(), quantizationMatrix, multiplierToMm).array()); // databaseSession.store(buffer); buffer.save(); HashMapVirtualObject geometryData = tuple.getA(); geometryData.set("verticesQuantized", buffer.getOid()); int reused = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_Reused()); int nrTriangles = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_NrIndices()) / 3; int saveableTriangles = Math.max(0, (reused - 1)) * nrTriangles; geometryData.set("saveableTriangles", saveableTriangles); // if (saveableTriangles > 0) { // System.out.println("Saveable triangles: " + saveableTriangles); // } geometryData.saveOverwrite(); } long end = System.nanoTime(); long total = totalBytes.get() - (bytesSavedByHash.get() + bytesSavedByTransformation.get() + bytesSavedByMapping.get()); LOGGER.info("Rendertime: " + Formatters.nanosToString(end - start) + ", " + "Reused (by hash): " + Formatters.bytesToString(bytesSavedByHash.get()) + ", Reused (by transformation): " + Formatters.bytesToString(bytesSavedByTransformation.get()) + ", Reused (by mapping): " + Formatters.bytesToString(bytesSavedByMapping.get()) + ", Total: " + Formatters.bytesToString(totalBytes.get()) + ", Final: " + Formatters.bytesToString(total)); if (report.getNumberOfDebugFiles() > 0) { LOGGER.error("Number of erroneous files: " + report.getNumberOfDebugFiles()); } Map<String, Integer> skipped = report.getSkippedBecauseOfInvalidRepresentationIdentifier(); if (skipped.size() > 0) { LOGGER.error("Number of representations skipped:"); for (String identifier : skipped.keySet()) { LOGGER.error("\t" + identifier + ": " + skipped.get(identifier)); } } String dump = geometryGenerationDebugger.dump(); if (dump != null) { LOGGER.info(dump); } } catch (Exception e) { running = false; LOGGER.error("", e); report.setEnd(new GregorianCalendar()); throw new GeometryGeneratingException(e); } report.setEnd(new GregorianCalendar()); try { if (report.getNumberOfDebugFiles() > 0) { writeDebugFile(); } } catch (IOException e) { LOGGER.debug("", e); } return generateGeometryResult; }
From source file:org.obm.icalendar.Ical4jHelper.java
@Override
public List<Date> dateInInterval(EventRecurrence recurrence, Date eventDate, Date start, Date end,
        Set<Date> dateExce) {
    List<Date> ret = new LinkedList<Date>();
    Recur recur = getRecur(recurrence, eventDate);
    if (recur == null) {
        ret.add(eventDate);
        return ret;
    }
    if (end == null) {
        if (start.before(eventDate)) {
            ret.add(eventDate);
            return ret;
        }
        return ImmutableList.of();
    }
    DateList dl = recur.getDates(new DateTime(eventDate), new DateTime(start), new DateTime(end),
            Value.DATE_TIME);
    for (Iterator<?> it = dl.iterator(); it.hasNext();) {
        Date evD = (Date) it.next();
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(evD);
        cal.set(GregorianCalendar.MILLISECOND, 0);
        if (!dateExce.contains(cal.getTime())) {
            ret.add(evD);
        }
    }
    return ret;
}
From source file:com.collabnet.ccf.teamforge.TFWriter.java
/** * Creates the artifact represented by the GenericArtifact object on the * target TF system/*w ww.j a v a2 s. co m*/ * * @param ga * - the GenericArtifact object * @param tracker * - The target TF tracker ID * @param connection * - The Connection object for the target TF system * @return - the newly created artifact's ArtifactSoapDO object */ private ArtifactDO createArtifact(GenericArtifact ga, String tracker, Connection connection) { ArrayList<String> flexFieldNames = new ArrayList<String>(); ArrayList<String> flexFieldTypes = new ArrayList<String>(); ArrayList<Object> flexFieldValues = new ArrayList<Object>(); List<GenericArtifactField> gaFields = ga .getAllGenericArtifactFieldsWithSameFieldType(GenericArtifactField.VALUE_FIELD_TYPE_FLEX_FIELD); String targetSystemTimezone = ga.getTargetSystemTimezone(); if (gaFields != null) { for (GenericArtifactField gaField : gaFields) { String fieldName = gaField.getFieldName(); if (COMMENT_TEXT.equals(fieldName)) { continue; } String trackerFieldValueType = TFArtifactMetaData .getTFFieldValueTypeForGAFieldType(gaField.getFieldValueType()); flexFieldNames.add(fieldName); flexFieldTypes.add(trackerFieldValueType); Object value = null; FieldValueTypeValue fieldType = gaField.getFieldValueType(); if (trackerFieldValueType.equals(TrackerFieldSoapDO.FIELD_VALUE_TYPE_DATE)) { if (fieldType == FieldValueTypeValue.DATE) { GregorianCalendar gc = (GregorianCalendar) gaField.getFieldValue(); if (gc != null) { Date dateValue = gc.getTime(); if (DateUtil.isAbsoluteDateInTimezone(dateValue, "GMT")) { value = DateUtil.convertGMTToTimezoneAbsoluteDate(dateValue, targetSystemTimezone); } else { value = dateValue; } } } else if (fieldType == FieldValueTypeValue.DATETIME) { value = gaField.getFieldValue(); } } else { value = gaField.getFieldValue(); } flexFieldValues.add(value); } } String folderId = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.folderId.getFieldName(), ga); String description = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.description.getFieldName(), ga); String category = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.category.getFieldName(), ga); String group = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.group.getFieldName(), ga); String status = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.status.getFieldName(), ga); String statusClass = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.statusClass.getFieldName(), ga); String customer = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.customer.getFieldName(), ga); int priority = GenericArtifactHelper .getIntMandatoryGAField(TFArtifactMetaData.TFFields.priority.getFieldName(), ga); int estimatedEffort = 0; int actualEffort = 0; int remainingEffort = 0; String planningFolder = null; Boolean autosumming = false; estimatedEffort = GenericArtifactHelper .getIntMandatoryGAField(TFArtifactMetaData.TFFields.estimatedHours.getFieldName(), ga); actualEffort = GenericArtifactHelper .getIntMandatoryGAField(TFArtifactMetaData.TFFields.actualHours.getFieldName(), ga); if (connection.supports53()) { remainingEffort = GenericArtifactHelper .getIntMandatoryGAField(TFArtifactMetaData.TFFields.remainingEffort.getFieldName(), ga); planningFolder = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.planningFolder.getFieldName(), ga); autosumming = GenericArtifactHelper 
.getBooleanMandatoryGAField(TFArtifactMetaData.TFFields.autosumming.getFieldName(), ga); } int points = 0; if (connection.supports54()) { points = GenericArtifactHelper.getIntMandatoryGAField(TFArtifactMetaData.TFFields.points.getFieldName(), ga); } Date closeDate = GenericArtifactHelper .getDateMandatoryGAField(TFArtifactMetaData.TFFields.closeDate.getFieldName(), ga); String assignedTo = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.assignedTo.getFieldName(), ga); String reportedReleaseId = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.reportedReleaseId.getFieldName(), ga); String resolvedReleaseId = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.resolvedReleaseId.getFieldName(), ga); String title = GenericArtifactHelper .getStringMandatoryGAField(TFArtifactMetaData.TFFields.title.getFieldName(), ga); String[] comments = this.getComments(ga); ArtifactDO result = null; try { if (this.translateTechnicalReleaseIds) { reportedReleaseId = TFTrackerHandler.convertReleaseId(connection, reportedReleaseId, folderId, isReleaseIdFieldsContainFileReleasePackageName() ? getPackageReleaseSeparatorString() : null); resolvedReleaseId = TFTrackerHandler.convertReleaseId(connection, resolvedReleaseId, folderId, isReleaseIdFieldsContainFileReleasePackageName() ? getPackageReleaseSeparatorString() : null); } // now we have to deal with the parent dependencies String parentId = ga.getDepParentTargetArtifactId(); boolean associateWithParent = false; if (parentId != null && !parentId.equals(GenericArtifact.VALUE_UNKNOWN) && !parentId.equals(GenericArtifact.VALUE_NONE)) { // parent is a planning folder if (parentId.startsWith("plan")) { planningFolder = parentId; } else { associateWithParent = true; if (planningFolder == null) { ArtifactDO parentArtifact = connection.getTrackerClient().getArtifactData(parentId); planningFolder = parentArtifact.getPlanningFolderId(); } } } result = trackerHandler.createArtifact(connection, folderId, description, category, group, status, statusClass, customer, priority, estimatedEffort, actualEffort, closeDate, assignedTo, reportedReleaseId, resolvedReleaseId, flexFieldNames, flexFieldValues, flexFieldTypes, title, comments, remainingEffort, autosumming, planningFolder, points); // now create parent dependency if (associateWithParent) { trackerHandler.createArtifactDependency(connection, parentId, result.getId(), "CCF created parent-child dependency"); } log.info("New artifact " + result.getId() + " is created with the change from " + ga.getSourceArtifactId()); } catch (RemoteException e) { String cause = "While trying to create an artifact within TF, an error occured"; log.error(cause, e); ga.setErrorCode(GenericArtifact.ERROR_EXTERNAL_SYSTEM_WRITE); throw new CCFRuntimeException(cause, e); } catch (PlanningFolderRuleViolationException e) { String cause = "While trying to create an artifact within TF, a planning folder rule violation occured: " + e.getMessage(); log.error(cause, e); ga.setErrorCode(GenericArtifact.ERROR_EXTERNAL_SYSTEM_WRITE); throw new CCFRuntimeException(cause, e); } return result; }
From source file:org.obm.icalendar.Ical4jHelper.java
private Recur getRecurFrom(EventRecurrence eventRecurrence, String frequency) {
    if (eventRecurrence.getEnd() == null) {
        return new Recur(frequency, null);
    } else {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(eventRecurrence.getEnd());
        cal.set(GregorianCalendar.SECOND, 0);
        return new Recur(frequency, new DateTime(cal.getTime()));
    }
}
From source file:com.collabnet.ccf.teamforge.TFWriter.java
private PlanningFolderDO updatePlanningFolder(GenericArtifact ga, String project, Connection connection) throws RemoteException, PlanningFolderRuleViolationException { String id = ga.getTargetArtifactId(); String targetSystemTimezone = ga.getTargetSystemTimezone(); GenericArtifactField description = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.description.getFieldName(), ga); GenericArtifactField title = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.title.getFieldName(), ga); GenericArtifactField statusField = null; GenericArtifactField releaseIdField = null; GenericArtifactField capacityField = null; GenericArtifactField pointsCapacityField = null; GenericArtifactField trackerUnitIdField = null; if (connection.supports54()) { statusField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.status.getFieldName(), ga); releaseIdField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.releaseId.getFieldName(), ga); capacityField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.capacity.getFieldName(), ga); }/*from ww w . j av a 2 s . co m*/ if (connection.supports62()) { trackerUnitIdField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.trackerUnitId.getFieldName(), ga); } if (connection.supports63()) { pointsCapacityField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.pointsCapacity.getFieldName(), ga); } GenericArtifactField startDateField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.startDate.getFieldName(), ga); if (startDateField != null && startDateField.getFieldValueHasChanged()) { GregorianCalendar gc = (GregorianCalendar) startDateField.getFieldValue(); if (gc != null) { Date dateValue = gc.getTime(); if (DateUtil.isAbsoluteDateInTimezone(dateValue, "GMT")) { startDateField.setFieldValue( DateUtil.convertGMTToTimezoneAbsoluteDate(dateValue, targetSystemTimezone)); } else { startDateField.setFieldValue(dateValue); } } } GenericArtifactField endDateField = GenericArtifactHelper .getMandatoryGAField(TFArtifactMetaData.TFFields.endDate.getFieldName(), ga); if (endDateField != null && endDateField.getFieldValueHasChanged()) { GregorianCalendar gc = (GregorianCalendar) endDateField.getFieldValue(); if (gc != null) { Date dateValue = gc.getTime(); if (DateUtil.isAbsoluteDateInTimezone(dateValue, "GMT")) { endDateField.setFieldValue( DateUtil.convertGMTToTimezoneAbsoluteDate(dateValue, targetSystemTimezone)); } else { endDateField.setFieldValue(dateValue); } } } boolean planningFolderNotUpdated = true; PlanningFolderDO planningFolder = null; while (planningFolderNotUpdated) { try { planningFolderNotUpdated = false; planningFolder = connection.getPlanningClient().getPlanningFolderData(id); // do conflict resolution if (!AbstractWriter.handleConflicts(planningFolder.getVersion(), ga)) { return null; } if (title != null && title.getFieldValueHasChanged()) { planningFolder.setTitle((String) title.getFieldValue()); } if (description != null && description.getFieldValueHasChanged()) { planningFolder.setDescription((String) description.getFieldValue()); } if (startDateField != null && startDateField.getFieldValueHasChanged()) { planningFolder.setStartDate((Date) startDateField.getFieldValue()); } if (endDateField != null && endDateField.getFieldValueHasChanged()) { planningFolder.setEndDate((Date) endDateField.getFieldValue()); } if (statusField != null && statusField.getFieldValueHasChanged()) { 
planningFolder.setStatus((String) statusField.getFieldValue()); } if (releaseIdField != null && releaseIdField.getFieldValueHasChanged()) { String releaseId = (String) releaseIdField.getFieldValue(); if (translateTechnicalReleaseIds) { releaseId = TFTrackerHandler.convertReleaseIdForProject(connection, releaseId, project, isReleaseIdFieldsContainFileReleasePackageName() ? getPackageReleaseSeparatorString() : null); } planningFolder.setReleaseId(releaseId); } if (capacityField != null && capacityField.getFieldValueHasChanged()) { Object fieldValueObj = capacityField.getFieldValue(); int fieldValue = 0; if (fieldValueObj instanceof String) { String fieldValueString = (String) fieldValueObj; try { fieldValue = Integer.parseInt(fieldValueString); } catch (NumberFormatException e) { throw new CCFRuntimeException( "Could not parse value of mandatory field capacity: " + e.getMessage(), e); } } else if (fieldValueObj instanceof Integer) { fieldValue = ((Integer) fieldValueObj).intValue(); } planningFolder.setCapacity(fieldValue); } if (pointsCapacityField != null && pointsCapacityField.getFieldValueHasChanged()) { Object fieldValueObj = pointsCapacityField.getFieldValue(); int fieldValue = 0; if (fieldValueObj instanceof String) { String fieldValueString = (String) fieldValueObj; try { fieldValue = Integer.parseInt(fieldValueString); } catch (NumberFormatException e) { throw new CCFRuntimeException( "Could not parse value of mandatory field points capacity: " + e.getMessage(), e); } } else if (fieldValueObj instanceof Integer) { fieldValue = ((Integer) fieldValueObj).intValue(); } planningFolder.setPointsCapacity(fieldValue); if (trackerUnitIdField != null && trackerUnitIdField.getFieldValueHasChanged()) { String trackerUnitId = (String) trackerUnitIdField.getFieldValue(); String trackerUnitIdValue = !StringUtils.isEmpty(trackerUnitId) ? trackerUnitId : "Hours"; trackerUnitId = TFTrackerHandler.getTrackerUnitId(connection, trackerUnitIdValue, project); planningFolder.setTrackerUnitId(trackerUnitId); } } connection.getPlanningClient().setPlanningFolderData(planningFolder); } catch (AxisFault e) { javax.xml.namespace.QName faultCode = e.getFaultCode(); if (!faultCode.getLocalPart().equals("VersionMismatchFault")) { throw e; } logConflictResolutor.warn("Stale update for TF planning folder " + id + " in project " + project + ". Trying again ...", e); planningFolderNotUpdated = true; } } planningFolder = connection.getPlanningClient().getPlanningFolderData(id); // now we have to cope with moving planning folders around String parentArtifactId = ga.getDepParentTargetArtifactId(); // first of all, if parent is unknown or null, we do not change anything if (parentArtifactId != null && !parentArtifactId.equals(GenericArtifact.VALUE_UNKNOWN)) { // check for the special case this is a top level PF if (parentArtifactId.equals(GenericArtifact.VALUE_NONE)) { // check whether this is already a top level planning folder if (!planningFolder.getParentFolderId().startsWith("PlanningApp")) { // move to top connection.getPlanningClient().movePlanningFolder(id, project); planningFolder = connection.getPlanningClient().getPlanningFolderData(id); } } else { // check whether correct parent is already assigned if (!parentArtifactId.equals(planningFolder.getParentFolderId())) { connection.getPlanningClient().movePlanningFolder(id, parentArtifactId); planningFolder = connection.getPlanningClient().getPlanningFolderData(id); } } } log.info("Planning folder updated. 
TF Id: " + planningFolder.getId() + " in project " + project + " other system id: " + ga.getSourceArtifactId() + " in repository: " + ga.getSourceRepositoryId()); return planningFolder; }