List of usage examples for java.util.Locale.UK

Locale.UK

To view the source code for java.util.Locale.UK, click the Source Link.
From source file:de.acosix.alfresco.mtsupport.repo.auth.ldap.EnhancedLDAPUserRegistry.java
/** * Sets the timestamp format. Unfortunately, this varies between directory servers. * * @param timestampFormat/*from w w w .j av a 2 s .co m*/ * the timestamp format * <ul> * <li>OpenLDAP: "yyyyMMddHHmmss'Z'" * <li>Active Directory: "yyyyMMddHHmmss'.0Z'" * </ul> */ public void setTimestampFormat(final String timestampFormat) { this.timestampFormat = new SimpleDateFormat(timestampFormat, Locale.UK); this.timestampFormat.setTimeZone(TimeZone.getTimeZone("GMT")); }
From source file:org.powertac.producer.pvfarm.IrradianceModelTest.java
@Test public void dataYearIrradianceOnTitledPlane() throws IOException { File file = new File("data/dataYearIrradianceOnTitledPlane.txt"); file.createNewFile();//from ww w. ja v a2 s. c o m PrintWriter pw = new PrintWriter(new File("data/dataYearIrradianceOnTitledPlane.txt")); double panelLatitude = 45; double panelTit = 80; for (int day = 1; day <= 365; day += 1) { double outsum = 0; double outsum2 = 0; for (double time = 0; time < 24; time += 0.1) { double sunaltitude = SolarModel.getSunAltitudeAngle(time, panelLatitude, day); if (sunaltitude >= 0) { double sunazimuth = SolarModel.getSunAzinuthAngle(sunaltitude, day, panelLatitude, time); double inci = SolarModel.getIncidenceAngle(sunaltitude, sunazimuth, 180, panelTit); double airmass = IrradianceModel.getAirMass(sunaltitude); double T0 = IrradianceModel.getT0(airmass, 3); double Tr = IrradianceModel.getTr(airmass); double aw = IrradianceModel.getaw(airmass, 0.2, 300); double f = IrradianceModel.getf(sunaltitude); double dir = IrradianceModel.getDirectIrradiance(sunaltitude, 1367, T0, Tr, aw, 0.95); double dif = IrradianceModel.getDiffuseIrradiance(sunaltitude, 1367, T0, Tr, aw, 0.95, 0.95, f); dir = IrradianceModel.getCloudModifiedIrradiance(dir, 0, 0.2, 0.95, 0.33, -1.06, 0, 0); dif = IrradianceModel.getCloudModifiedIrradiance(dif, 0, 0.2, 0.95, 0.33, -1.06, 0, 0); double res = dir + dif; if (inci <= 90) { res = IrradianceModel.getIrradiancOnTiltedPlane(dir, dif, inci, sunaltitude, panelTit, 0.2); } else { res = IrradianceModel.getIrradiancOnTiltedPlane(0, dif, inci, sunaltitude, panelTit, 0.2); } outsum = outsum + res / 10000; outsum2 = outsum2 + (dir + dif) / 10000; } } pw.printf(Locale.UK, "%d,%f,%f%n", day, outsum, outsum2); } pw.close(); }
From source file:mitm.common.security.certificate.GenerateTestCertificates.java
/**
 * Generates a known-good test certificate from a fixed RSA key pair and adds it
 * (plus its chain) to the shared key store under the alias "ValidCertificate".
 *
 * The key material and serial number are hard-coded hex fixtures so the resulting
 * certificate is deterministic across runs (the comment below on TreeSet makes the
 * same point for the extension sets).
 *
 * @throws Exception if key decoding or certificate generation fails
 */
private void generateValidCertificate() throws Exception {
    X509CertificateBuilder certificateBuilder = securityFactory.createX509CertificateBuilder();
    // DER-encoded RSA private key, as hex. Fixture data — do not reformat/regenerate.
    String encodedPrivateKey = "30820276020100300d06092a864886f70d0101010500048202603082025c"
            + "02010002818100a9fee3017954c99b248d1486830c71b2e0ea3f9b7a2763"
            + "1bed8a731f5bd7e1edf856bc3fb7c63dedbeb5bb0de474e7792b3aa7e7b2"
            + "274c03a47c7d89b1935eaef172c6395f2322f1ed9e61ae46d716b4b4394c"
            + "1a802db05a2d7c3d1d41a3e8afc65ff8dada7414744f1ee1540e50ee7fb8"
            + "db437b20c5ee33a82b9d575cfbc951020301000102818004f84ab2b45562"
            + "3f82e60cff91bd3f65b765a1ce6dd7d0f1f413e421ba91a92d47e161478b"
            + "9be41b9b43bce03f199bdad304b7fbf21d6bff7f439477fe150ce38c312f"
            + "c015f3c89291aaa42c4c106f623dfd9f76acad2f1c77b590f038ffbb25f9"
            + "14b6f7ead769808ddd0e2d648442620b50518d9b7fb132b2fa1fa3e9d628"
            + "41024100e69ab3765120d0e0ba5dc21bf384b2f553211b4b1902175454c6"
            + "2f1b0f8ad385d78490539308c9fd5145ae36cc2a6d364fdd97d83d9b6623"
            + "a987db239e716055024100bcb77acf1e9829ab5b2c9a5e73d343db857474"
            + "a529ba52ca256655eb7d760e85d3c68eec9500e3db0494c8f77cb8058593"
            + "6e52a9290149367392d74ecdc3510d024100bd15723b7cb024b56ffabad3"
            + "c26c3774f2b1bdb8690c0ee7060feec6088b737f56450b368be4740332e5"
            + "a8c0a3cdd1f8eba9adfd101ee0b43329036584604075024055465b9a27ea"
            + "fe394e33b375a6c4fa4ec1d943b4364cd9883aaa297d05ee48d5b4426ee6"
            + "fcd5b02091cb619c63a10bedb6170e071e5e5464e4889ffe1e007a290240"
            + "7b60d23994a2ec38db909678446ed56d32455bf684141b9ee0aec68b2025"
            + "1d4d94fd2beebf02074559b811ae1130d2e2aa3bec2e9bccb06969104856"
            + "00c70759";
    // Matching DER-encoded RSA public key, as hex.
    String encodedPublicKey = "30819f300d06092a864886f70d010101050003818d0030818902818100a9"
            + "fee3017954c99b248d1486830c71b2e0ea3f9b7a27631bed8a731f5bd7e1"
            + "edf856bc3fb7c63dedbeb5bb0de474e7792b3aa7e7b2274c03a47c7d89b1"
            + "935eaef172c6395f2322f1ed9e61ae46d716b4b4394c1a802db05a2d7c3d"
            + "1d41a3e8afc65ff8dada7414744f1ee1540e50ee7fb8db437b20c5ee33a8"
            + "2b9d575cfbc9510203010001";
    PrivateKey privateKey = decodePrivateKey(encodedPrivateKey);
    PublicKey publicKey = decodePublicKey(encodedPublicKey);
    // Build the subject DN and the RFC822 (email) subject-alternative name.
    X500PrincipalBuilder subjectBuilder = new X500PrincipalBuilder();
    String email = "test@example.com";
    subjectBuilder.setCommonName("Valid certificate");
    subjectBuilder.setEmail(email);
    subjectBuilder.setCountryCode("NL");
    subjectBuilder.setLocality("Amsterdam");
    subjectBuilder.setState("NH");
    AltNamesBuilder altNamesBuider = new AltNamesBuilder();
    altNamesBuider.setRFC822Names(email);
    X500Principal subject = subjectBuilder.buildPrincipal();
    GeneralNames altNames = altNamesBuider.buildAltNames();
    // use TreeSet because we want a deterministic certificate (ie. hash should not change)
    Set<KeyUsageType> keyUsage = new TreeSet<KeyUsageType>();
    keyUsage.add(KeyUsageType.DIGITALSIGNATURE);
    keyUsage.add(KeyUsageType.KEYENCIPHERMENT);
    keyUsage.add(KeyUsageType.NONREPUDIATION);
    Set<ExtendedKeyUsageType> extendedKeyUsage = new TreeSet<ExtendedKeyUsageType>();
    extendedKeyUsage.add(ExtendedKeyUsageType.CLIENTAUTH);
    extendedKeyUsage.add(ExtendedKeyUsageType.EMAILPROTECTION);
    BigInteger serialNumber = new BigInteger("115fcd741088707366e9727452c9770", 16);
    // Fixed "now" anchor; Locale.UK is required so "21-Nov-2007" parses reliably.
    Date now = DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM, Locale.UK)
            .parse("21-Nov-2007 10:38:35");
    certificateBuilder.setSubject(subject);
    certificateBuilder.setAltNames(altNames, true);
    certificateBuilder.setKeyUsage(keyUsage, true);
    certificateBuilder.setExtendedKeyUsage(extendedKeyUsage, false);
    // Validity window: 20 days before "now" through 20 years after.
    certificateBuilder.setNotBefore(DateUtils.addDays(now, -20));
    certificateBuilder.setNotAfter(DateUtils.addYears(now, 20));
    certificateBuilder.setPublicKey(publicKey);
    certificateBuilder.setSerialNumber(serialNumber);
    certificateBuilder.setSignatureAlgorithm("SHA1WithRSAEncryption");
    certificateBuilder.addSubjectKeyIdentifier(true);
    // Sign with the CA key and store the full chain: leaf -> CA -> root.
    X509Certificate certificate = certificateBuilder.generateCertificate(caPrivateKey, caCertificate);
    assertNotNull(certificate);
    certificates.add(certificate);
    Certificate[] chain = new Certificate[] { certificate, caCertificate, rootCertificate };
    keyStore.setKeyEntry("ValidCertificate", privateKey, null, chain);
}
From source file:org.alfresco.solr.tracker.AlfrescoSolrTrackerTest.java
/**
 * End-to-end exercise of the Solr trackers: indexes ACL change sets, transactions,
 * nodes and metadata, then verifies query visibility, cascade updates, bulk loads,
 * maintenance reindex/purge operations, and error-node retry.
 *
 * The test is strictly order-dependent: later document-count assertions (e.g. 552,
 * 1552, 1001) rely on the exact number of content-bearing documents created by
 * earlier phases, so phases must not be reordered or removed.
 */
@Test
public void testTrackers() throws Exception {
    /*
     * Create and index an AclChangeSet.
     */
    logger.info("######### Starting tracker test ###########");

    AclChangeSet aclChangeSet = getAclChangeSet(1);

    Acl acl = getAcl(aclChangeSet);
    Acl acl2 = getAcl(aclChangeSet, Long.MAX_VALUE - 10); // Test with long value

    AclReaders aclReaders = getAclReaders(aclChangeSet, acl, list("joel"), list("phil"), null);
    AclReaders aclReaders2 = getAclReaders(aclChangeSet, acl2, list("jim"), list("phil"), null);

    indexAclChangeSet(aclChangeSet, list(acl, acl2), list(aclReaders, aclReaders2));

    //Check for the ACL state stamp.
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")),
            BooleanClause.Occur.MUST));
    builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID,
            aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
    BooleanQuery waitForQuery = builder.build();
    waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);

    logger.info("#################### Passed First Test ##############################");

    /*
     * Create and index a Transaction
     */

    //First create a transaction.
    Transaction txn = getTransaction(0, 2);

    //Next create two nodes to update for the transaction
    Node folderNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
    Node fileNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
    Node errorNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
    logger.info("######### error node:" + errorNode.getId());

    //Next create the NodeMetaData for each node. TODO: Add more metadata
    NodeMetaData folderMetaData = getNodeMetaData(folderNode, txn, acl, "mike", null, false);
    NodeMetaData fileMetaData = getNodeMetaData(fileNode, txn, acl, "mike",
            ancestors(folderMetaData.getNodeRef()), false);
    //The errorNodeMetaData will cause an exception.
    NodeMetaData errorMetaData = getNodeMetaData(errorNode, txn, acl, "lisa",
            ancestors(folderMetaData.getNodeRef()), true);

    //Index the transaction, nodes, and nodeMetaDatas.
    //Note that the content is automatically created by the test framework.
    indexTransaction(txn, list(errorNode, folderNode, fileNode),
            list(errorMetaData, folderMetaData, fileMetaData));

    //Check for the TXN state stamp.
    logger.info("#################### Started Second Test ##############################");
    builder = new BooleanQuery.Builder();
    builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")),
            BooleanClause.Occur.MUST));
    builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn.getId(),
            txn.getId() + 1, true, false), BooleanClause.Occur.MUST));
    waitForQuery = builder.build();

    waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);

    logger.info("#################### Passed Second Test ##############################");

    /*
     * Query the index for the content
     */
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 1, MAX_WAIT_TIME);
    waitForDocCount(
            new TermQuery(
                    new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")),
            2, MAX_WAIT_TIME);
    waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content",
            Long.toString(fileNode.getId()))), 1, MAX_WAIT_TIME);

    logger.info("#################### Passed Third Test ##############################");

    ModifiableSolrParams params = new ModifiableSolrParams();
    //Query for an id in the content field. The node id is automatically populated into the content field by test framework
    params.add("q", "t1:" + fileNode.getId());
    params.add("qt", "/afts");
    params.add("start", "0");
    params.add("rows", "6");
    params.add("sort", "id asc");
    params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
    SolrServletRequest req = areq(params,
            "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"joel\"], \"tenants\": [ \"\" ]}");
    assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");

    logger.info("#################### Passed Fourth Test ##############################");

    //Check for the error doc
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 1, MAX_WAIT_TIME);

    logger.info("#################### Passed Fifth Test ##############################");

    //Mark the folder as needing cascade
    Transaction txn1 = getTransaction(0, 1);

    //Update the properties on the Node and NodeMetaData to simulate an update to the Node.
    folderMetaData.getProperties().put(ContentModel.PROP_CASCADE_TX,
            new StringPropertyValue(Long.toString(txn1.getId())));
    folderNode.setTxnId(txn1.getId()); // Update the txnId
    folderMetaData.setTxnId(txn1.getId());
    //Change the ancestor on the file just to see if it's been updated
    NodeRef nodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
    fileMetaData.setAncestors(ancestors(nodeRef));

    //This will add the PROP_CASCADE_TX property to the folder.
    logger.info("################### ADDING CASCADE TRANSACTION #################");
    indexTransaction(txn1, list(folderNode), list(folderMetaData));

    //Check for the TXN state stamp.
    builder = new BooleanQuery.Builder();
    builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")),
            BooleanClause.Occur.MUST));
    builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID,
            txn1.getId(), txn1.getId() + 1, true, false), BooleanClause.Occur.MUST));

    waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);

    logger.info("#################### Passed Sixth Test ##############################");

    TermQuery termQuery1 = new TermQuery(new Term(QueryConstants.FIELD_ANCESTOR, nodeRef.toString()));
    waitForDocCount(termQuery1, 1, MAX_WAIT_TIME);

    params = new ModifiableSolrParams();
    params.add("q", QueryConstants.FIELD_ANCESTOR + ":\"" + nodeRef.toString() + "\"");
    params.add("qt", "/afts");
    params.add("start", "0");
    params.add("rows", "6");
    params.add("sort", "id asc");
    params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
    req = areq(params,
            "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"mike\"], \"tenants\": [ \"\" ]}");
    assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");

    logger.info("#################### Passed Seventh Test ##############################");

    //Check that both documents have been indexed and have content.
    waitForDocCount(
            new TermQuery(
                    new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")),
            2, MAX_WAIT_TIME);

    logger.info("#################### Passed Eighth Test ##############################");

    //Add document with isContentIndexed=false
    Transaction txnNoContent = getTransaction(0, 1);
    Node noContentNode = getNode(txnNoContent, acl, Node.SolrApiNodeStatus.UPDATED);
    NodeMetaData noContentMetaData = getNodeMetaData(noContentNode, txnNoContent, acl, "mike", null, false);
    noContentMetaData.getProperties().put(ContentModel.PROP_IS_CONTENT_INDEXED,
            new StringPropertyValue("false"));
    noContentMetaData.getProperties().put(ContentModel.PROP_CONTENT,
            new ContentPropertyValue(Locale.UK, 298L, "UTF-8", "text/json", null));

    indexTransaction(txnNoContent, list(noContentNode), list(noContentMetaData));

    //This tests that the mime type has been added for this document. It is the only document with text/json in the index.
    waitForDocCount(new TermQuery(
            new Term("content@s__mimetype@{http://www.alfresco.org/model/content/1.0}content", "text/json")),
            1, MAX_WAIT_TIME);

    //Many of the tests beyond this point rely on a specific count of documents in the index that have content.
    //This document should not have had the content indexed so the tests following will pass.
    //If the content had been indexed the tests following this one would have failed.
    //This proves that the ContentModel.PROP_IS_CONTENT_INDEXED property is being followed by the tracker

    //Try bulk loading
    Transaction txn2 = getTransaction(0, 550);
    List<Node> nodes = new ArrayList();
    List<NodeMetaData> nodeMetaDatas = new ArrayList();

    for (int i = 0; i < 550; i++) {
        Node n = getNode(txn2, acl, Node.SolrApiNodeStatus.UPDATED);
        NodeMetaData nm = getNodeMetaData(n, txn2, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
        nodes.add(n);
        nodeMetaDatas.add(nm);
    }

    logger.info("############################ Bulk Nodes:" + nodes.size());
    indexTransaction(txn2, nodes, nodeMetaDatas);
    waitForDocCount(
            new TermQuery(
                    new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")),
            552, MAX_WAIT_TIME);

    logger.info("#################### Passed Ninth Test ##############################");

    // One-node-per-transaction loop; exercises tracker throughput over 1000 transactions.
    for (int i = 0; i < 1000; i++) {
        Transaction txnX = getTransaction(0, 1);
        List<Node> nodesX = new ArrayList();
        List<NodeMetaData> nodeMetaDatasX = new ArrayList();
        Node n = getNode(txnX, acl, Node.SolrApiNodeStatus.UPDATED);
        NodeMetaData nm = getNodeMetaData(n, txnX, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
        nodesX.add(n);
        nodeMetaDatasX.add(nm);
        indexTransaction(txnX, nodesX, nodeMetaDatasX);
    }

    waitForDocCount(
            new TermQuery(
                    new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")),
            1552, MAX_WAIT_TIME);

    logger.info("#################### Passed Tenth Test ##############################");

    //Test the maintenance methods
    fileMetaData.setOwner("amy");
    reindexTransactionId(txn.getId());

    folderMetaData.setOwner("jill");
    reindexNodeId(folderNode.getId());

    // Wait for a document that has the new owner and the content populated.
    builder = new BooleanQuery.Builder();
    builder.add(new BooleanClause(
            new TermQuery(
                    new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")),
            BooleanClause.Occur.MUST));
    builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "amy")),
            BooleanClause.Occur.MUST));
    waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);

    logger.info("#################### Passed Eleventh Test ##############################");

    // Wait for a document that has the new owner and the content populated.
    builder = new BooleanQuery.Builder();
    builder.add(new BooleanClause(
            new TermQuery(
                    new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")),
            BooleanClause.Occur.MUST));
    builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "jill")),
            BooleanClause.Occur.MUST));
    waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);

    logger.info("#################### Passed Twelth Test ##############################");

    params = new ModifiableSolrParams();
    //Query for an id in the content field. The node id is automatically populated into the content field by test framework
    params.add("q", "t1:" + fileNode.getId());
    params.add("qt", "/afts");
    params.add("start", "0");
    params.add("rows", "6");
    params.add("sort", "id asc");
    params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
    req = areq(params,
            "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"amy\"], \"tenants\": [ \"\" ]}");
    assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");

    logger.info("#################### Passed Fourteenth Test ##############################");

    params = new ModifiableSolrParams();
    //Query for an id in the content field. The node id is automatically populated into the content field by test framework
    params.add("q", "t1:" + folderNode.getId());
    params.add("qt", "/afts");
    params.add("start", "0");
    params.add("rows", "6");
    params.add("sort", "id asc");
    params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
    req = areq(params,
            "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"jill\"], \"tenants\": [ \"\" ]}");
    assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + folderNode.getId() + "']");

    logger.info("#################### Passed Fifteenth Test ##############################");

    List<String> readers = aclReaders.getReaders();
    readers.set(0, "andy"); // Change the aclReader
    indexAclId(acl.getId());

    List<String> readers2 = aclReaders2.getReaders();
    readers2.set(0, "ice"); // Change the aclReader
    reindexAclId(acl2.getId());

    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 1, MAX_WAIT_TIME);
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 1, MAX_WAIT_TIME);
    //Ice should have replaced jim in acl2.
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 0, MAX_WAIT_TIME);

    logger.info("#################### Passed Sixteenth Test ##############################");

    params = new ModifiableSolrParams();
    //Query for an id in the content field. The node id is automatically populated into the content field by test framework
    params.add("q", "t1:" + fileNode.getId());
    params.add("qt", "/afts");
    params.add("start", "0");
    params.add("rows", "6");
    params.add("sort", "id asc");
    params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
    req = areq(params,
            "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"andy\"], \"tenants\": [ \"\" ]}");
    //FIX ME assertQ(req, "*[count(//doc)=1]","//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
    logger.info("#################### Passed Seventeenth Test ##############################");

    readers.set(0, "alan"); // Change the aclReader
    readers2.set(0, "paul"); // Change the aclReader

    reindexAclChangeSetId(aclChangeSet.getId()); //This should replace "andy" and "ice" with "alan" and "paul"

    //Test that "alan" and "paul" are in the index
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 1, MAX_WAIT_TIME);
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 1, MAX_WAIT_TIME);

    //Test that "andy" and "ice" are removed
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 0, MAX_WAIT_TIME);
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 0, MAX_WAIT_TIME);

    //Test Maintenance acl purge
    purgeAclId(acl2.getId());

    //Test Maintenance node purge
    purgeNodeId(fileNode.getId());

    purgeTransactionId(txn2.getId());

    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 0, MAX_WAIT_TIME); //paul should be purged
    waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content",
            Long.toString(fileNode.getId()))), 0, MAX_WAIT_TIME);
    waitForDocCount(
            new TermQuery(
                    new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")),
            1001, MAX_WAIT_TIME); // Reflects the purged node and transaction

    logger.info("#################### Passed Eighteenth Test ##############################");

    purgeAclChangeSetId(aclChangeSet.getId());
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 0, MAX_WAIT_TIME); //alan should be purged

    //Fix the error node
    errorMetaData.setNodeRef(new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID()));
    //Reload the error node.
    logger.info("Retry the error node");
    retry();
    //The error in the index should disappear.
    waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 0, MAX_WAIT_TIME);
    //And the error node should be present
    waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content",
            Long.toString(errorNode.getId()))), 1, MAX_WAIT_TIME);

    logger.info("#################### Passed Nineteenth Test ##############################");
}
From source file:com.ibm.xsp.webdav.DavXMLResponsePlain.java
/**
 * Emits a simple XML tag whose text is {@code date} rendered in an HTTP-style
 * format (e.g. "Sat, 26 Mar 2005 11:22:20 GMT") followed by a time-zone token.
 * Does nothing when {@code date} is null.
 *
 * NOTE(review): this extracts the time zone by splitting {@code date.toString()},
 * whose layout depends on the JVM/default locale — fragile; confirm it behaves on
 * non-English default locales before relying on it.
 *
 * @param TagName the XML element name to emit
 * @param date    the date to format; null is silently ignored
 */
public void dateTag(String TagName, Date date) {
    if (date == null) {
        return;
    }
    // Target shape: "Sat, 26 Mar 2005 11:22:20 GMT";
    // Date.toString() yields e.g. "Sat Mar 26 11:22:20 GMT 2005" — the time-zone
    // token is the second-to-last field.
    String[] dateParsed = date.toString().split(" ");
    String timeZone = dateParsed[dateParsed.length - 2];
    // Normalize Eastern European summer time to its base zone before the offset shift.
    if (timeZone.equals("EEST")) {
        timeZone = "EET";
    }
    if (timeZone.equals("EET") || timeZone.equals("ZE2")) {
        // Shift the instant back by the zone's raw offset and report it as GMT.
        // NOTE(review): this ignores DST offsets — presumably acceptable for the
        // WebDAV clients this targets; verify against consumers.
        date = new Date(date.getTime() - TimeZone.getTimeZone(timeZone).getRawOffset());
        timeZone = "GMT";
    }
    String lastmodFormat = "EE', 'd' 'MMM' 'yyyy' 'HH':'mm':'ss";
    //Locale.UK is the secret to make it work in Win7!
    SimpleDateFormat fmt = new SimpleDateFormat(lastmodFormat, Locale.UK);
    String datestring = fmt.format(date);
    this.simpleTag(TagName, datestring + " " + timeZone);
}
From source file:org.powertac.producer.pvfarm.IrradianceModelTest.java
@Test public void dataTrSpline() throws IOException { double[] airmass = { 0.5, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 10.0, 30.0, 40 }; double[] Tr = { 0.9385, 0.8973, 0.8830, 0.8696, 0.8572, 0.8455, 0.8344, 0.7872, 0.7673, 0.7493, 0.7328, 0.7177, 0.7037, 0.6907, 0.6108, 0.4364, 0.41 }; assert (Tr.length == airmass.length); PolynomialSplineFunction spline = new SplineInterpolator().interpolate(airmass, Tr); File file = new File("data/dataTrSpline.txt"); file.createNewFile();//from w w w.ja v a 2 s .c o m PrintWriter pw = new PrintWriter(new File("data/dataTrSpline.txt")); for (double i = 0.5; i < 40.0; i += 0.1) { pw.printf(Locale.UK, "%f,%f%n", i, spline.value(i)); } pw.close(); }
From source file:org.openmrs.util.OpenmrsUtilTest.java
/** * @see OpenmrsUtil#getDateFormat(Locale) *///from w ww .j a va2 s . c o m @Test public void getDateFormat_shouldReturnAPatternWithFourYCharactersInIt() { Assert.assertEquals("MM/dd/yyyy", OpenmrsUtil.getDateFormat(Locale.US).toLocalizedPattern()); Assert.assertEquals("dd/MM/yyyy", OpenmrsUtil.getDateFormat(Locale.UK).toLocalizedPattern()); Assert.assertEquals("tt.MM.uuuu", OpenmrsUtil.getDateFormat(Locale.GERMAN).toLocalizedPattern()); Assert.assertEquals("dd-MM-yyyy", OpenmrsUtil.getDateFormat(new Locale("pt", "pt")).toLocalizedPattern()); }
From source file:fr.juanwolf.mysqlbinlogreplicator.component.DomainClassAnalyzerTest.java
@Test public void instantiateField_should_set_the_string_with_the_default_locale_of_the_server_if_no_dateouput_() throws ReflectiveOperationException, ParseException { // Given/* ww w . j av a 2s.co m*/ // We need to reload the domainClassAnalyzer String date = "Wed Jul 22 13:00:00 CEST 2015"; Date dateExpected = BINLOG_DATETIME_FORMATTER.parse(date); SimpleDateFormat simpleDateFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy", Locale.UK); simpleDateFormat.setTimeZone(TimeZone.getTimeZone("Europe/Paris")); domainClassAnalyzer.postConstruct(); User user = (User) domainClassAnalyzer.generateInstanceFromName("user"); // When domainClassAnalyzer.instantiateField(user, user.getClass().getDeclaredField("dateString"), date, ColumnType.DATETIME.getCode(), "user"); // Then assertThat(BINLOG_DATETIME_FORMATTER.parse(user.getDateString())).isEqualTo(dateExpected); //assertThat(user.getDateString()).isEqualTo(date); }
From source file:ch.itemis.xdocker.ui.view.XdockerContainerBrowserView.java
@Override protected void processResult(XdockerJobStatus status) { if (status.isOK() && status.getArgument() instanceof List<?>) { Locale defLocale = Locale.getDefault(); try {//from ww w . java2 s . com Locale.setDefault(Locale.UK); // we support only English! PrettyTime pt = new PrettyTime(); @SuppressWarnings("unchecked") List<Container> images = (List<Container>) status.getArgument(); for (Object obj : images) { Container container = obj instanceof Container ? (Container) obj : null; if (container == null) return; // something is very odd here... go away! if (!match(container)) continue; // skip because unwanted! TableItem item = new TableItem(table, SWT.NONE); List<String> elements = new ArrayList<String>(); // elements.add(substring(container.getId(), 0, 12)); elements.add(container.getId()); elements.add(container.getImage()); elements.add(container.getCommand()); elements.add(pt.format(new Date(container.getCreated() * 1000))); elements.add(container.getStatus()); StringBuffer ports = new StringBuffer(); for (ContainerPort port : container.getPorts()) { boolean hasIP, hasPublPort; if (hasIP = isNotBlank(port.getIp())) { ports.append(port.getIp()); } if (hasPublPort = port.getPublicPort() != null) { if (hasIP) { ports.append(":"); } ports.append(String.valueOf(port.getPublicPort())); } if (port.getPrivatePort() != null) { if (hasIP || hasPublPort) { ports.append("->"); } ports.append(String.valueOf(port.getPrivatePort())); } if (port.getType() != null) { ports.append("/"); ports.append(port.getType()); } ports.append(", "); } elements.add(ports.toString().endsWith(", ") ? 
ports.toString().substring(0, ports.toString().length() - 2) : ports.toString()); elements.add(StringUtils.join(container.getNames())); item.setText(elements.toArray(new String[] {})); } for (int i = 0, n = table.getColumnCount(); i < n; i++) { table.getColumn(i).pack(); } table.getColumn(0).setWidth(120); table.getColumn(1).setWidth(120); table.getColumn(2).setWidth(120); } finally { Locale.setDefault(defLocale); // restore to default... } } }
From source file:ch.itemis.xdocker.ui.view.XdockerImageBrowserView.java
/**
 * Renders the docker image list carried by {@code status} into the view's table.
 * In local mode each row shows repo, tag, short id, relative creation time and
 * virtual size; in remote (search) mode each row shows name, description, stars,
 * official and trusted flags.
 *
 * NOTE(review): in local mode the row cells are appended conditionally (repoTag may
 * be null or short, the id may be missing), so columns can shift left when a value
 * is absent — presumably rare in practice; confirm against the table's column setup.
 *
 * @param status the finished job status; only processed when OK and carrying a List
 */
@Override
protected void processResult(XdockerJobStatus status) {
    if (status.isOK() && status.getArgument() instanceof List<?>) {
        if (isLocal()) {
            @SuppressWarnings("unchecked")
            List<Image> images = (List<Image>) status.getArgument();
            Locale defLocale = Locale.getDefault();
            try {
                Locale.setDefault(Locale.UK); // we support only English!
                PrettyTime pt = new PrettyTime();
                for (Object obj : images) {
                    Image image = obj instanceof Image ? (Image) obj : null;
                    if (image == null)
                        return; // unexpected element type: abandon rendering entirely
                    TableItem item = new TableItem(table, SWT.NONE);
                    List<String> elements = new ArrayList<String>();
                    // First repo tag, split into "repo" and "tag" around ':'.
                    String tagFragment = image.getRepoTags() != null && image.getRepoTags().length > 0
                            ? image.getRepoTags()[0]
                            : null;
                    String[] repoTag = tagFragment != null ? image.getRepoTags()[0].split(":") : null;
                    if (repoTag != null) {
                        if (repoTag.length > 0)
                            elements.add(repoTag[0]);
                        if (repoTag.length > 1)
                            elements.add(repoTag[1]);
                    }
                    // Short (12-char) image id, as shown by the docker CLI.
                    if (image.getId() != null && image.getId().length() >= 12)
                        elements.add(image.getId().substring(0, 12));
                    // image.getCreated() is in seconds; Date expects milliseconds.
                    elements.add(pt.format(new Date(image.getCreated() * 1000)));
                    elements.add(String.valueOf(image.getVirtualSize()));
                    item.setText(elements.toArray(new String[] {}));
                }
            } finally {
                Locale.setDefault(defLocale); // restore to default...
            }
        } else {
            @SuppressWarnings("unchecked")
            List<SearchItem> images = (List<SearchItem>) status.getArgument();
            for (Object obj : images) {
                SearchItem image = obj instanceof SearchItem ? (SearchItem) obj : null;
                if (image == null)
                    return; // unexpected element type: abandon rendering entirely
                TableItem item = new TableItem(table, SWT.NONE);
                List<String> elements = new ArrayList<String>();
                elements.add(image.getName());
                elements.add(image.getDescription());
                elements.add(String.valueOf(image.getStarCount()));
                elements.add(String.valueOf(image.isOfficial()));
                elements.add(String.valueOf(image.isTrusted()));
                item.setText(elements.toArray(new String[] {}));
            }
        }
        // resize columns
        for (int i = 0, n = table.getColumnCount(); i < n; i++) {
            if (isLocal() || i != 1) { // skip description because could be too long...
                table.getColumn(i).pack();
            }
        }
    }
}