List of usage examples for java.util.List toString()
public String toString()
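For reference, the format these examples rely on comes from AbstractCollection.toString(): the elements in iteration order, comma-separated, enclosed in square brackets. A minimal standalone sketch (the class name ListToStringDemo is illustrative only, not taken from any of the source files below):

import java.util.ArrayList;
import java.util.List;

public class ListToStringDemo {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        names.add("asterix");
        names.add("obelix");
        // List.toString() renders elements in order, comma-separated, in brackets
        System.out.println(names.toString()); // prints: [asterix, obelix]
    }
}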
From source file:controllers.WorkflowViewController.java
public String submitAndRegisterWf(String type, String id, Workflow workflow, List<DatasetBean> selectedDatasets)
        throws ConnectException, IllegalArgumentException, SubmitFailedException {
    SpaceAndProjectCodes spaceandproject = getSpaceAndProjects(type, id);
    String spaceCode = spaceandproject.space;
    String projectCode = spaceandproject.project;
    ParameterSet params = workflow.getParameters();
    List<Property> factors = new ArrayList<Property>();
    XMLParser xmlParser = new XMLParser();
    for (Map.Entry<String, Parameter> entry : workflow.getData().getData().entrySet()) {
        String key = entry.getKey();
        Parameter value = entry.getValue();
        if (key.contains("input")) {
            List<String> files = (List<String>) value.getValue();
            List<String> inputFiles = new ArrayList<String>();
            for (String f : files) {
                String[] splitted = f.split("/");
                String fileName = splitted[splitted.length - 1];
                inputFiles.add(fileName);
            }
            System.out.println(inputFiles.toString());
            String concat = String.join("; ", inputFiles);
            System.out.println(concat);
            Property newProperty = new Property("input_files", concat, PropertyType.Property);
            factors.add(newProperty);
        } else {
            Property newProperty = new Property("database",
                    value.getValue().toString().replace("/lustre_cfc/qbic/reference_genomes/", ""),
                    PropertyType.Property);
            factors.add(newProperty);
        }
    }
    for (String p : params.getParamNames()) {
        Parameter par = params.getParam(p);
        String[] splitted = par.getTitle().split("\\.");
        String parName = splitted[splitted.length - 1].replace(" ", "_").toLowerCase();
        Property newProperty = new Property(parName, par.getValue().toString(), PropertyType.Property);
        factors.add(newProperty);
    }
    String qProperties = "";
    try {
        qProperties = xmlParser.toString(xmlParser.createXMLFromProperties(factors));
        System.out.println(qProperties);
    } catch (JAXBException e) {
        e.printStackTrace();
    }
    String experimentCode = registerWFExperiment(spaceCode, projectCode, workflow.getExperimentType(),
            workflow.getID(), workflow.getVersion(), user, qProperties);
    List<String> parents = getConnectedSamples(selectedDatasets);
    String sampleType = workflow.getSampleType();
    String sampleCode = registerWFSample(spaceCode, projectCode, experimentCode, sampleType, parents,
            selectedDatasets);
    String openbisId = String.format("%s-%s-%s-%s", spaceCode, projectCode, experimentCode, sampleCode);
    LOGGER.info("User: " + user + " is submitting workflow " + workflow.getID() + " openbis id is:" + openbisId);
    String submit_id = submitter.submit(workflow, openbisId, user);
    LOGGER.info("Workflow has guse id: " + submit_id);
    setWorkflowID(spaceCode, projectCode, experimentCode, submit_id);
    return openbisId;
}
From source file:de.alpharogroup.lang.ObjectExtensionsTest.java
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test(enabled = false)
public void testCompareTo() throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
    final List<Person> persons = new ArrayList<>();
    final Person obelix = new Person();
    obelix.setGender(Gender.MALE);
    obelix.setName("obelix");
    final Person asterix = new Person();
    asterix.setGender(Gender.MALE);
    asterix.setName("asterix");
    final Person miraculix = new Person();
    miraculix.setGender(Gender.MALE);
    miraculix.setName("miraculix");
    final int i = ObjectExtensions.compareTo(asterix, obelix, "name");
    System.out.println(i);
    persons.add(obelix);
    persons.add(asterix);
    persons.add(miraculix);
    System.out.println("Unsorted Persons:");
    System.out.println(persons.toString());
    final Comparator defaultComparator = new BeanComparator("name");
    Collections.sort(persons, defaultComparator);
    System.out.println("Sorted Persons by name:");
    System.out.println(persons.toString());
    Collections.reverse(persons);
    System.out.println("Sorted Persons by name reversed:");
    System.out.println(persons.toString());
}
From source file:net.sf.jtmt.clustering.SimulatedAnnealingClusterer.java
/**
 * Cluster.
 *
 * @param collection the collection
 * @return the list
 */
public List<Cluster> cluster(DocumentCollection collection) {
    // 1) Get initial set of clusters...
    int numDocs = collection.size();
    int numClusters = (int) Math.floor(Math.sqrt(numDocs));
    List<Cluster> clusters = new ArrayList<Cluster>();
    for (int i = 0; i < numClusters; i++) {
        clusters.add(new Cluster("C" + i));
    }
    // ...and set initial temperature parameter T.
    double temperature = initialTemperature;
    // Randomly assign documents to the k clusters.
    if (randomizeDocs) {
        collection.shuffle();
    }
    for (int i = 0; i < numDocs; i++) {
        int targetCluster = i % numClusters;
        clusters.get(targetCluster).addDocument(collection.getDocumentNameAt(i),
                collection.getDocument(collection.getDocumentNameAt(i)));
    }
    log.debug("..Initial clusters: " + clusters.toString());
    // 2) Repeat until temperature is reduced to the minimum.
    while (temperature > finalTemperature) {
        double previousAverageRadius = 0.0D;
        List<Cluster> prevClusters = new ArrayList<Cluster>();
        // 2.1) Run loop NUM_LOOP times.
        for (int loop = 0; loop < numberOfLoops; loop++) {
            // 2.1.1) Find a new set of clusters by altering the membership of some documents.
            // pick two clusters at random
            List<Integer> randomClusterIds = getRandomClusterIds(clusters);
            // pick two documents out of the clusters at random
            List<String> randomDocumentNames = getRandomDocumentNames(collection, randomClusterIds, clusters);
            // exchange the two random documents among the random clusters.
            clusters.get(randomClusterIds.get(0)).removeDocument(randomDocumentNames.get(0));
            clusters.get(randomClusterIds.get(0)).addDocument(randomDocumentNames.get(1),
                    collection.getDocument(randomDocumentNames.get(1)));
            clusters.get(randomClusterIds.get(1)).removeDocument(randomDocumentNames.get(1));
            clusters.get(randomClusterIds.get(1)).addDocument(randomDocumentNames.get(0),
                    collection.getDocument(randomDocumentNames.get(0)));
            // 2.1.2) Compare the difference between the values of the new and old set of clusters.
            // If there is an improvement, accept the new set of clusters, otherwise accept the
            // new set of clusters with probability p.
            log.debug("..Intermediate clusters: " + clusters.toString());
            double averageRadius = getAverageRadius(clusters);
            if (averageRadius > previousAverageRadius) {
                // possible downhill move, calculate the probability of it being accepted
                double probability = Math.exp((previousAverageRadius - averageRadius) / temperature);
                if (probability < downhillProbabilityCutoff) {
                    // go back to the cluster before the changes
                    clusters.clear();
                    clusters.addAll(prevClusters);
                    continue;
                }
            }
            prevClusters.clear();
            prevClusters.addAll(clusters);
            previousAverageRadius = averageRadius;
        }
        // 2.2) Reduce the temperature based on the cooling schedule.
        temperature = temperature / 10;
    }
    // 3) Return the final set of clusters.
    return clusters;
}
From source file:com.thinkbiganalytics.datalake.authorization.RangerAuthorizationService.java
@Override
public void createReadOnlyHivePolicy(String categoryName, String feedName, List<String> securityGroupNames,
        String databaseName, List<String> tableNames) {
    RangerCreateOrUpdatePolicy rangerCreateOrUpdatePolicy = new RangerCreateOrUpdatePolicy();
    List<String> hivePermissions = new ArrayList<>();
    hivePermissions.add(HIVE_READ_ONLY_PERMISSION);
    String rangerHivePolicyName = getHivePolicyName(categoryName, feedName);
    String hiveDescription = "Ranger policy created for group list " + securityGroupNames.toString()
            + " for resource " + tableNames.toString();
    String hiveTables = convertListToString(tableNames, ",");
    rangerCreateOrUpdatePolicy = new RangerCreateOrUpdatePolicy();
    rangerCreateOrUpdatePolicy.setPolicyName(rangerHivePolicyName);
    rangerCreateOrUpdatePolicy.setDatabases(databaseName);
    rangerCreateOrUpdatePolicy.setTables(hiveTables);
    rangerCreateOrUpdatePolicy.setColumns(HIVE_COLUMN_PERMISSION);
    rangerCreateOrUpdatePolicy.setUdfs("");
    rangerCreateOrUpdatePolicy.setDescription(hiveDescription);
    rangerCreateOrUpdatePolicy.setRepositoryName(rangerConnection.getHiveRepositoryName());
    rangerCreateOrUpdatePolicy.setRepositoryType(HIVE_REPOSITORY_TYPE);
    rangerCreateOrUpdatePolicy.setIsAuditEnabled(IsAuditable);
    rangerCreateOrUpdatePolicy.setIsEnabled(IsEnable);
    rangerCreateOrUpdatePolicy.setPermMapList(securityGroupNames, hivePermissions);
    try {
        rangerRestClient.createPolicy(rangerCreateOrUpdatePolicy);
    } catch (Exception e) {
        log.error("Error creating Hive Ranger policy", e);
        throw new RuntimeException("Error creating Hive Ranger policy", e);
    }
}
From source file:ch.epfl.eagle.daemon.nodemonitor.NodeMonitor.java
public void requestTaskReservations() {
    LOG.info(Logging.functionCall());
    // 1. call sendTaskReservations to n workers
    List<InetSocketAddress> listBackends = getCleanWorkersList();
    // Get the big partition
    LOG.debug("STEALING: Initial node list size: " + listBackends.size());
    int last_nodeID = (int) (listBackends.size() * bigPartition / 100);
    listBackends = listBackends.subList(0, last_nodeID);
    LOG.debug("STEALING: Using nodes from 0 to " + last_nodeID + ". List consists of : " + listBackends);
    LOG.debug("STEALING: New list of backends " + listBackends.toString());
    Collections.shuffle(listBackends);
    InetSocketAddress chosenBackend = listBackends.get(0);
    try {
        InternalService.AsyncClient client = nodeMonitorClientPool.borrowClient(chosenBackend);
        stealingAttempts++;
        LOG.debug("STEALING: Launching sendTasksReservations on node: " + chosenBackend + " stealing attempts"
                + stealingAttempts);
        client.sendTasksReservations(new SendTaskReservationsCallback(chosenBackend, client));
        LOG.debug("STEALING: Finished launching sendTasksReservations on node: " + chosenBackend);
    } catch (Exception e) {
        LOG.error("Error enqueuing task on node " + chosenBackend.toString() + ":" + e);
    }
}
From source file:com.ibm.cloud.appid.android.LicenseCheck.java
@Test
public void testLicensesHeaders() {
    try {
        List<String> missingLicenseFiles = new ArrayList<>();
        File sourceDir = new File(new File("").getAbsolutePath() + "/lib/src/main/java/com/ibm/");
        File sourceTestDir = new File(new File("").getAbsolutePath() + "/lib/src/test/java/com/ibm/");
        List<File> sourceFiles = getListFiles(sourceDir);
        List<File> testsFiles = getListFiles(sourceTestDir);
        sourceFiles.addAll(testsFiles);
        for (File file : sourceFiles) {
            FileInputStream fisTargetFile = new FileInputStream(file);
            String targetFileStr = IOUtils.toString(fisTargetFile, "UTF-8");
            if (!targetFileStr.startsWith(LICENSE)) {
                missingLicenseFiles.add(file.getPath());
            }
        }
        Assert.assertTrue(
                "The following files missing the IBM License header: " + missingLicenseFiles.toString(),
                missingLicenseFiles.isEmpty());
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:com.hp.mqm.atrf.core.configuration.FetchConfiguration.java
public void validateProperties() {
    // MUST PARAMETERS
    validateMustParameter(ALM_USER_PARAM);
    validateMustParameter(ALM_PROJECT_PARAM);
    validateMustParameter(ALM_DOMAIN_PARAM);
    validateMustParameter(ALM_SERVER_URL_PARAM);
    if (StringUtils.isEmpty(getOutputFile())) {
        // MUST
        validateMustParameter(OCTANE_USER_PARAM);
        validateMustParameter(OCTANE_WORKSPACE_ID_PARAM);
        validateMustParameter(OCTANE_SHAREDSPACE_ID_PARAM);
        validateMustParameter(OCTANE_SERVER_URL_PARAM);
        // INTEGER
        validateIntegerParameter(OCTANE_WORKSPACE_ID_PARAM);
        validateIntegerParameter(OCTANE_SHAREDSPACE_ID_PARAM);
    }
    // INTEGER
    validateIntegerParameter(SYNC_BULK_SIZE_PARAM);
    validateIntegerParameter(SYNC_SLEEP_BETWEEN_POSTS_PARAM);
    validateIntegerParameter(PROXY_PORT_PARAM);

    // CUSTOM VALIDATIONS
    // ALM_RUN_FILTER_START_FROM_ID
    String startFromIdValue = getAlmRunFilterStartFromId();
    if (StringUtils.isNotEmpty(startFromIdValue)) {
        boolean isValid = false;
        if (ALM_RUN_FILTER_START_FROM_ID_LAST_SENT.equalsIgnoreCase(startFromIdValue)) {
            isValid = true;
        } else {
            try {
                Integer.parseInt(startFromIdValue);
                isValid = true;
            } catch (NumberFormatException e) {
                isValid = false;
            }
        }
        if (!isValid) {
            throw new RuntimeException(String.format(
                    "Configuration parameter '%s' can hold integer value or '%s' string, but contains '%s'",
                    ALM_RUN_FILTER_START_FROM_ID_PARAM, ALM_RUN_FILTER_START_FROM_ID_LAST_SENT, startFromIdValue));
        }
    }

    // ALM_RUN_FILTER_START_FROM_DATE
    String startFromDateValue = getAlmRunFilterStartFromDate();
    if (StringUtils.isNotEmpty(startFromDateValue)) {
        SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
        try {
            Date d = dateFormat.parse(startFromDateValue);
            if (d.after(dateFormat.parse("2099-12-31")) || d.before(dateFormat.parse("1900-01-01"))) {
                throw new RuntimeException(
                        String.format("Configuration parameter '%s' should be in range of 1900-2100",
                                ALM_RUN_FILTER_START_FROM_DATE_PARAM));
            }
            // The date 2017-12-06 is out of <1900-2100> range
        } catch (ParseException e) {
            throw new RuntimeException(String.format(
                    "Configuration parameter '%s' should contain date in the following format '%s'",
                    ALM_RUN_FILTER_START_FROM_DATE_PARAM, DATE_FORMAT));
        }
    }

    // ALM_RUN_FILTER_RELATED_ENTITY_ENTITY_ID_PARAM
    String relatedEntityType = getAlmRunFilterRelatedEntityType();
    String relatedEntityId = getAlmRunFilterRelatedEntityId();
    if (StringUtils.isNotEmpty(relatedEntityType) && StringUtils.isEmpty(relatedEntityId)) {
        throw new RuntimeException(String.format(
                "Configuration contains value for parameter '%s', but missing value for parameter '%s'",
                ALM_RUN_FILTER_RELATED_ENTITY_TYPE_PARAM, ALM_RUN_FILTER_RELATED_ENTITY_ID_PARAM));
    }
    if (StringUtils.isNotEmpty(relatedEntityId) && StringUtils.isEmpty(relatedEntityType)) {
        throw new RuntimeException(String.format(
                "Configuration contains value for parameter '%s', but missing value for parameter '%s'",
                ALM_RUN_FILTER_RELATED_ENTITY_ID_PARAM, ALM_RUN_FILTER_RELATED_ENTITY_TYPE_PARAM));
    }
    if (StringUtils.isNotEmpty(relatedEntityType)) {
        relatedEntityType = relatedEntityType.toLowerCase();
        List<String> allowedEntityTypes = Arrays.asList("test", "testset", "sprint", "release");
        if (!allowedEntityTypes.contains(relatedEntityType)) {
            throw new RuntimeException(String.format(
                    "Configuration contains illegal value for parameter '%s', allowed values are %s",
                    ALM_RUN_FILTER_RELATED_ENTITY_TYPE_PARAM, allowedEntityTypes.toString()));
        }
    }

    // FETCH LIMIT
    String fetchLimitStr = getProperty(ALM_RUN_FILTER_FETCH_LIMIT_PARAM);
    int fetchLimit = ALM_RUN_FILTER_FETCH_LIMIT_DEFAULT;
    if (StringUtils.isNotEmpty(fetchLimitStr)) {
        try {
            fetchLimit = Integer.parseInt(fetchLimitStr);
        } catch (Exception e) {
            throw new RuntimeException(String.format(
                    "Configuration contains illegal value for parameter '%s', the value should be integer in range of 1-200000",
                    ALM_RUN_FILTER_FETCH_LIMIT_PARAM));
        }
        if (fetchLimit > ALM_RUN_FILTER_FETCH_LIMIT_MAX || fetchLimit < ALM_RUN_FILTER_FETCH_LIMIT_MIN) {
            throw new RuntimeException(String.format(
                    "Configuration contains illegal value for parameter '%s', the value should be integer in range of 1-200000",
                    ALM_RUN_FILTER_FETCH_LIMIT_PARAM));
        }
    } else {
        fetchLimit = ALM_RUN_FILTER_FETCH_LIMIT_DEFAULT;
    }
    setProperty(ALM_RUN_FILTER_FETCH_LIMIT_PARAM, Integer.toString(fetchLimit));

    // BULK SIZE
    String bulkSizeStr = getProperty(SYNC_BULK_SIZE_PARAM);
    int bulkSize = SYNC_BULK_SIZE_DEFAULT;
    if (StringUtils.isNotEmpty(bulkSizeStr)) {
        try {
            bulkSize = Integer.parseInt(bulkSizeStr);
            if (bulkSize < SYNC_BULK_SIZE_MIN || bulkSize > SYNC_BULK_SIZE_MAX) {
                bulkSize = SYNC_BULK_SIZE_DEFAULT;
            }
        } catch (Exception e) {
            bulkSize = SYNC_BULK_SIZE_DEFAULT;
        }
    }
    setProperty(SYNC_BULK_SIZE_PARAM, Integer.toString(bulkSize));

    // SLEEP
    String sleepBetweenPostsStr = getProperty(SYNC_SLEEP_BETWEEN_POSTS_PARAM);
    int sleepBetweenPosts = SYNC_SLEEP_BETWEEN_POSTS_DEFAULT;
    if (StringUtils.isNotEmpty(sleepBetweenPostsStr)) {
        try {
            sleepBetweenPosts = Integer.parseInt(sleepBetweenPostsStr);
            if (sleepBetweenPosts < SYNC_SLEEP_BETWEEN_POSTS_MIN
                    || sleepBetweenPosts > SYNC_SLEEP_BETWEEN_POSTS_MAX) {
                sleepBetweenPosts = SYNC_SLEEP_BETWEEN_POSTS_DEFAULT;
            }
        } catch (Exception e) {
            sleepBetweenPosts = SYNC_SLEEP_BETWEEN_POSTS_DEFAULT;
        }
    }
    setProperty(SYNC_SLEEP_BETWEEN_POSTS_PARAM, Integer.toString(sleepBetweenPosts));
}
From source file:com.chicm.cmraft.core.DefaultNodeConnection.java
@Override
public AppendEntriesResponse appendEntries(long term, ServerInfo leaderId, long leaderCommit, long prevLogIndex,
        long prevLogTerm, List<RaftLogEntry> entries) throws ServiceException {
    Preconditions.checkNotNull(entries);
    AppendEntriesRequest.Builder builder = AppendEntriesRequest.newBuilder();
    builder.setTerm(term);
    builder.setLeaderId(leaderId.toServerId());
    builder.setLeaderCommit(leaderCommit);
    builder.setPrevLogIndex(prevLogIndex);
    builder.setPrevLogTerm(prevLogTerm);
    builder.addAllEntries(entries);
    try {
        LOG.debug(leaderId + "making appendEntries call to: " + getRemoteServer());
        AppendEntriesResponse response = rpcClient.getStub().appendEntries(null, builder.build());
        return response;
    } catch (Exception e) {
        LOG.error("exception", e);
        LOG.error("remote server:" + getRemoteServer());
        LOG.error("Log entries:" + entries.toString());
    }
    return null;
}
From source file:org.crce.interns.dao.impl.SendEmailDAOImpl.java
/**
 * @param receivers
 * @return
 */
@Override
public String fetchStreamStudents(String receivers) {
    Session session = sessionFactory.openSession();
    String senderList = "";
    String SQL_QUERY = "Select userName from ProfessionalProfile where branch like '" + receivers
            + "' and year like '" + Integer.toString(Calendar.getInstance().get(Calendar.YEAR) + 1) + "'";
    Query query = session.createQuery(SQL_QUERY);
    List list = query.list();
    if (!list.isEmpty()) {
        System.out.println(list);
    }
    for (Object list1 : list) {
        System.out.println("senderList at start of loop " + senderList);
        System.out.println(list1);
        String recipient = list1.toString();
        SQL_QUERY = "select emailId from PersonalProfile where userName like '" + recipient + "'";
        query = session.createQuery(SQL_QUERY);
        list = query.list();
        recipient = list.toString();
        senderList = senderList.concat(recipient + " ");
    }
    System.out.println("final senderList " + senderList);
    return senderList;
}
From source file:org.crce.interns.dao.impl.SendEmailDAOImpl.java
/**
 * @return String
 */
@Override
public String fetchCompsSTPC() {
    Session session = sessionFactory.openSession();
    String senderList = "";
    //String SQL_QUERY="Select emailId from PersonalProfile where userName like (select u.userName from UserDetails as u,ProfessionalProfile as p where u.userName=p.userName and u.roleId like '3' and p.branch like 'Computer Engineering')";
    String SQL_QUERY = "select u.userName from UserDetails as u,ProfessionalProfile as p where u.userName=p.userName and u.roleId like '3' and p.branch like 'Computer Engineering' and p.year like '"
            + Integer.toString(Calendar.getInstance().get(Calendar.YEAR) + 1) + "'";
    Query query = session.createQuery(SQL_QUERY);
    List list = query.list();
    if (!list.isEmpty()) {
        System.out.println(list);
    }
    for (Object list1 : list) {
        System.out.println("senderList at start of loop " + senderList);
        System.out.println(list1);
        String recipient = list1.toString();
        SQL_QUERY = "select emailId from PersonalProfile where userName like '" + recipient + "'";
        query = session.createQuery(SQL_QUERY);
        list = query.list();
        recipient = list.toString();
        senderList = senderList.concat(recipient + " ");
    }
    System.out.println("final senderList " + senderList);
    session.close();
    return senderList;
}