List of usage examples for java.lang.Long.intValue()
public int intValue()
From source file:fragment.web.WorkflowControllerTest.java
/** * Description: Private method to create a Tenant for the given Account Type * //from www.j av a 2 s . c o m * @author vinayv */ private Tenant createTenant(Long accountType) throws Exception { int prevTList = tenantDAO.count(); int prevUList = userDAO.count(); int prevBTList = businessTransactionDAO.count(); AccountType type = accountTypeDAO.find(accountType); Tenant tenant = new Tenant("Tenant" + random.nextInt(), type, getRootUser(), randomAddress(), true, currencyValueService.locateBYCurrencyCode("USD"), getPortalUser()); List<Country> countryList = countryDAO.findAll(null); Profile profile = profileDAO.find(8L); User user = new User("firstName", "lastName", "nageswarareddy.poli@citrix.com", "username14", "Portal123#", "91-9885098850", "GMT", null, profile, getRootUser()); user.setAddress(randomAddress()); tenant.setOwner(user); com.citrix.cpbm.access.User newUser = (com.citrix.cpbm.access.User) CustomProxy.newInstance(user); com.citrix.cpbm.access.Tenant newTenant = (com.citrix.cpbm.access.Tenant) CustomProxy.newInstance(tenant); TenantForm form = new TenantForm(); form.setAccountTypeId(accountType.toString()); form.setUser(newUser); form.setTenant(newTenant); form.setCountryList(countryList); // Trial AccountType ID is 5 if (accountType.intValue() == 5) { CampaignPromotion promo = campaignPromotionDAO.find(2L); promo.setTrial(true); campaignPromotionDAO.merge(promo); form.setTrialCode(promo.getCode()); } BeanPropertyBindingResult result = new BeanPropertyBindingResult(form, "validation"); String tenantCreation = tenantsController.create(form, result, map, status, request); logger.debug("RESULT :" + tenantCreation); Assert.assertTrue("verifying the form has zero error", tenantCreation.contains("0 errors")); Long newTenantId = (Long) map.get("tenantId"); Tenant obtainedTenant = tenantDAO.find(newTenantId); // If AccountType is Trial then linking the TrialAccount row to newly created Tenant if (accountType.intValue() == 5) { List<TrialAccount> 
trialAccountList = trialAccountDAO.findAll(null); for (int i = 0; i < trialAccountList.size(); i++) { if (trialAccountList.get(i).getTenant().equals(obtainedTenant)) { obtainedTenant.setTrialAccount(trialAccountList.get(0)); tenantDAO.merge(obtainedTenant); } } } obtainedTenant = tenantDAO.find(newTenantId); Assert.assertEquals(tenant.getOwner().getUsername(), obtainedTenant.getOwner().getUsername()); int afterTList = tenantDAO.count(); int afterUList = userDAO.count(); int afterBTList = businessTransactionDAO.count(); Assert.assertEquals("", prevTList + 1, afterTList); Assert.assertEquals("", prevUList + 1, afterUList); Assert.assertEquals("", prevBTList + 1, afterBTList); logger.debug("Tenants before ::" + prevTList + ", Tenants After :: " + afterTList); logger.debug("Users before ::" + prevUList + ", Users After :: " + afterUList); logger.debug( "businessTransactions before ::" + prevBTList + ", businessTransactions After :: " + afterBTList); List<PortalEvent> eventList = eventListener.getEvents(); Assert.assertTrue(eventList.get(0).getPayload() instanceof TenantActivation); return obtainedTenant; }
From source file:it.geosolutions.geobatch.destination.ingestion.GateIngestionProcess.java
/**
 * Imports the gate data from the exported XML file into the database, transit
 * by transit, recording per-element errors in the metadata log.
 *
 * @param ignorePks     when true, pks in the xml file are ignored and new ones
 *                      are created with a sequence manager
 * @param copyFileAtEnd when true, the input file is copied to successPath or
 *                      failPath depending on whether any errors occurred
 * @param successPath   destination folder for the file when import had no errors
 * @param failPath      destination folder for the file when import had errors
 * @return resume of the operation in a map (error/processed/total counts and
 *         the list of inserted ids)
 * @throws IOException on unreadable input or when re-thrown after logging
 */
public Map<String, Object> doProcess(boolean ignorePks, boolean copyFileAtEnd, String successPath,
        String failPath) throws IOException {
    Map<String, Object> result = new HashMap<String, Object>();
    List<Long> ids = new ArrayList<Long>();
    reset();
    this.ignorePks = ignorePks;
    if (isValid()) {
        // -1 marks "no process phase open yet" for the finally block below.
        int process = -1;
        int trace = -1;
        int errors = 0;
        int total = 0;
        int processed = 0;
        float percent = 0;
        try {
            process = createProcess();
            // write log for the imported file
            trace = logFile(process, DEFAULT_GATE_TYPE, PARTNER, PARTNER_CODE, date, false);
            // Read xml file; any unmarshalling failure is logged and re-thrown as IOException.
            ExportData exportData = null;
            try {
                exportData = JAXB.unmarshal(file, ExportData.class);
            } catch (Exception e) {
                String msg = "Unknown file format for gate ingestion";
                updateProgress(90, msg);
                metadataHandler.logError(trace, errors, msg, getError(e), 0);
                throw new IOException(msg);
            }
            // Exactly one <Transits> container is expected; anything else is rejected.
            if (exportData != null && exportData.getTransits().size() == 1) {
                Transits transits = exportData.getTransits().get(0);
                total = transits.getTransit().size();
                float ftot = new Float(total);
                // Insert one by one; a failure on one transit does not stop the rest.
                for (Transit transit : transits.getTransit()) {
                    Long id = null;
                    try {
                        // Update status
                        inputCount++;
                        float fproc = new Float(++processed);
                        String msg = "Importing data in transit table: " + (processed) + "/" + total;
                        // NOTE(review): the post-increment on fproc has no effect —
                        // fproc is never read again after this expression.
                        percent = (fproc++ / ftot);
                        // Only report progress every 100 elements to limit log volume.
                        if (processed % 100 == 0) {
                            updateProgress(percent * 100, msg);
                            if (LOGGER.isInfoEnabled()) {
                                LOGGER.info(msg);
                            }
                        }
                        // insert
                        id = createTransit(transit);
                        // add to result
                        if (id != null) {
                            // Trace insert
                            if (LOGGER.isTraceEnabled()) {
                                LOGGER.trace("Correctly insert id " + id);
                            }
                            ids.add(id);
                        } else {
                            if (LOGGER.isTraceEnabled()) {
                                LOGGER.trace("Error on gate ingestion for element " + inputCount);
                            }
                        }
                    } catch (Exception e) {
                        errors++;
                        // If the id was already assigned, log the error against it;
                        // otherwise log against element ordinal (target id 0).
                        if (id != null) {
                            metadataHandler.logError(trace, errors, "Error on gate ingestion", getError(e),
                                    id.intValue());
                        } else {
                            metadataHandler.logError(trace, errors, "Error importing element " + inputCount,
                                    getError(e), 0);
                        }
                    }
                }
            } else {
                LOGGER.error("Incorrect format for ingestion");
            }
            // all complete
            importFinished(total, errors, "Data imported in transit table");
            metadataHandler.updateLogFile(trace, total, errors, true);
        } catch (IOException e) {
            errors++;
            metadataHandler.logError(trace, errors, "Error importing data", getError(e), 0);
            // close current process phase
            process = closeProcess(process);
            throw e;
        } finally {
            if (process != -1) {
                // close current process phase
                metadataHandler.closeProcessPhase(process, "A");
            }
        }
        // save counts
        result.put(ERROR_COUNT, errors);
        result.put(PROCESSED_COUNT, processed);
        result.put(TOTAL_COUNT, total);
        if (copyFileAtEnd) {
            if (errors > 0) {
                copyFile(file, failPath);
            } else {
                copyFile(file, successPath);
            }
        }
        // close current process phase
        process = closeProcess(process);
    }
    // save ids
    result.put(IDS, ids);
    return result;
}
From source file:com.impetus.client.cassandra.pelops.PelopsDataHandler.java
/**
 * Builds an entity of {@code clazz} from a Thrift row of counter super
 * columns, populating embedded collections, embedded objects and relation
 * values via reflection.
 *
 * @param clazz         entity class to instantiate
 * @param m             entity metadata (column/super-column mappings)
 * @param tr            thrift row holding the counter super columns
 * @param relationNames names of columns that represent relations, may be null
 * @param isWrapReq     when true and relations were found, the entity is
 *                      wrapped in an EnhanceEntity
 * @return the populated entity, or an EnhanceEntity wrapper around it
 * @throws Exception on instantiation or property-access failure
 */
public Object fromCounterSuperColumnThriftRow(Class clazz, EntityMetadata m, ThriftRow tr,
        List<String> relationNames, boolean isWrapReq) throws Exception {
    // Instantiated lazily on the first super column encountered.
    Object entity = null;
    // NOTE(review): foreignKeysMap is populated nowhere in this method — appears unused.
    Map<String, Set<String>> foreignKeysMap = new HashMap<String, Set<String>>();
    // Get a name->field map for columns and super-columns.
    Map<String, Field> columnNameToFieldMap = new HashMap<String, Field>();
    Map<String, Field> superColumnNameToFieldMap = new HashMap<String, Field>();
    MetadataUtils.populateColumnAndSuperColumnMaps(m, columnNameToFieldMap, superColumnNameToFieldMap);
    // Collects embedded elements across all variable super columns of the row.
    Collection embeddedCollection = null;
    Field embeddedCollectionField = null;
    Map<String, Object> relations = new HashMap<String, Object>();
    for (CounterSuperColumn sc : tr.getCounterSuperColumns()) {
        if (entity == null) {
            entity = clazz.newInstance();
            // Set row-key
            PropertyAccessorHelper.setId(entity, m, tr.getId());
        }
        String scName = PropertyAccessorFactory.STRING.fromBytes(String.class, sc.getName());
        String scNamePrefix = null;
        // Case 1: super column is variable in number (name#sequence format) —
        // it is an element of an embedded collection.
        if (scName.indexOf(Constants.EMBEDDED_COLUMN_NAME_DELIMITER) != -1) {
            scNamePrefix = MetadataUtils.getEmbeddedCollectionPrefix(scName);
            embeddedCollectionField = superColumnNameToFieldMap.get(scNamePrefix);
            if (embeddedCollection == null) {
                embeddedCollection = MetadataUtils.getEmbeddedCollectionInstance(embeddedCollectionField);
            }
            Object embeddedObject = MetadataUtils.getEmbeddedGenericObjectInstance(embeddedCollectionField);
            for (CounterColumn column : sc.getColumns()) {
                String name = PropertyAccessorFactory.STRING.fromBytes(String.class, column.getName());
                Long value = column.getValue();
                if (value == null) {
                    continue;
                }
                Field columnField = columnNameToFieldMap.get(name);
                if (columnField != null) {
                    // Counter values arrive as Long; narrow for int/Integer fields.
                    // (The trailing value != null check is redundant after the
                    // continue above, but kept as-is.)
                    if ((columnField.getType().equals(Integer.class)
                            || columnField.getType().equals(int.class)) && value != null) {
                        int colValue = value.intValue();
                        PropertyAccessorHelper.set(embeddedObject, columnField, colValue);
                    } else {
                        PropertyAccessorHelper.set(embeddedObject, columnField, value);
                    }
                } else if (relationNames != null && !relationNames.isEmpty() && relationNames.contains(name)) {
                    // Unknown field but named relation: record its value as a string.
                    String valueAsStr = value.toString();
                    relations.put(name, valueAsStr);
                }
            }
            embeddedCollection.add(embeddedObject);
            // Add this embedded object to cache
            ElementCollectionCacheManager.getInstance().addElementCollectionCacheMapping(tr.getId(),
                    embeddedObject, scName);
        } else {
            // Case 2: fixed super column — create the embedded entity and
            // attach it to the parent entity.
            Field superColumnField = superColumnNameToFieldMap.get(scName);
            Object superColumnObj = null;
            if (superColumnField != null
                    || (relationNames != null && !relationNames.isEmpty() && relationNames.contains(scName))) {
                Class superColumnClass = superColumnField != null ? superColumnField.getType() : null;
                for (CounterColumn column : sc.getColumns()) {
                    String name = PropertyAccessorFactory.STRING.fromBytes(String.class, column.getName());
                    Long value = column.getValue();
                    Field columnField = columnNameToFieldMap.get(name);
                    if (columnField != null) {
                        try {
                            if ((columnField.getType().equals(Integer.class)
                                    || columnField.getType().equals(int.class)) && value != null) {
                                int colValue = value.intValue();
                                superColumnObj = populateColumnValue(superColumnClass, colValue, columnField);
                            } else {
                                superColumnObj = populateColumnValue(superColumnClass, value, columnField);
                            }
                        } catch (PropertyAccessException e) {
                            // An entity column stored inside a super column family is
                            // represented as a super column holding a single column of
                            // the same name; fall back to using the raw value.
                            log.debug(e.getMessage()
                                    + ". Possible case of entity column in a super column family. Will be treated as a super column.");
                            com.impetus.kundera.metadata.model.Column col = m.getColumn(name);
                            if (col != null) {
                                superColumnObj = value;
                            }
                        }
                    } else {
                        // Relation column inside the super column.
                        String valueAsStr = value.toString();
                        relations.put(name, valueAsStr);
                    }
                }
            }
            if (superColumnField != null) {
                PropertyAccessorHelper.set(entity, superColumnField, superColumnObj);
            }
        }
    }
    if (embeddedCollection != null && !embeddedCollection.isEmpty()) {
        PropertyAccessorHelper.set(entity, embeddedCollectionField, embeddedCollection);
    }
    // Wrap only when the caller asked for it AND relations were actually found.
    return isWrapReq && relations != null && !relations.isEmpty()
            ? new EnhanceEntity(entity, tr.getId(), relations)
            : entity;
}
From source file:com.krawler.spring.crm.common.crmManagerDAOImpl.java
@Override public KwlReturnObject getAssignedLeadRoutingUsers(String companyid, HashMap<String, Object> requestParams) throws ServiceException { ArrayList filter_params = new ArrayList(); String selCountQuery = "select count(distinct u.userID) "; // String orderBy = " order by u.firstName,u.lastName"; StringBuilder hql = new StringBuilder( "from User u where u.userID in (select user.userID from LeadRoutingUsers where user.company.companyID = ? order by ordernum) and u.deleteflag=0 "); filter_params.add(companyid);//from w ww.j av a 2 s . c o m String filterQuery = ""; if (requestParams.containsKey("ss") && requestParams.get("ss") != null) { String ss = requestParams.get("ss").toString(); if (!StringUtil.isNullOrEmpty(ss)) { String[] searchcol = new String[] { "u.lastName", "u.firstName" }; StringUtil.insertParamSearchString(filter_params, ss, searchcol.length); filterQuery = StringUtil.getSearchString(ss, "and", searchcol); } } String countQuery = selCountQuery + hql + filterQuery; List ll = executeQuery(countQuery, filter_params.toArray()); Long dl = 0l; if (ll != null && !ll.isEmpty()) { dl = (Long) ll.get(0); } if (requestParams.containsKey("pagingFlag") && Boolean.TRUE.equals(requestParams.get("pagingFlag"))) { int start = 0; int limit = 15; boolean ispaging = requestParams.containsKey("start") && requestParams.containsKey("limit") && !StringUtil.isNullOrEmpty(requestParams.get("start").toString()) && !StringUtil.isNullOrEmpty(requestParams.get("start").toString()); if (ispaging) { start = Integer.parseInt(requestParams.get("start").toString()); limit = Integer.parseInt(requestParams.get("limit").toString()); } ll = executeQueryPaging(hql + filterQuery, filter_params.toArray(), new Integer[] { start, limit }); } else { ll = executeQuery(hql + filterQuery, filter_params.toArray()); } return new KwlReturnObject(true, KWLErrorMsgs.S01, "", ll, dl.intValue()); }
From source file:com.impetus.client.cassandra.pelops.PelopsDataHandler.java
/**
 * Builds an entity of {@code clazz} from a Thrift row of plain counter
 * columns, setting mapped fields reflectively and collecting relation values.
 *
 * @param clazz         entity class to instantiate
 * @param m             entity metadata (column mappings)
 * @param thriftRow     thrift row holding the counter columns
 * @param relationNames names of columns that represent relations, may be null
 * @param isWrapperReq  when true and relations were found, the entity is
 *                      wrapped in an EnhanceEntity
 * @return the populated entity, or an EnhanceEntity wrapper around it
 * @throws Exception on instantiation failure
 */
public Object fromCounterColumnThriftRow(Class<?> clazz, EntityMetadata m, ThriftRow thriftRow,
        List<String> relationNames, boolean isWrapperReq) throws Exception {
    // Instantiated lazily on the first counter column encountered; rows with
    // no counter columns yield a null entity.
    Object entity = null;
    Map<String, Object> relations = new HashMap<String, Object>();
    // Iterate through each column
    for (CounterColumn c : thriftRow.getCounterColumns()) {
        if (entity == null) {
            entity = clazz.newInstance();
            // Set row-key
            PropertyAccessorHelper.setId(entity, m, thriftRow.getId());
        }
        String thriftColumnName = PropertyAccessorFactory.STRING.fromBytes(String.class, c.getName());
        Long thriftColumnValue = c.getValue();
        // Null counter values are skipped entirely.
        if (null == thriftColumnValue) {
            continue;
        }
        // Check if this is a property, or a column representing foreign keys.
        com.impetus.kundera.metadata.model.Column column = m.getColumn(thriftColumnName);
        if (column != null) {
            try {
                // Counter values arrive as Long; narrow for int/Integer fields.
                // (The trailing thriftColumnValue != null check is redundant after
                // the continue above, but kept as-is.)
                if ((column.getField().getType().equals(Integer.class)
                        || column.getField().getType().equals(int.class)) && thriftColumnValue != null) {
                    PropertyAccessorHelper.set(entity, column.getField(), thriftColumnValue.intValue());
                } else {
                    PropertyAccessorHelper.set(entity, column.getField(), thriftColumnValue);
                }
            } catch (PropertyAccessException pae) {
                // Best-effort population: a field that cannot be set is logged and skipped.
                log.warn(pae.getMessage());
            }
        } else {
            // Unknown field but named relation: record its value as a string.
            if (relationNames != null && !relationNames.isEmpty() && relationNames.contains(thriftColumnName)) {
                String value = thriftColumnValue.toString();
                relations.put(thriftColumnName, value);
            }
        }
    }
    // Wrap only when the caller asked for it AND relations were actually found.
    return isWrapperReq && relations != null && !relations.isEmpty()
            ? new EnhanceEntity(entity, thriftRow.getId(), relations)
            : entity;
}
From source file:com.krawler.spring.crm.common.crmManagerDAOImpl.java
/**
 * Fetches the company's users NOT assigned for lead routing, with an optional
 * name search ("ss") and optional paging ("pagingFlag" with "start"/"limit"),
 * ordered by first then last name.
 *
 * @param companyid     company whose non-lead-routing users are looked up
 * @param requestParams optional request options: "ss" (search string),
 *                      "pagingFlag" (Boolean), "start", "limit"
 * @return KwlReturnObject whose list holds the matching User rows and whose
 *         count is the total (unpaged) number of distinct users
 * @throws ServiceException propagated from query execution
 */
@Override
public KwlReturnObject getUnAssignedLeadRoutingUsers(String companyid, HashMap<String, Object> requestParams)
        throws ServiceException {
    String selCountQuery = "select count(distinct u.userID) ";
    StringBuilder hql = new StringBuilder(
            "from User u where u.userID not in (select user.userID from LeadRoutingUsers where user.company.companyID = ?) and u.deleteflag=0 and "
                    + " company.companyID = ? ");
    String orderBy = " order by u.firstName,u.lastName";
    ArrayList filter_params = new ArrayList();
    // companyid binds twice: once in the sub-select, once in the outer query.
    filter_params.add(companyid);
    filter_params.add(companyid);
    // Optional search on first/last name.
    String filterQuery = "";
    if (requestParams.containsKey("ss") && requestParams.get("ss") != null) {
        String ss = requestParams.get("ss").toString();
        if (!StringUtil.isNullOrEmpty(ss)) {
            String[] searchcol = new String[] { "u.lastName", "u.firstName" };
            StringUtil.insertParamSearchString(filter_params, ss, searchcol.length);
            filterQuery = StringUtil.getSearchString(ss, "and", searchcol);
        }
    }
    // Total (unpaged) count, returned alongside the possibly-paged list.
    String countQuery = selCountQuery + hql + filterQuery;
    List ll = executeQuery(countQuery, filter_params.toArray());
    Long dl = 0L;
    if (ll != null && !ll.isEmpty()) {
        dl = (Long) ll.get(0);
    }
    if (requestParams.containsKey("pagingFlag") && Boolean.TRUE.equals(requestParams.get("pagingFlag"))) {
        int start = 0;
        int limit = 15;
        // Fix: the original checked "start" for emptiness twice; the second check
        // must be on "limit", otherwise an empty "limit" value reaches
        // Integer.parseInt and throws NumberFormatException.
        boolean ispaging = requestParams.containsKey("start") && requestParams.containsKey("limit")
                && !StringUtil.isNullOrEmpty(requestParams.get("start").toString())
                && !StringUtil.isNullOrEmpty(requestParams.get("limit").toString());
        if (ispaging) {
            start = Integer.parseInt(requestParams.get("start").toString());
            limit = Integer.parseInt(requestParams.get("limit").toString());
        }
        ll = executeQueryPaging(hql + filterQuery + orderBy, filter_params.toArray(),
                new Integer[] { start, limit });
    } else {
        ll = executeQuery(hql + filterQuery + orderBy, filter_params.toArray());
    }
    return new KwlReturnObject(true, KWLErrorMsgs.S01, "", ll, dl.intValue());
}
From source file:divconq.tool.release.Main.java
@Override public void run(Scanner scan, ApiSession api) { Path relpath = null;/*w ww . ja v a 2s . c om*/ Path gitpath = null; Path wikigitpath = null; XElement fldset = Hub.instance.getConfig().selectFirst("CommandLine/Settings"); if (fldset != null) { relpath = Paths.get(fldset.getAttribute("ReleasePath")); gitpath = Paths.get(fldset.getAttribute("GitPath")); wikigitpath = Paths.get(fldset.getAttribute("WikiGitPath")); } boolean running = true; while (running) { try { System.out.println(); System.out.println("-----------------------------------------------"); System.out.println(" Release Builder Menu"); System.out.println("-----------------------------------------------"); System.out.println("0) Exit"); if (relpath != null) System.out.println("1) Build release package from Settings File"); System.out.println("2) Build custom release package [under construction]"); System.out.println("4) Pack the .jar files"); if (gitpath != null) System.out.println("5) Copy Source to GitHub folder"); System.out.println("6) Update AWWW"); String opt = scan.nextLine(); Long mopt = StringUtil.parseInt(opt); if (mopt == null) continue; switch (mopt.intValue()) { case 0: running = false; break; case 1: { ReleasesHelper releases = new ReleasesHelper(); if (!releases.init(relpath)) break; System.out.println("Select a release to build"); System.out.println("0) None"); List<String> rnames = releases.names(); for (int i = 0; i < rnames.size(); i++) System.out.println((i + 1) + ") " + rnames.get(i)); System.out.println("Option #: "); opt = scan.nextLine(); mopt = StringUtil.parseInt(opt); if ((mopt == null) || (mopt == 0)) break; XElement relchoice = releases.get(mopt.intValue() - 1); if (relchoice == null) { System.out.println("Invalid option"); break; } PackagesHelper availpackages = new PackagesHelper(); availpackages.init(); InstallHelper inst = new InstallHelper(); if (!inst.init(availpackages, relchoice)) break; XElement prindesc = availpackages.get(inst.prinpackage); XElement 
prininst = prindesc.find("Install"); if (prininst == null) { System.out.println("Principle package: " + inst.prinpackagenm + " cannot be released directly, it must be part of another package."); break; } String relvers = prindesc.getAttribute("Version"); System.out.println("Building release version " + relvers); if (prindesc.hasAttribute("LastVersion")) System.out.println("Previous release version " + prindesc.getAttribute("LastVersion")); String rname = relchoice.getAttribute("Name"); Path destpath = relpath.resolve(rname + "/" + rname + "-" + relvers + "-bin.zip"); if (Files.exists(destpath)) { System.out.println("Version " + relvers + " already exists, overwrite? (y/n): "); if (!scan.nextLine().toLowerCase().startsWith("y")) break; Files.delete(destpath); } System.out.println("Preparing zip files"); AtomicBoolean errored = new AtomicBoolean(); Path tempfolder = FileUtil.allocateTempFolder2(); ListStruct ignorepaths = new ListStruct(); Set<String> nolongerdepends = new HashSet<>(); Set<String> dependson = new HashSet<>(); // put all the release files into a temp folder inst.instpkgs.forEach(pname -> { availpackages.get(pname).selectAll("DependsOn").stream() .filter(doel -> !doel.hasAttribute("Option") || inst.relopts.contains(doel.getAttribute("Option"))) .forEach(doel -> { // copy all libraries we rely on doel.selectAll("Library").forEach(libel -> { dependson.add(libel.getAttribute("File")); Path src = Paths.get("./lib/" + libel.getAttribute("File")); Path dest = tempfolder.resolve("lib/" + libel.getAttribute("File")); try { Files.createDirectories(dest.getParent()); if (Files.notExists(dest)) Files.copy(src, dest, StandardCopyOption.COPY_ATTRIBUTES); } catch (Exception x) { errored.set(true); System.out.println("Unable to copy file: " + src); } }); // copy all files we rely on doel.selectAll("File").forEach(libel -> { Path src = Paths.get("./" + libel.getAttribute("Path")); Path dest = tempfolder.resolve(libel.getAttribute("Path")); try { 
Files.createDirectories(dest.getParent()); if (Files.notExists(dest)) Files.copy(src, dest, StandardCopyOption.COPY_ATTRIBUTES); } catch (Exception x) { errored.set(true); System.out.println("Unable to copy file: " + src); } }); // copy all folders we rely on doel.selectAll("Folder").forEach(libel -> { Path src = Paths.get("./" + libel.getAttribute("Path")); Path dest = tempfolder.resolve(libel.getAttribute("Path")); try { Files.createDirectories(dest.getParent()); } catch (Exception x) { errored.set(true); System.out.println("Unable to copy file: " + src); } OperationResult cres = FileUtil.copyFileTree(src, dest); if (cres.hasErrors()) errored.set(true); }); }); availpackages.get(pname).selectAll("IgnorePaths/Ignore") .forEach(doel -> ignorepaths.addItem(doel.getAttribute("Path"))); // NoLongerDependsOn functionally currently only applies to libraries availpackages.get(pname).selectAll("NoLongerDependsOn/Library") .forEach(doel -> nolongerdepends.add(doel.getAttribute("File"))); // copy the released packages folders Path src = Paths.get("./packages/" + pname); Path dest = tempfolder.resolve("packages/" + pname); try { Files.createDirectories(dest.getParent()); } catch (Exception x) { errored.set(true); System.out.println("Unable to copy file: " + src); } // we may wish to enhance filter to allow .JAR sometimes, but this is meant to prevent copying of packages/pname/lib/abc.lib.jar files OperationResult cres = FileUtil.copyFileTree(src, dest, path -> !path.toString().endsWith(".jar")); if (cres.hasErrors()) errored.set(true); // copy the released packages libraries Path libsrc = Paths.get("./packages/" + pname + "/lib"); Path libdest = tempfolder.resolve("lib"); if (Files.exists(libsrc)) { cres = FileUtil.copyFileTree(libsrc, libdest); if (cres.hasErrors()) errored.set(true); } }); if (errored.get()) { System.out.println("Error with assembling package"); break; } // copy the principle config Path csrc = Paths.get("./packages/" + inst.prinpackage + "/config"); Path 
cdest = tempfolder.resolve("config/" + inst.prinpackagenm); if (Files.exists(csrc)) { Files.createDirectories(cdest); OperationResult cres = FileUtil.copyFileTree(csrc, cdest); if (cres.hasErrors()) { System.out.println("Error with prepping config"); break; } } boolean configpassed = true; // copy packages with config = true for (XElement pkg : relchoice.selectAll("Package")) { if (!"true".equals(pkg.getAttribute("Config"))) break; String pname = pkg.getAttribute("Name"); int pspos = pname.lastIndexOf('/'); String pnm = (pspos != -1) ? pname.substring(pspos + 1) : pname; csrc = Paths.get("./packages/" + pname + "/config"); cdest = tempfolder.resolve("config/" + pnm); if (Files.exists(csrc)) { Files.createDirectories(cdest); OperationResult cres = FileUtil.copyFileTree(csrc, cdest); if (cres.hasErrors()) { System.out.println("Error with prepping extra config"); configpassed = false; break; } } } if (!configpassed) break; // also copy installer config if being used if (inst.includeinstaller) { csrc = Paths.get("./packages/dc/dcInstall/config"); cdest = tempfolder.resolve("config/dcInstall"); if (Files.exists(csrc)) { Files.createDirectories(cdest); OperationResult cres = FileUtil.copyFileTree(csrc, cdest); if (cres.hasErrors()) { System.out.println("Error with prepping install config"); break; } } } // write out the deployed file RecordStruct deployed = new RecordStruct(); deployed.setField("Version", relvers); deployed.setField("PackageFolder", relpath.resolve(rname)); deployed.setField("PackagePrefix", rname); OperationResult d1res = IOUtil.saveEntireFile(tempfolder.resolve("config/deployed.json"), deployed.toPrettyString()); if (d1res.hasErrors()) { System.out.println("Error with prepping deployed"); break; } RecordStruct deployment = new RecordStruct(); deployment.setField("Version", relvers); if (prindesc.hasAttribute("LastVersion")) deployment.setField("DependsOn", prindesc.getAttribute("LastVersion")); deployment.setField("UpdateMessage", "This update is 
complete, you may accept this update as runnable."); nolongerdepends.removeAll(dependson); ListStruct deletefiles = new ListStruct(); nolongerdepends.forEach(fname -> deletefiles.addItem("lib/" + fname)); deployment.setField("DeleteFiles", deletefiles); deployment.setField("IgnorePaths", ignorepaths); d1res = IOUtil.saveEntireFile(tempfolder.resolve("deployment.json"), deployment.toPrettyString()); if (d1res.hasErrors()) { System.out.println("Error with prepping deployment"); break; } // write env file d1res = IOUtil.saveEntireFile(tempfolder.resolve("env.bat"), "set mem=" + relchoice.getAttribute("Memory", "2048") + "\r\n" + "SET project=" + inst.prinpackagenm + "\r\n" + "SET service=" + relchoice.getAttribute("Service", inst.prinpackagenm) + "\r\n" + "SET servicename=" + relchoice.getAttribute("ServiceName", inst.prinpackagenm + " Service") + "\r\n"); if (d1res.hasErrors()) { System.out.println("Error with prepping env"); break; } System.out.println("Packing Release file."); Path relbin = relpath.resolve(rname + "/" + rname + "-" + relvers + "-bin.zip"); if (Files.notExists(relbin.getParent())) Files.createDirectories(relbin.getParent()); ZipArchiveOutputStream zipout = new ZipArchiveOutputStream(relbin.toFile()); try { Files.walkFileTree(tempfolder, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { ZipArchiveEntry entry = new ZipArchiveEntry( tempfolder.relativize(file).toString()); entry.setSize(Files.size(file)); zipout.putArchiveEntry(entry); zipout.write(Files.readAllBytes(file)); zipout.closeArchiveEntry(); return FileVisitResult.CONTINUE; } }); } catch (IOException x) { System.out.println("Error building zip: " + x); } zipout.close(); System.out.println("Release file written"); FileUtil.deleteDirectory(tempfolder); break; } // end case 1 case 3: { System.out.println("Note these utilities are only good from the 
main console,"); System.out.println("if you are using a remote connection then the encryption will"); System.out.println("not work as expected. [we do not have access the master keys]"); System.out.println(); Foreground.utilityMenu(scan); break; } case 4: { System.out.println("Packing jar library files."); String[] packlist = new String[] { "divconq.core", "divconq.interchange", "divconq.web", "divconq.tasks", "divconq.tasks.api", "ncc.uploader.api", "ncc.uploader.core", "ncc.workflow", "sd.core" }; String[] packnames = new String[] { "dcCore", "dcInterchange", "dcWeb", "dcTasks", "dcTasksApi", "nccUploaderApi", "nccUploader", "nccWorkflow", "sd/sdBackend" }; for (int i = 0; i < packlist.length; i++) { String lib = packlist[i]; String pname = packnames[i]; Path relbin = Paths.get("./ext/" + lib + ".jar"); Path srcbin = Paths.get("./" + lib + "/bin"); Path packbin = Paths.get("./packages/" + pname + "/lib/" + lib + ".jar"); if (Files.notExists(relbin.getParent())) Files.createDirectories(relbin.getParent()); Files.deleteIfExists(relbin); ZipArchiveOutputStream zipout = new ZipArchiveOutputStream(relbin.toFile()); try { Files.walkFileTree(srcbin, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { ZipArchiveEntry entry = new ZipArchiveEntry( srcbin.relativize(file).toString()); entry.setSize(Files.size(file)); zipout.putArchiveEntry(entry); zipout.write(Files.readAllBytes(file)); zipout.closeArchiveEntry(); return FileVisitResult.CONTINUE; } }); } catch (IOException x) { System.out.println("Error building zip: " + x); } zipout.close(); Files.copy(relbin, packbin, StandardCopyOption.REPLACE_EXISTING); } System.out.println("Done"); break; } case 5: { System.out.println("Copying Source Files"); System.out.println("Cleaning folders"); OperationResult or = FileUtil.deleteDirectory(gitpath.resolve("divconq.core/src/main/java")); if 
(or.hasErrors()) { System.out.println("Error deleting files"); break; } or = FileUtil.deleteDirectory(gitpath.resolve("divconq.core/src/main/resources")); if (or.hasErrors()) { System.out.println("Error deleting files"); break; } or = FileUtil.deleteDirectory(gitpath.resolve("divconq.interchange/src/main/java")); if (or.hasErrors()) { System.out.println("Error deleting files"); break; } or = FileUtil.deleteDirectory(gitpath.resolve("divconq.tasks/src/main/java")); if (or.hasErrors()) { System.out.println("Error deleting files"); break; } or = FileUtil.deleteDirectory(gitpath.resolve("divconq.tasks.api/src/main/java")); if (or.hasErrors()) { System.out.println("Error deleting files"); break; } or = FileUtil.deleteDirectory(gitpath.resolve("divconq.web/src/main/java")); if (or.hasErrors()) { System.out.println("Error deleting files"); break; } or = FileUtil.deleteDirectory(gitpath.resolve("packages")); if (or.hasErrors()) { System.out.println("Error deleting files"); break; } or = FileUtil.deleteDirectoryContent(wikigitpath, ".git"); if (or.hasErrors()) { System.out.println("Error deleting wiki files"); break; } System.out.println("Copying folders"); System.out.println("Copy tree ./divconq.core/src"); or = FileUtil.copyFileTree(Paths.get("./divconq.core/src/divconq"), gitpath.resolve("divconq.core/src/main/java/divconq"), new Predicate<Path>() { @Override public boolean test(Path file) { return file.getFileName().toString().endsWith(".java"); } }); if (or.hasErrors()) { System.out.println("Error copying files"); break; } or = FileUtil.copyFileTree(Paths.get("./divconq.core/src/org"), gitpath.resolve("divconq.core/src/main/java/org"), new Predicate<Path>() { @Override public boolean test(Path file) { return file.getFileName().toString().endsWith(".java"); } }); if (or.hasErrors()) { System.out.println("Error copying files"); break; } or = FileUtil.copyFileTree(Paths.get("./divconq.core/src/localize"), gitpath.resolve("divconq.core/src/main/resources/localize"), new 
Predicate<Path>() { @Override public boolean test(Path file) { return file.getFileName().toString().endsWith(".xml"); } }); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./divconq.interchange/src"); or = FileUtil.copyFileTree(Paths.get("./divconq.interchange/src"), gitpath.resolve("divconq.interchange/src/main/java")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./divconq.tasks/src"); or = FileUtil.copyFileTree(Paths.get("./divconq.tasks/src"), gitpath.resolve("divconq.tasks/src/main/java")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./divconq.tasks.api/src"); or = FileUtil.copyFileTree(Paths.get("./divconq.tasks.api/src"), gitpath.resolve("divconq.tasks.api/src/main/java")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./divconq.web/src"); or = FileUtil.copyFileTree(Paths.get("./divconq.web/src"), gitpath.resolve("divconq.web/src/main/java")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcCore"); or = FileUtil.copyFileTree(Paths.get("./packages/dcCore"), gitpath.resolve("packages/dcCore")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcCorePublic"); or = FileUtil.copyFileTree(Paths.get("./packages/dcCorePublic"), gitpath.resolve("packages/dcCorePublic")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcInterchange"); or = FileUtil.copyFileTree(Paths.get("./packages/dcInterchange"), gitpath.resolve("packages/dcInterchange")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcTasks"); or = FileUtil.copyFileTree(Paths.get("./packages/dcTasks"), 
gitpath.resolve("packages/dcTasks")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcTasksApi"); or = FileUtil.copyFileTree(Paths.get("./packages/dcTasksApi"), gitpath.resolve("packages/dcTasksApi")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcTasksWeb"); or = FileUtil.copyFileTree(Paths.get("./packages/dcTasksWeb"), gitpath.resolve("packages/dcTasksWeb")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcTest"); or = FileUtil.copyFileTree(Paths.get("./packages/dcTest"), gitpath.resolve("packages/dcTest")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./packages/dcWeb"); or = FileUtil.copyFileTree(Paths.get("./packages/dcWeb"), gitpath.resolve("packages/dcWeb")); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copy tree ./divconq.wiki/public"); or = FileUtil.copyFileTree(Paths.get("./divconq.wiki/public"), wikigitpath); if (or.hasErrors()) { System.out.println("Error copying files"); break; } System.out.println("Copying files"); Files.copy(Paths.get("./README.md"), gitpath.resolve("README.md"), StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES); Files.copy(Paths.get("./RELEASE_NOTES.md"), gitpath.resolve("RELEASE_NOTES.md"), StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES); Files.copy(Paths.get("./NOTICE.txt"), gitpath.resolve("NOTICE.txt"), StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES); Files.copy(Paths.get("./LICENSE.txt"), gitpath.resolve("LICENSE.txt"), StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES); System.out.println("Done"); break; } case 6: { System.out.println("Are you sure you want to update AWWW Server? 
(y/n): "); if (!scan.nextLine().toLowerCase().startsWith("y")) break; ReleasesHelper releases = new ReleasesHelper(); if (!releases.init(relpath)) break; XElement relchoice = releases.get("AWWWServer"); if (relchoice == null) { System.out.println("Invalid option"); break; } PackagesHelper availpackages = new PackagesHelper(); availpackages.init(); InstallHelper inst = new InstallHelper(); if (!inst.init(availpackages, relchoice)) break; ServerHelper ssh = new ServerHelper(); if (!ssh.init(relchoice.find("SSH"))) break; ChannelSftp sftp = null; try { Channel channel = ssh.session().openChannel("sftp"); channel.connect(); sftp = (ChannelSftp) channel; // go to routines folder sftp.cd("/usr/local/bin/dc/AWWWServer"); FileRepositoryBuilder builder = new FileRepositoryBuilder(); Repository repository = builder.setGitDir(new File(".git")).findGitDir() // scan up the file system tree .build(); String lastsync = releases.getData("AWWWServer").getFieldAsString("LastCommitSync"); RevWalk rw = new RevWalk(repository); ObjectId head1 = repository.resolve(Constants.HEAD); RevCommit commit1 = rw.parseCommit(head1); releases.getData("AWWWServer").setField("LastCommitSync", head1.name()); ObjectId rev2 = repository.resolve(lastsync); RevCommit parent = rw.parseCommit(rev2); //RevCommit parent2 = rw.parseCommit(parent.getParent(0).getId()); DiffFormatter df = new DiffFormatter(DisabledOutputStream.INSTANCE); df.setRepository(repository); df.setDiffComparator(RawTextComparator.DEFAULT); df.setDetectRenames(true); // list oldest first or change types are all wrong!! 
List<DiffEntry> diffs = df.scan(parent.getTree(), commit1.getTree()); for (DiffEntry diff : diffs) { String gnpath = diff.getNewPath(); String gopath = diff.getOldPath(); Path npath = Paths.get("./" + gnpath); Path opath = Paths.get("./" + gopath); if (diff.getChangeType() == ChangeType.DELETE) { if (inst.containsPathExtended(opath)) { System.out.println("- " + diff.getChangeType().name() + " - " + opath); try { sftp.rm(opath.toString()); System.out.println("deleted!!"); } catch (SftpException x) { System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); System.out.println("Sftp Error: " + x); System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); } } else { System.out.println("/ " + diff.getChangeType().name() + " - " + gopath + " !!!!!!!!!!!!!!!!!!!!!!!!!"); } } else if ((diff.getChangeType() == ChangeType.ADD) || (diff.getChangeType() == ChangeType.MODIFY) || (diff.getChangeType() == ChangeType.COPY)) { if (inst.containsPathExtended(npath)) { System.out.println("+ " + diff.getChangeType().name() + " - " + npath); try { ssh.makeDirSftp(sftp, npath.getParent()); sftp.put(npath.toString(), npath.toString(), ChannelSftp.OVERWRITE); sftp.chmod(npath.endsWith(".sh") ? 
484 : 420, npath.toString()); // 644 octal = 420 dec, 744 octal = 484 dec System.out.println("uploaded!!"); } catch (SftpException x) { System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); System.out.println("Sftp Error: " + x); System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); } } else { System.out.println("> " + diff.getChangeType().name() + " - " + gnpath + " !!!!!!!!!!!!!!!!!!!!!!!!!"); } } else if (diff.getChangeType() == ChangeType.RENAME) { // remove the old if (inst.containsPathExtended(opath)) { System.out.println("- " + diff.getChangeType().name() + " - " + opath); try { sftp.rm(opath.toString()); System.out.println("deleted!!"); } catch (SftpException x) { System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); System.out.println("Sftp Error: " + x); System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); } } else { System.out.println("/ " + diff.getChangeType().name() + " - " + gopath + " !!!!!!!!!!!!!!!!!!!!!!!!!"); } // add the new path if (inst.containsPathExtended(npath)) { System.out.println("+ " + diff.getChangeType().name() + " - " + npath); try { ssh.makeDirSftp(sftp, npath.getParent()); sftp.put(npath.toString(), npath.toString(), ChannelSftp.OVERWRITE); sftp.chmod(npath.endsWith(".sh") ? 
484 : 420, npath.toString()); // 644 octal = 420 dec, 744 octal = 484 dec System.out.println("uploaded!!"); } catch (SftpException x) { System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); System.out.println("Sftp Error: " + x); System.out.println( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); } } else { System.out.println("> " + diff.getChangeType().name() + " - " + gnpath + " !!!!!!!!!!!!!!!!!!!!!!!!!"); } } else { System.out.println("??????????????????????????????????????????????????????????"); System.out.println(": " + diff.getChangeType().name() + " - " + gnpath + " ?????????????????????????"); System.out.println("??????????????????????????????????????????????????????????"); } } rw.dispose(); repository.close(); releases.saveData(); } catch (JSchException x) { System.out.println("Sftp Error: " + x); } finally { if (sftp.isConnected()) sftp.exit(); ssh.close(); } break; } case 7: { Path sfolder = Paths.get("/Work/Projects/awww-current/dairy-graze/poly"); Path dfolder = Paths.get("/Work/Projects/awww-current/dairy-graze/poly-js"); Files.list(sfolder).forEach(file -> { String fname = file.getFileName().toString(); if (!fname.endsWith(".xml")) return; FuncResult<XElement> lres = XmlReader.loadFile(file, false); if (lres.isEmptyResult()) { System.out.println("Unable to parse: " + file); return; } String zc = fname.substring(5, 8); String code = "zipsData['" + zc + "'] = "; XElement root = lres.getResult(); /* <polyline1 lng="-90.620897" lat="45.377447"/> <polyline1 lng="-90.619327" lat="45.3805"/> [-71.196845,41.67757],[-71.120168,41.496831],[-71.317338,41.474923],[-71.196845,41.67757] */ ListStruct center = new ListStruct(); ListStruct cords = new ListStruct(); ListStruct currentPoly = null; //String currentName = null; for (XElement child : root.selectAll("*")) { String cname = child.getName(); if (cname.startsWith("marker")) { // not always accurate if (center.isEmpty()) 
center.addItem(Struct.objectToDecimal(child.getAttribute("lng")), Struct.objectToDecimal(child.getAttribute("lat"))); currentPoly = new ListStruct(); cords.addItem(new ListStruct(currentPoly)); continue; } /* if (cname.startsWith("info")) { System.out.println("areas: " + child.getAttribute("areas")); continue; } */ if (!cname.startsWith("polyline")) continue; if (currentPoly == null) { //if (!cname.equals(currentName)) { //if (currentName == null) { // currentName = cname; // System.out.println("new poly: " + cname); currentPoly = new ListStruct(); cords.addItem(new ListStruct(currentPoly)); } currentPoly.addItem(new ListStruct(Struct.objectToDecimal(child.getAttribute("lng")), Struct.objectToDecimal(child.getAttribute("lat")))); } RecordStruct feat = new RecordStruct().withField("type", "Feature") .withField("id", "zip" + zc) .withField("properties", new RecordStruct().withField("name", "Prefix " + zc).withField("alias", zc)) .withField("geometry", new RecordStruct().withField("type", "MultiPolygon") .withField("coordinates", cords)); RecordStruct entry = new RecordStruct().withField("code", zc).withField("geo", feat) .withField("center", center); IOUtil.saveEntireFile2(dfolder.resolve("us-zips-" + zc + ".js"), code + entry.toPrettyString() + ";"); }); break; } } } catch (Exception x) { System.out.println("CLI error: " + x); } } }
From source file:whitelabel.cloud.webapp.impl.service.NewCloudServerService.java
/** * @param model/*from www. j a v a 2 s .c o m*/ * @return */ public final AppWsResult invokeServerCreation(NewCloudServer model) { AppWsResult invokeResult = null; List<AppVirtualDiskDetails> appVDisks = new ArrayList<AppVirtualDiskDetails>(); AppVirtualDiskDetails appVirtualDisk0 = new AppVirtualDiskDetails(model.getSelectedDiskSize().get(0L), null, VirtualDiskTypes.PRIMARY_VIRTUAL_DISK); appVDisks.add(appVirtualDisk0); if (model.getSelectedDiskSize().get(1L) != null && model.getSelectedDiskSize().get(1L).intValue() > 0) { AppVirtualDiskDetails appVirtualDisk1 = new AppVirtualDiskDetails(model.getSelectedDiskSize().get(1L), null, VirtualDiskTypes.ADDITIONAL_VIRTUAL_DISK_1); appVDisks.add(appVirtualDisk1); } if (model.getSelectedDiskSize().get(2L) != null && model.getSelectedDiskSize().get(2L).intValue() > 0) { AppVirtualDiskDetails appVirtualDisk2 = new AppVirtualDiskDetails(model.getSelectedDiskSize().get(2L), null, VirtualDiskTypes.ADDITIONAL_VIRTUAL_DISK_2); appVDisks.add(appVirtualDisk2); } if (model.getSelectedDiskSize().get(3L) != null && model.getSelectedDiskSize().get(3L).intValue() > 0) { AppVirtualDiskDetails appVirtualDisk3 = new AppVirtualDiskDetails(model.getSelectedDiskSize().get(3L), null, VirtualDiskTypes.ADDITIONAL_VIRTUAL_DISK_3); appVDisks.add(appVirtualDisk3); } VDCResourceBoundsConfig vdcConfig = UserUtil.getVDCResourceConfiguration(); AppTemplateDetails serverOST = vdcConfig.getTemplate(model.getSelectedHypervisorType(), model.getSelectedTemplateProdId()); String serverName = model.getName(); String serverPwd = model.getPassword(); Long serverCpus = model.getSelectedCPUNum(); Long serverRams = model.getSelectedRAMNum(); // Preparing Ethernet(s) configurations AppNetworkAdapterConfiguration appNAC2 = null; AppNetworkAdapterConfiguration appNAC3 = null; if (model.getEth02_IP() != null) { AppPrivateVLanDetails vlan2 = new AppPrivateVLanDetails(model.getVlan_eth02().intValue(), model.getEth02_IP(), model.getEth02_NM()); appNAC2 = new 
AppNetworkAdapterConfiguration(NetworkAdapterTypes.ETHERNET_1, vlan2, null); } if (model.getEth03_IP() != null) { AppPrivateVLanDetails vlan3 = new AppPrivateVLanDetails(model.getVlan_eth03().intValue(), model.getEth03_IP(), model.getEth03_NM()); appNAC3 = new AppNetworkAdapterConfiguration(NetworkAdapterTypes.ETHERNET_2, vlan3, null); } AppWsResult response = UserUtil.getWsEndUserClient().purchaseIpAddress(); if (response == null) { return new AppWsResult(null); } else if (!response.isSuccess()) { return response; } Integer resourceId = ((IPAddress) response.getValue()).getResourceId(); //new Integer(22250);// // here we have a new public ip resource that we will associate to the new server.. // Prepare Ethernet 0 (public-ip) AppPublicIpAddressDetails pip1 = new AppPublicIpAddressDetails(resourceId, true); List<AppPublicIpAddressDetails> assigned = new ArrayList<AppPublicIpAddressDetails>(); assigned.add(pip1); AppNetworkAdapterConfiguration appNAC1 = new AppNetworkAdapterConfiguration(NetworkAdapterTypes.ETHERNET_0, null, assigned); List<AppNetworkAdapterConfiguration> appNetAdapConfigs = new ArrayList<AppNetworkAdapterConfiguration>(); appNetAdapConfigs.add(appNAC1); if (appNAC2 != null) { appNetAdapConfigs.add(appNAC2); } if (appNAC3 != null) { appNetAdapConfigs.add(appNAC3); } AppNewServer appNewServer = new AppNewServer(serverName, serverPwd, serverOST.getId(), serverCpus.intValue(), serverRams.intValue(), appVDisks, appNetAdapConfigs); invokeResult = UserUtil.getWsEndUserClient().setEnqueueServerCreation(appNewServer); return invokeResult; }
From source file:it.ventuland.ytd.YTDownloadThread.java
void savebinarydata(BufferedInputStream binaryreader, Long iBytesMax) throws IOException { FileOutputStream fos = null;/*from w w w . j a v a 2 s . com*/ try { File f; Integer idupcount = 0; String sdirectorychoosed = YtdConfigManager.getInstance().getSaveDirectoryPath(); String sfilename = this.getTitle(); debugoutput("title: ".concat(this.getTitle()).concat("sfilename: ").concat(sfilename)); do { f = new File(sdirectorychoosed, sfilename.concat((idupcount > 0 ? "(".concat(idupcount.toString()).concat(")") : "")) .concat(".") .concat(this.sContentType.replaceFirst("video/", "").replaceAll("x-", ""))); idupcount += 1; } while (f.exists()); this.setFileName(f.getAbsolutePath()); Long iBytesReadSum = (long) 0; Long iPercentage = (long) -1; fos = new FileOutputStream(f); debugoutput(String.format("writing %d bytes to: %s", iBytesMax, this.getFileName())); output(("file size of \"").concat(this.getTitle()).concat("\" = ").concat(iBytesMax.toString()) .concat(" Bytes").concat(" ~ ").concat(Long.toString((iBytesMax / 1024)).concat(" KiB")) .concat(" ~ ").concat(Long.toString((iBytesMax / 1024 / 1024)).concat(" MiB"))); byte[] bytes = new byte[4096]; Integer iBytesRead = 1; // adjust blocks of percentage to output - larger files are shown with smaller pieces Integer iblocks = 10; if (iBytesMax > 20 * 1024 * 1024) { iblocks = 4; } if (iBytesMax > 32 * 1024 * 1024) { iblocks = 2; } if (iBytesMax > 56 * 1024 * 1024) { iblocks = 1; } while (!isWorkerInterrupted() && iBytesRead > 0) { iBytesRead = binaryreader.read(bytes); iBytesReadSum += iBytesRead; // drop a line every x% of the download if ((((iBytesReadSum * 100 / iBytesMax) / iblocks) * iblocks) > iPercentage) { iPercentage = (((iBytesReadSum * 100 / iBytesMax) / iblocks) * iblocks); processDownloadEvent(new DownloadEvent(this, iThreadNo, DOWNLOAD_STATUS.DOWNLOADING, iPercentage.intValue(), sURL, null)); } // TODO calculate and show ETA for bigger downloads (remaining time > 60s) - every 20%? 
try { fos.write(bytes, 0, iBytesRead); } catch (IndexOutOfBoundsException ioob) { } } // rename files if download was interrupted before completion of download if (isWorkerInterrupted() && iBytesReadSum < iBytesMax) { try { // this part is especially for our M$-Windows users because of the different behavior of File.renameTo() in contrast to non-windows // see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6213298 and others // even with Java 1.6.0_22 the renameTo() does not work directly on M$-Windows! fos.close(); } catch (Exception e) { } // System.gc(); // we don't have to do this but to be sure the file handle gets released we do a thread sleep try { Thread.sleep(50); } catch (InterruptedException e) { } // this part runs on *ix platforms without closing the FileOutputStream explicitly debugoutput(String.format("download canceled. (%d)", (iBytesRead))); changeFileNamewith("CANCELED."); String smsg = "renaming unfinished file to: ".concat(this.getFileName()); output(smsg); debugoutput(smsg); // CANCELED filenames overwrite others as we do not test for CANCELED one, two... if (!f.renameTo(new File(this.getFileName()))) { smsg = "error renaming unfinished file to: ".concat(this.getFileName()); output(smsg); debugoutput(smsg); } } debugoutput("done writing."); } catch (FileNotFoundException fnfe) { throw (fnfe); } catch (IOException ioe) { debugoutput("IOException"); throw (ioe); } finally { this.sVideoURL = null; try { fos.close(); } catch (Exception e) { } } }
From source file:gov.nih.nci.ncicb.tcga.dcc.qclive.loader.levelthree.LevelThreeLoader.java
/** * Loads archives by ID.// www .j av a 2 s . co m * * @param archiveId * id for an archive to load * @throws LoaderException * if there is an error loading an archive by id */ private void loadArchiveById(final Long archiveId) throws LoaderException { final Archive archive = getArchiveQueries().getArchive(archiveId.intValue()); DiseaseContextHolder.setDisease(archive.getTheTumor().getTumorName()); List<FileInfo> archiveFile = getArchiveQueries().getFilesForArchive(archiveId); // create a map for easy lookups Map<String, Long> archiveFileMap = new HashMap<String, Long>(); for (FileInfo file : archiveFile) { if (!excludedFiles.contains(file.getFileName())) { archiveFileMap.put(file.getFileName(), file.getId()); } } // get magetab archive dir final String magetabArchiveDir = getMagetabDir(archive); // get all files for the archive TabDelimitedContentNavigator sdrfNavigator = loadSDRF(magetabArchiveDir); parseDataSet(archive, archiveFileMap, sdrfNavigator, magetabArchiveDir); // update archive_info for both disease and common databses with a // timestamp when an archive is finished loading getLevelThreeQueries().updateArchiveLoadedDate(archiveId); getArchiveQueries().updateArchiveInfo(archiveId); }