List of usage examples for java.lang Thread isDaemon
public final boolean isDaemon()
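isDaemon() returns true if the thread is a daemon thread. The JVM exits once the only remaining live threads are daemons, so the flag is normally checked or set before start(). A minimal, self-contained sketch (not taken from any of the sources below):

public class DaemonCheck {
    public static void main(String[] args) throws InterruptedException {
        Thread worker = new Thread(() -> System.out.println("working"));
        // setDaemon must be called before start(); calling it afterwards throws IllegalThreadStateException
        worker.setDaemon(true);
        System.out.println("worker daemon? " + worker.isDaemon());                  // true
        System.out.println("main daemon?   " + Thread.currentThread().isDaemon());  // false
        worker.start();
        worker.join();
    }
}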
From source file:org.sakaiproject.status.StatusServlet.java
protected void reportThreadDetails(HttpServletResponse response) throws Exception {
    PrintWriter pw = response.getWriter();
    for (Thread thread : findAllThreads()) {
        if (thread != null) {
            String threadLocation = "";
            try {
                StackTraceElement ste = thread.getStackTrace()[0];
                StackTraceElement ste2 = thread.getStackTrace()[1];
                threadLocation = ste.getClassName() + "." + ste.getMethodName() + "()," + ste.getFileName()
                        + ":" + ste.getLineNumber() + "," + ste2.getClassName() + "." + ste2.getMethodName()
                        + "()," + ste2.getFileName() + ":" + ste2.getLineNumber();
            } catch (Exception e) {
                threadLocation = "?,?,?,?";
            }
            pw.print(thread.getThreadGroup().getName() + "," + thread.getId() + "," + thread.getName() + ","
                    + thread.getPriority() + "," + thread.getState().name() + ","
                    + (thread.isAlive() ? "" : "notalive") + "," + (thread.isDaemon() ? "daemon" : "") + ","
                    + (thread.isInterrupted() ? "interrupted" : "") + "," + threadLocation + "\n");
        }
    }
}
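The servlet relies on a findAllThreads() helper that is not shown on this page. A possible stand-in, assuming no ThreadGroup-based filtering is needed, is the key set of Thread.getAllStackTraces(), which snapshots every live thread; the class and method names below are hypothetical:

import java.util.Set;

public class ThreadEnumerationSketch {
    // Hypothetical stand-in for the findAllThreads() helper used above:
    // getAllStackTraces() maps every live thread to its stack trace, so its key set is the thread list.
    static Set<Thread> findAllThreads() {
        return Thread.getAllStackTraces().keySet();
    }

    public static void main(String[] args) {
        for (Thread t : findAllThreads()) {
            System.out.println(t.getName() + " daemon=" + t.isDaemon());
        }
    }
}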
From source file:com.alibaba.wasp.master.AssignmentManager.java
/**
 * Get a named {@link java.util.concurrent.ThreadFactory} that just builds daemon threads.
 *
 * @param prefix name prefix for all threads created from the factory
 * @return a thread factory that creates named, daemon threads
 */
private static ThreadFactory newDaemonThreadFactory(final String prefix) {
    final ThreadFactory namedFactory = Threads.getNamedThreadFactory(prefix);
    return new ThreadFactory() {
        @Override
        public Thread newThread(Runnable r) {
            Thread t = namedFactory.newThread(r);
            if (!t.isDaemon()) {
                t.setDaemon(true);
            }
            if (t.getPriority() != Thread.NORM_PRIORITY) {
                t.setPriority(Thread.NORM_PRIORITY);
            }
            return t;
        }
    };
}
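A factory like this is normally handed to an executor so every pooled worker comes out as a daemon. A small usage sketch, assuming a plain lambda in place of Threads.getNamedThreadFactory (the pool size, thread name, and task are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

public class DaemonFactoryDemo {
    public static void main(String[] args) {
        ThreadFactory factory = r -> {
            Thread t = new Thread(r, "worker");
            if (!t.isDaemon()) {
                t.setDaemon(true); // mirror the pattern above: force daemon status before start()
            }
            return t;
        };
        ExecutorService pool = Executors.newFixedThreadPool(2, factory);
        pool.submit(() -> System.out.println(Thread.currentThread().getName()
                + " daemon=" + Thread.currentThread().isDaemon()));
        pool.shutdown();
    }
}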
From source file:org.dspace.app.itemexport.ItemExport.java
/**
 * Does the work creating a List with all the Items in the Community or
 * Collection. It then kicks off a new Thread to export the items, zip the
 * export directory and send confirmation email.
 *
 * @param dsObjects - List of dspace objects to process
 * @param context - the dspace context
 * @param additionalEmail - email address to cc in addition to the current user email
 * @throws Exception
 */
private static void processDownloadableExport(List<DSpaceObject> dsObjects, Context context,
        final String additionalEmail, boolean toMigrate) throws Exception {
    final EPerson eperson = context.getCurrentUser();
    final boolean migrate = toMigrate;

    // before we create a new export archive lets delete the 'expired' archives
    //deleteOldExportArchives(eperson.getID());
    deleteOldExportArchives();

    // keep track of the cumulative size of all bitstreams in each of the items
    // it will be checked against the config file entry
    double size = 0;
    final ArrayList<Integer> items = new ArrayList<Integer>();
    for (DSpaceObject dso : dsObjects) {
        if (dso.getType() == Constants.COMMUNITY) {
            Community community = (Community) dso;
            // get all the collections in the community
            Collection[] collections = community.getCollections();
            for (Collection collection : collections) {
                // get all the items in each collection
                ItemIterator iitems = collection.getItems();
                try {
                    while (iitems.hasNext()) {
                        Item item = iitems.next();
                        // get all the bundles in the item
                        Bundle[] bundles = item.getBundles();
                        for (Bundle bundle : bundles) {
                            // get all the bitstreams in each bundle
                            Bitstream[] bitstreams = bundle.getBitstreams();
                            for (Bitstream bit : bitstreams) {
                                // add up the size
                                size += bit.getSize();
                            }
                        }
                        items.add(item.getID());
                    }
                } finally {
                    if (iitems != null) {
                        iitems.close();
                    }
                }
            }
        } else if (dso.getType() == Constants.COLLECTION) {
            Collection collection = (Collection) dso;
            // get all the items in the collection
            ItemIterator iitems = collection.getItems();
            try {
                while (iitems.hasNext()) {
                    Item item = iitems.next();
                    // get all the bundles in the item
                    Bundle[] bundles = item.getBundles();
                    for (Bundle bundle : bundles) {
                        // get all the bitstreams in the bundle
                        Bitstream[] bitstreams = bundle.getBitstreams();
                        for (Bitstream bit : bitstreams) {
                            // add up the size
                            size += bit.getSize();
                        }
                    }
                    items.add(item.getID());
                }
            } finally {
                if (iitems != null) {
                    iitems.close();
                }
            }
        } else if (dso.getType() == Constants.ITEM) {
            Item item = (Item) dso;
            // get all the bundles in the item
            Bundle[] bundles = item.getBundles();
            for (Bundle bundle : bundles) {
                // get all the bitstreams in the bundle
                Bitstream[] bitstreams = bundle.getBitstreams();
                for (Bitstream bit : bitstreams) {
                    // add up the size
                    size += bit.getSize();
                }
            }
            items.add(item.getID());
        } else {
            // nothing to do just ignore this type of DSpaceObject
        }
    }

    // check the size of all the bitstreams against the configuration file entry if it exists
    String megaBytes = ConfigurationManager.getProperty("org.dspace.app.itemexport.max.size");
    if (megaBytes != null) {
        float maxSize = 0;
        try {
            maxSize = Float.parseFloat(megaBytes);
        } catch (Exception e) {
            // ignore...configuration entry may not be present
        }

        if (maxSize > 0 && maxSize < (size / 1048576.00)) { // a megabyte
            throw new ItemExportException(ItemExportException.EXPORT_TOO_LARGE,
                    "The overall size of this export is too large. Please contact your administrator for more information.");
        }
    }

    // if we have any items to process then kick off anonymous thread
    if (items.size() > 0) {
        Thread go = new Thread() {
            public void run() {
                Context context = null;
                ItemIterator iitems = null;
                try {
                    // create a new dspace context
                    context = new Context();
                    // ignore auths
                    context.setIgnoreAuthorization(true);

                    iitems = new ItemIterator(context, items);

                    String fileName = assembleFileName("item", eperson, new Date());
                    String workDir = getExportWorkDirectory() + System.getProperty("file.separator") + fileName;
                    String downloadDir = getExportDownloadDirectory(eperson.getID());

                    File wkDir = new File(workDir);
                    if (!wkDir.exists() && !wkDir.mkdirs()) {
                        log.error("Unable to create working directory");
                    }

                    File dnDir = new File(downloadDir);
                    if (!dnDir.exists() && !dnDir.mkdirs()) {
                        log.error("Unable to create download directory");
                    }

                    // export the items using normal export method
                    exportItem(context, iitems, workDir, 1, migrate);

                    // now zip up the export directory created above
                    zip(workDir, downloadDir + System.getProperty("file.separator") + fileName + ".zip");

                    // email message letting user know the file is ready for download
                    emailSuccessMessage(context, eperson, fileName + ".zip");

                    // return to enforcing auths
                    context.setIgnoreAuthorization(false);
                } catch (Exception e1) {
                    try {
                        emailErrorMessage(eperson, e1.getMessage());
                    } catch (Exception e) {
                        // won't throw here
                    }
                    throw new IllegalStateException(e1);
                } finally {
                    if (iitems != null) {
                        iitems.close();
                    }

                    // Make sure the database connection gets closed in all conditions.
                    try {
                        context.complete();
                    } catch (SQLException sqle) {
                        context.abort();
                    }
                }
            }
        };

        go.isDaemon();
        go.start();
    }
}
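Note the last two statements: go.isDaemon() merely reads the daemon flag and its boolean result is discarded, so the export still runs on an ordinary user thread. If a daemon thread were actually intended, the flag would have to be set before start(), roughly as in this sketch (illustrative only, not the actual DSpace code):

// Sketch only: how the export thread could be made a real daemon thread.
public class DaemonExportSketch {
    public static void main(String[] args) {
        Thread go = new Thread(() -> {
            // placeholder for the export work shown above
        });
        go.setDaemon(true);                 // must be called before start()
        System.out.println(go.isDaemon());  // true: the check is now meaningful
        go.start();
    }
}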
From source file:org.dspace.app.itemexport.ItemExportServiceImpl.java
/**
 * Does the work creating a List with all the Items in the Community or
 * Collection. It then kicks off a new Thread to export the items, zip the
 * export directory and send confirmation email.
 *
 * @param dsObjects - List of dspace objects to process
 * @param context - the dspace context
 * @param additionalEmail - email address to cc in addition to the current user email
 * @param toMigrate Whether to use the migrate option or not
 * @throws Exception if error
 */
protected void processDownloadableExport(List<DSpaceObject> dsObjects, Context context,
        final String additionalEmail, boolean toMigrate) throws Exception {
    final EPerson eperson = context.getCurrentUser();
    final boolean migrate = toMigrate;

    // before we create a new export archive lets delete the 'expired' archives
    //deleteOldExportArchives(eperson.getID());
    deleteOldExportArchives();

    // keep track of the cumulative size of all bitstreams in each of the items
    // it will be checked against the config file entry
    double size = 0;
    final HashMap<String, List<UUID>> itemsMap = new HashMap<>();
    for (DSpaceObject dso : dsObjects) {
        if (dso.getType() == Constants.COMMUNITY) {
            Community community = (Community) dso;
            // get all the collections in the community
            List<Collection> collections = communityService.getAllCollections(context, community);
            for (Collection collection : collections) {
                ArrayList<UUID> items = new ArrayList<>();
                // get all the items in each collection
                Iterator<Item> iitems = itemService.findByCollection(context, collection);
                try {
                    while (iitems.hasNext()) {
                        Item item = iitems.next();
                        // get all the bundles in the item
                        List<Bundle> bundles = item.getBundles();
                        for (Bundle bundle : bundles) {
                            // get all the bitstreams in each bundle
                            List<Bitstream> bitstreams = bundle.getBitstreams();
                            for (Bitstream bitstream : bitstreams) {
                                // add up the size
                                size += bitstream.getSize();
                            }
                        }
                        items.add(item.getID());
                    }
                } finally {
                    if (items.size() > 0) {
                        itemsMap.put("collection_" + collection.getID(), items);
                    }
                }
            }
        } else if (dso.getType() == Constants.COLLECTION) {
            Collection collection = (Collection) dso;
            ArrayList<UUID> items = new ArrayList<>();
            // get all the items in the collection
            Iterator<Item> iitems = itemService.findByCollection(context, collection);
            try {
                while (iitems.hasNext()) {
                    Item item = iitems.next();
                    // get all the bundles in the item
                    List<Bundle> bundles = item.getBundles();
                    for (Bundle bundle : bundles) {
                        // get all the bitstreams in the bundle
                        List<Bitstream> bitstreams = bundle.getBitstreams();
                        for (Bitstream bitstream : bitstreams) {
                            // add up the size
                            size += bitstream.getSize();
                        }
                    }
                    items.add(item.getID());
                }
            } finally {
                if (items.size() > 0) {
                    itemsMap.put("collection_" + collection.getID(), items);
                }
            }
        } else if (dso.getType() == Constants.ITEM) {
            Item item = (Item) dso;
            // get all the bundles in the item
            List<Bundle> bundles = item.getBundles();
            for (Bundle bundle : bundles) {
                // get all the bitstreams in the bundle
                List<Bitstream> bitstreams = bundle.getBitstreams();
                for (Bitstream bitstream : bitstreams) {
                    // add up the size
                    size += bitstream.getSize();
                }
            }
            ArrayList<UUID> items = new ArrayList<>();
            items.add(item.getID());
            itemsMap.put("item_" + item.getID(), items);
        } else {
            // nothing to do just ignore this type of DSpaceObject
        }
    }

    // check the size of all the bitstreams against the configuration file entry if it exists
    String megaBytes = ConfigurationManager.getProperty("org.dspace.app.itemexport.max.size");
    if (megaBytes != null) {
        float maxSize = 0;
        try {
            maxSize = Float.parseFloat(megaBytes);
        } catch (Exception e) {
            // ignore...configuration entry may not be present
        }

        if (maxSize > 0 && maxSize < (size / 1048576.00)) { // a megabyte
            throw new ItemExportException(ItemExportException.EXPORT_TOO_LARGE,
                    "The overall size of this export is too large. Please contact your administrator for more information.");
        }
    }

    // if we have any items to process then kick off anonymous thread
    if (itemsMap.size() > 0) {
        Thread go = new Thread() {
            @Override
            public void run() {
                Context context = null;
                Iterator<Item> iitems = null;
                try {
                    // create a new dspace context
                    context = new Context();
                    // ignore auths
                    context.turnOffAuthorisationSystem();

                    String fileName = assembleFileName("item", eperson, new Date());
                    String workParentDir = getExportWorkDirectory() + System.getProperty("file.separator")
                            + fileName;
                    String downloadDir = getExportDownloadDirectory(eperson);

                    File dnDir = new File(downloadDir);
                    if (!dnDir.exists() && !dnDir.mkdirs()) {
                        log.error("Unable to create download directory");
                    }

                    Iterator<String> iter = itemsMap.keySet().iterator();
                    while (iter.hasNext()) {
                        String keyName = iter.next();
                        List<UUID> uuids = itemsMap.get(keyName);
                        List<Item> items = new ArrayList<Item>();
                        for (UUID uuid : uuids) {
                            items.add(itemService.find(context, uuid));
                        }
                        iitems = items.iterator();

                        String workDir = workParentDir + System.getProperty("file.separator") + keyName;

                        File wkDir = new File(workDir);
                        if (!wkDir.exists() && !wkDir.mkdirs()) {
                            log.error("Unable to create working directory");
                        }

                        // export the items using normal export method
                        exportItem(context, iitems, workDir, 1, migrate, false);
                    }

                    // now zip up the export directory created above
                    zip(workParentDir, downloadDir + System.getProperty("file.separator") + fileName + ".zip");

                    // email message letting user know the file is ready for download
                    emailSuccessMessage(context, eperson, fileName + ".zip");

                    // return to enforcing auths
                    context.restoreAuthSystemState();
                } catch (Exception e1) {
                    try {
                        emailErrorMessage(eperson, e1.getMessage());
                    } catch (Exception e) {
                        // won't throw here
                    }
                    throw new IllegalStateException(e1);
                } finally {
                    // Make sure the database connection gets closed in all conditions.
                    try {
                        context.complete();
                    } catch (SQLException sqle) {
                        context.abort();
                    }
                }
            }
        };

        // isDaemon() only reads the daemon flag here; its result is discarded, so the thread stays non-daemon
        go.isDaemon();
        go.start();
    } else {
        Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
        emailErrorMessage(eperson,
                I18nUtil.getMessage("org.dspace.app.itemexport.no-result", supportedLocale));
    }
}
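This newer variant repeats the same go.isDaemon() pattern, so the export thread again stays non-daemon. That may well be the safer outcome for a long-running export: daemon threads are terminated abruptly when the JVM exits, as this self-contained sketch (not related to DSpace) demonstrates:

// Sketch: why leaving a long-running export thread non-daemon can be deliberate.
public class DaemonShutdownSketch {
    public static void main(String[] args) {
        Thread worker = new Thread(() -> {
            try {
                Thread.sleep(5_000);                    // pretend this is a long export
                System.out.println("export finished");  // never printed: the daemon is killed at JVM exit
            } catch (InterruptedException ignored) {
            }
        });
        worker.setDaemon(true);
        worker.start();
        // main returns immediately; the JVM exits and the daemon worker never completes
    }
}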
From source file:org.dspace.app.itemimport.ItemImportServiceImpl.java
/**
 * Given a local file or public URL to a zip file that has the Simple Archive Format, this method
 * imports the contents to DSpace.
 *
 * @param filepath The filepath to local file or the public URL of the zip file
 * @param owningCollection The owning collection the items will belong to
 * @param otherCollections The collections the created items will be inserted to, apart from the owning one
 * @param resumeDir In case of a resume request, the directory that contains the old mapfile and data
 * @param inputType The input type of the data (bibtex, csv, etc.), in case of local file
 * @param context The context
 * @param template whether to use template item
 * @throws Exception if error
 */
@Override
public void processUIImport(String filepath, Collection owningCollection, String[] otherCollections,
        String resumeDir, String inputType, Context context, final boolean template) throws Exception {
    final EPerson oldEPerson = context.getCurrentUser();
    final String[] theOtherCollections = otherCollections;
    final Collection theOwningCollection = owningCollection;
    final String theFilePath = filepath;
    final String theInputType = inputType;
    final String theResumeDir = resumeDir;
    final boolean useTemplateItem = template;

    Thread go = new Thread() {
        @Override
        public void run() {
            Context context = null;
            String importDir = null;
            EPerson eperson = null;

            try {
                // create a new dspace context
                context = new Context();
                eperson = ePersonService.find(context, oldEPerson.getID());
                context.setCurrentUser(eperson);
                context.turnOffAuthorisationSystem();

                boolean isResume = theResumeDir != null;

                List<Collection> collectionList = new ArrayList<>();
                if (theOtherCollections != null) {
                    for (String colID : theOtherCollections) {
                        UUID colId = UUID.fromString(colID);
                        if (!theOwningCollection.getID().equals(colId)) {
                            Collection col = collectionService.find(context, colId);
                            if (col != null) {
                                collectionList.add(col);
                            }
                        }
                    }
                }

                importDir = ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir")
                        + File.separator + "batchuploads" + File.separator + context.getCurrentUser().getID()
                        + File.separator
                        + (isResume ? theResumeDir : (new GregorianCalendar()).getTimeInMillis());

                File importDirFile = new File(importDir);
                if (!importDirFile.exists()) {
                    boolean success = importDirFile.mkdirs();
                    if (!success) {
                        log.info("Cannot create batch import directory!");
                        throw new Exception("Cannot create batch import directory!");
                    }
                }

                String dataPath = null;
                String dataDir = null;
                if (theInputType.equals("saf")) {
                    // In case of Simple Archive Format import (from remote url)
                    dataPath = importDirFile + File.separator + "data.zip";
                    dataDir = importDirFile + File.separator + "data_unzipped2" + File.separator;
                } else if (theInputType.equals("safupload")) {
                    // In case of Simple Archive Format import (from upload file)
                    FileUtils.copyFileToDirectory(new File(theFilePath), importDirFile);
                    dataPath = importDirFile + File.separator + (new File(theFilePath)).getName();
                    dataDir = importDirFile + File.separator + "data_unzipped2" + File.separator;
                } else {
                    // For all other imports
                    dataPath = importDirFile + File.separator + (new File(theFilePath)).getName();
                    dataDir = importDirFile + File.separator + "data" + File.separator;
                }

                // Clear these files, if a resume
                if (isResume) {
                    if (!theInputType.equals("safupload")) {
                        (new File(dataPath)).delete();
                    }
                    (new File(importDirFile + File.separator + "error.txt")).delete();
                    FileDeleteStrategy.FORCE.delete(new File(dataDir));
                    FileDeleteStrategy.FORCE.delete(
                            new File(importDirFile + File.separator + "data_unzipped" + File.separator));
                }

                // In case of Simple Archive Format import we need an extra effort to download the zip file and unzip it
                String sourcePath = null;
                if (theInputType.equals("saf")) {
                    OutputStream os = new FileOutputStream(dataPath);
                    byte[] b = new byte[2048];
                    int length;
                    InputStream is = new URL(theFilePath).openStream();
                    while ((length = is.read(b)) != -1) {
                        os.write(b, 0, length);
                    }
                    is.close();
                    os.close();

                    sourcePath = unzip(new File(dataPath), dataDir);

                    // Move files to the required folder
                    FileUtils.moveDirectory(new File(sourcePath),
                            new File(importDirFile + File.separator + "data_unzipped" + File.separator));
                    FileDeleteStrategy.FORCE.delete(new File(dataDir));
                    dataDir = importDirFile + File.separator + "data_unzipped" + File.separator;
                } else if (theInputType.equals("safupload")) {
                    sourcePath = unzip(new File(dataPath), dataDir);
                    // Move files to the required folder
                    FileUtils.moveDirectory(new File(sourcePath),
                            new File(importDirFile + File.separator + "data_unzipped" + File.separator));
                    FileDeleteStrategy.FORCE.delete(new File(dataDir));
                    dataDir = importDirFile + File.separator + "data_unzipped" + File.separator;
                }

                // Create mapfile path
                String mapFilePath = importDirFile + File.separator + "mapfile";

                List<Collection> finalCollections = null;
                if (theOwningCollection != null) {
                    finalCollections = new ArrayList<>();
                    finalCollections.add(theOwningCollection);
                    finalCollections.addAll(collectionList);
                }

                setResume(isResume);

                if (theInputType.equals("saf") || theInputType.equals("safupload")) {
                    // In case of Simple Archive Format import
                    addItems(context, finalCollections, dataDir, mapFilePath, template);
                } else {
                    // For all other imports (via BTE)
                    addBTEItems(context, finalCollections, theFilePath, mapFilePath, useTemplateItem,
                            theInputType, dataDir);
                }

                // email message letting user know the file is ready for download
                emailSuccessMessage(context, eperson, mapFilePath);

                context.complete();
            } catch (Exception e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
                String exceptionString = ExceptionUtils.getStackTrace(e);

                try {
                    File importDirFile = new File(importDir + File.separator + "error.txt");
                    PrintWriter errorWriter = new PrintWriter(importDirFile);
                    errorWriter.print(exceptionString);
                    errorWriter.close();

                    emailErrorMessage(eperson, exceptionString);
                    throw new Exception(e.getMessage());
                } catch (Exception e2) {
                    // won't throw here
                }
            } finally {
                // Make sure the database connection gets closed in all conditions.
                try {
                    context.complete();
                } catch (SQLException sqle) {
                    context.abort();
                }
            }
        }
    };

    // isDaemon() only reads the daemon flag here; its result is discarded, so the import runs on a normal thread
    go.isDaemon();
    go.start();
}