List of usage examples for java.lang.ThreadGroup.activeCount()
public int activeCount()
From source file:Main.java
public static void main(String[] args) throws Exception { ThreadGroup tg = Thread.currentThread().getThreadGroup(); MyThread mt1 = new MyThread(tg, "first"); MyThread mt2 = new MyThread(tg, "second"); mt1.start();//w ww . ja v a2 s. co m mt2.start(); ThreadGroup parent = tg.getParent(); Thread[] list = new Thread[parent.activeCount()]; int count = parent.enumerate(list, true); String[] thdinfo = new String[count]; for (int i = 0; i < count; i++) thdinfo[i] = list[i].toString(); mt1.join(); mt2.join(); for (int i = 0; i < count; i++) System.out.println(thdinfo[i]); }
From source file:Main.java
/**
 * Climbs to the root (system) thread group and prints the name of every
 * thread held directly in it, followed by its direct child thread groups.
 */
public static void main(String[] argv) throws Exception {
    // Walk the parent chain until the root group (parent == null) is reached.
    ThreadGroup root = Thread.currentThread().getThreadGroup().getParent();
    while (root.getParent() != null) {
        root = root.getParent();
    }

    // activeCount() is only an estimate, so oversize the array (x2) and
    // trust the count actually returned by enumerate().
    int threadCount = root.activeCount();
    Thread[] threadSnapshot = new Thread[threadCount * 2];
    threadCount = root.enumerate(threadSnapshot, false); // direct members only
    for (int i = 0; i < threadCount; i++) {
        System.out.println(threadSnapshot[i].getName());
    }

    int groupCount = root.activeGroupCount();
    ThreadGroup[] groupSnapshot = new ThreadGroup[groupCount * 2];
    groupCount = root.enumerate(groupSnapshot, false); // direct subgroups only
    for (int i = 0; i < groupCount; i++) {
        System.out.println(groupSnapshot[i]);
    }
}
From source file:Main.java
public static void main(String[] args) { final ThreadGroup threadGroup = new ThreadGroup("workers"); ExecutorService executor = Executors.newCachedThreadPool(new ThreadFactory() { public Thread newThread(Runnable r) { return new Thread(threadGroup, r); }//from w w w. j a v a 2 s . c o m }); System.out.println(threadGroup.activeCount()); }
From source file:MyThread.java
public static void main(String args[]) throws Exception { ThreadGroup group = new ThreadGroup("new Group"); MyThread t1 = new MyThread(group, "Thread1"); MyThread t2 = new MyThread(group, "Thread2"); t1.start();// w w w . ja v a 2 s . c om t2.start(); Thread.sleep(1000); System.out.println(group.activeCount() + " threads in thread group..."); Thread th[] = new Thread[group.activeCount()]; group.enumerate(th); for (Thread t : th) { System.out.println(t.getName()); } Thread.sleep(1000); System.out.println(group.activeCount() + " threads in thread group..."); group.interrupt(); }
From source file:MyThread.java
public static void main(String args[]) throws Exception { ThreadGroup group = new ThreadGroup("Group"); ThreadGroup newGroup = new ThreadGroup(group, "new group"); MyThread t1 = new MyThread(group, "Thread1"); MyThread t2 = new MyThread(group, "Thread2"); t1.start();//from www.ja v a 2 s . c om t2.start(); Thread.sleep(1000); System.out.println(group.activeCount() + " threads in thread group..."); Thread th[] = new Thread[group.activeCount()]; group.enumerate(th); for (Thread t : th) { System.out.println(t.getName()); } Thread.sleep(1000); System.out.println(group.activeCount() + " threads in thread group..."); group.interrupt(); }
From source file:MyThread.java
public static void main(String args[]) throws Exception { ThreadGroup tg = new ThreadGroup("My Group"); MyThread thrd = new MyThread(tg, "MyThread #1"); MyThread thrd2 = new MyThread(tg, "MyThread #2"); MyThread thrd3 = new MyThread(tg, "MyThread #3"); thrd.start();//w w w.jav a 2 s. c o m thrd2.start(); thrd3.start(); Thread.sleep(1000); System.out.println(tg.activeCount() + " threads in thread group."); Thread thrds[] = new Thread[tg.activeCount()]; tg.enumerate(thrds); for (Thread t : thrds) System.out.println(t.getName()); thrd.myStop(); Thread.sleep(1000); System.out.println(tg.activeCount() + " threads in tg."); tg.interrupt(); }
From source file:NewThread.java
public static void main(String args[]) { ThreadGroup groupA = new ThreadGroup("Group A"); ThreadGroup groupB = new ThreadGroup("Group B"); NewThread ob1 = new NewThread("One", groupA); NewThread ob2 = new NewThread("Two", groupA); NewThread ob3 = new NewThread("Three", groupB); NewThread ob4 = new NewThread("Four", groupB); groupA.list();//ww w . jav a2 s.com groupB.list(); Thread tga[] = new Thread[groupA.activeCount()]; groupA.enumerate(tga); for (int i = 0; i < tga.length; i++) { ((NewThread) tga[i]).mysuspend(); } try { Thread.sleep(4000); } catch (InterruptedException e) { System.out.println("Main thread interrupted."); } System.out.println("Resuming Group A"); for (int i = 0; i < tga.length; i++) { ((NewThread) tga[i]).myresume(); } try { ob1.join(); ob2.join(); ob3.join(); ob4.join(); } catch (Exception e) { System.out.println("Exception in Main thread"); } }
From source file:MyThread.java
public static void main(String args[]) { ThreadGroup groupA = new ThreadGroup("Group A"); ThreadGroup groupB = new ThreadGroup("Group B"); MyThread ob1 = new MyThread("One", groupA); MyThread ob2 = new MyThread("Two", groupA); MyThread ob3 = new MyThread("Three", groupB); MyThread ob4 = new MyThread("Four", groupB); System.out.println("\nHere is output from list():"); groupA.list();/*from w w w . j a v a2 s . co m*/ groupB.list(); System.out.println("Suspending Group A"); Thread tga[] = new Thread[groupA.activeCount()]; groupA.enumerate(tga); // get threads in group for (int i = 0; i < tga.length; i++) { ((MyThread) tga[i]).suspendMe(); // suspend each thread } try { Thread.sleep(1000); } catch (InterruptedException e) { System.out.println("Main thread interrupted."); } System.out.println("Resuming Group A"); for (int i = 0; i < tga.length; i++) { ((MyThread) tga[i]).resumeMe(); } try { System.out.println("Waiting for threads to finish."); ob1.join(); ob2.join(); ob3.join(); ob4.join(); } catch (Exception e) { System.out.println("Exception in Main thread"); } System.out.println("Main thread exiting."); }
From source file:org.apache.hadoop.cifs.Cifs2HdfsClient.java
/**
 * Entry point for the CIFS <-> HDFS transfer client.
 *
 * Parses command-line options (Commons-CLI style), populates the class-level
 * configuration fields (cifsHost, cifsPwd, hdfs2cifs, cifs2hdfs, ...), then
 * spawns one worker thread per file to copy, throttled against
 * ThreadGroup.activeCount() when --transfer_limit is given.
 *
 * NOTE(review): if parser.parse(...) throws, cmd stays null and the
 * cmd.hasOption(...) calls below will NPE — the stack trace is printed but
 * execution is not aborted.
 */
public static void main(String[] args) {
    // This handles parsing args.. This is a really crappy implementation. I
    // have a better one I can share from Commons-cli package
    Configuration conf = new Configuration();
    String[] otherArgs = null;
    try {
        // Let Hadoop's GenericOptionsParser consume its own flags first.
        otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    } catch (IOException e4) {
        e4.printStackTrace();
    }

    // Declare every supported CLI switch (stored in the class-level 'options').
    options = new Options();
    options.addOption("cifs_host", true, "CIFS/SMB Server Hostname --cifs_host winfileserver1.nt.example.com");
    options.addOption("cifs_domain", true, "CIFS/SMB Domain --cifs_domain nt.example.com");
    options.addOption("cifs_logonto", true, "CIFS/SMB LogonTo --cifs_logonto windc1nt, hadoopserver");
    options.addOption("cifs_input_folder", true, "CIFS/SMB Server Input Folder --cifs_input_folder M201209 ");
    options.addOption("cifs_output_folder", true, "CIFS/SMB Server Output Folder --cifs_output_folder M201209 ");
    options.addOption("cifs_input_file", true, "CIFS/SMB Server Single Input File filename.csv or filename*");
    options.addOption("cifs_userid", true, "CIFS/SMB Domain Userid --cifs_userid usergoeshere");
    options.addOption("cifs_pwd", true, "CIFS/SMB Domain Password --cifs_pwd passwordgoeshere");
    options.addOption("cifs_hadoop_cred_path", true,
            "CIFS Password --cifs_hadoop_cred_path /user/username/credstore.jceks");
    options.addOption("cifs_pwd_alias", true, "CIFS Password Alias --cifs_pwd_alias password.alias");
    options.addOption("transfer_limit", true,
            "# of transfers to execute simultaneously should not transfer Note: 10-15 = optimal --transfer_limit 10");
    options.addOption("max_depth", true, "CIFS ONLY - Max Depth to recurse --max_depth 10");
    options.addOption("ignore_top_folder_files", false, "CIFS ONLY - Ignore Top Level Folder files");
    options.addOption("no_nested_transfer", false, "CIFS ONLY - Do not nest into folders for transfer");
    // NOTE(review): this literal was line-wrapped in the scraped source;
    // reconstructed as a single string — confirm exact original text.
    options.addOption("hdfs_output_folder", true, "HDFS Output Folder --hdfs_output_dir /scm/");
    options.addOption("hdfs_input_folder", true, "HDFS Input Folder --hdfs_input_dir /scm/");
    // options.addOption("hdfs_input_file", true, "HDFS Single Input File
    // filename.csv or filename*");
    options.addOption("krb_keytab", true, "KeyTab File to Connect to HDFS --krb_keytab $HOME/S00000.keytab");
    options.addOption("krb_upn", true,
            "Kerberos Princpial for Keytab to Connect to HDFS --krb_upn S00000@EXAMP.EXAMPLE.COM");
    options.addOption("help", false, "Display help");

    CommandLineParser parser = new CIFSParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, otherArgs);
    } catch (ParseException e2) {
        // NOTE(review): parse failure leaves cmd == null; see method javadoc.
        e2.printStackTrace();
    }

    // The CIFS host, domain, and user id are mandatory.
    if (cmd.hasOption("cifs_host") && cmd.hasOption("cifs_domain") && cmd.hasOption("cifs_userid")) {
        cifsHost = cmd.getOptionValue("cifs_host");
        cifsDomain = cmd.getOptionValue("cifs_domain");
        cifsUserId = cmd.getOptionValue("cifs_userid");
        // The password comes either inline or from a Hadoop credential store alias.
        if (cmd.hasOption("cifs_pwd")) {
            cifsPwd = cmd.getOptionValue("cifs_pwd");
        } else if (cmd.hasOption("cifs_pwd_alias") && cmd.hasOption("cifs_hadoop_cred_path")) {
            cifsPwdAlias = cmd.getOptionValue("cifs_pwd_alias");
            cifsPwdCredPath = cmd.getOptionValue("cifs_hadoop_cred_path");
        } else {
            System.out.println("Missing CIFS Password / CIFS Password Alias / CIFS Hadoop Cred Path");
            missingParams();
            System.exit(0);
        }
        if (cmd.hasOption("cifs_logonto")) {
            cifsLogonTo = cmd.getOptionValue("cifs_logonto");
        } else {
            cifsLogonTo = null;
        }
        if (cmd.hasOption("ignore_top_folder_files")) {
            ignoreTopFolder = true;
        }
        if (cmd.hasOption("no_nested_transfer")) {
            noNesting = true;
        }
        if (cmd.hasOption("transfer_limit")) {
            transferLimitTrue = true;
            transferLimit = cmd.getOptionValue("transfer_limit");
        }
        if (cmd.hasOption("max_depth")) {
            maxDepth = Integer.valueOf(cmd.getOptionValue("max_depth"));
        }
        // HDFS -> CIFS direction.
        if (cmd.hasOption("hdfs_input_folder") && cmd.hasOption("cifs_output_folder")) {
            hdfsInputFolder = cmd.getOptionValue("hdfs_input_folder");
            cifsOutputFolder = cmd.getOptionValue("cifs_output_folder");
            hdfs2cifs = true;
            if (!(cifsOutputFolder.startsWith("/"))) {
                cifsOutputFolder = "/" + cifsOutputFolder;
                // NOTE(review): substring() result is discarded — this call has
                // no effect; likely a bug.
                cifsOutputFolder.substring(1, cifsOutputFolder.length());
            }
            if (!(cifsOutputFolder.endsWith("/"))) {
                // NOTE(review): result discarded here too — the intended
                // trailing-slash normalization never happens.
                cifsOutputFolder.substring(0, cifsOutputFolder.length() - 1);
            }
            /*
             * if (cmd.hasOption("hdfs_input_file")) { hdfsInputFile =
             * cmd.getOptionValue("hdfs_input_file"); maxDepth = -1; noNesting = true; }
             */
        }
        // CIFS -> HDFS direction.
        if (cmd.hasOption("hdfs_output_folder") && cmd.hasOption("cifs_input_folder")) {
            cifsInputFolder = cmd.getOptionValue("cifs_input_folder");
            if (!(cifsInputFolder.startsWith("/"))) {
                cifsInputFolder = "/" + cifsInputFolder;
            }
            if (!(cifsInputFolder.endsWith("/"))) {
                cifsInputFolder = cifsInputFolder + "/";
            }
            hdfsOutputFolder = cmd.getOptionValue("hdfs_output_folder");
            cifs2hdfs = true;
            // A single input file disables recursion into subfolders.
            if (cmd.hasOption("cifs_input_file")) {
                cifsInputFile = cmd.getOptionValue("cifs_input_file");
                maxDepth = -1;
                noNesting = true;
            }
        }
        // The two transfer directions are mutually exclusive.
        if (cifs2hdfs && hdfs2cifs) {
            System.out.println(
                    "Error Cannot specify hdfs_output_folder/hdfs_input_folder or cifs_output_folder/cifs_input_folder together");
            missingParams();
            System.exit(0);
        }
    } else {
        missingParams();
        System.exit(0);
    }

    // Optional Kerberos credentials for HDFS access; keytab must be readable.
    if (cmd.hasOption("krb_keytab") && cmd.hasOption("krb_upn")) {
        setKrb = true;
        keytab = cmd.getOptionValue("krb_keytab");
        keytabupn = cmd.getOptionValue("krb_upn");
        File keytabFile = new File(keytab);
        if (keytabFile.exists()) {
            if (!(keytabFile.canRead())) {
                System.out.println("KeyTab exists but cannot read it - exiting");
                missingParams();
                System.exit(1);
            }
        } else {
            System.out.println("KeyTab doesn't exist - exiting");
            missingParams();
            System.exit(1);
        }
    }

    hdfsClient = new HdfsClient(setKrb, keytabupn, keytab);
    hdfsClient.checkSecurity();
    // Resolve the password from the Hadoop credential store when an alias was given.
    if (cifsPwdCredPath != null && cifsPwdAlias != null) {
        cifsPwd = hdfsClient.getCredsViaJceks(cifsPwdCredPath, cifsPwdAlias);
    }

    if (hdfs2cifs) {
        cifsClient = new CifsClient(cifsLogonTo, cifsUserId, cifsPwd, cifsDomain, -1, false);
        List<String> hdfsfileList = null;
        try {
            hdfsfileList = hdfsClient.getHdfsFiles(hdfsInputFolder);
        } catch (IOException e1) {
            e1.printStackTrace();
        } catch (InterruptedException e1) {
            e1.printStackTrace();
        }
        // Spins up a thread per directory to allow some parallelism..
        // Theoretically this can be run as a Mapreduce job
        ThreadGroup cifsTg = new ThreadGroup("CifsThreadGroup");
        for (int i = 0; i < hdfsfileList.size(); i++) {
            String fileName = hdfsfileList.get(i);
            HDFS2CifsThread sc = null;
            if (transferLimitTrue) {
                // Throttle: block while the group is at the transfer limit.
                // NOTE(review): relies on workers notifying objectWaiter (or on
                // the 10s wait timeout) to re-check — confirm worker behavior.
                while (Integer.valueOf(transferLimit) == cifsTg.activeCount()) {
                    synchronized (objectWaiter) {
                        try {
                            objectWaiter.wait(10000L);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
            String threadName = "cifs" + i;
            sc = new HDFS2CifsThread(cifsClient, cifsTg, threadName, fileName, cifsHost, cifsOutputFolder, setKrb,
                    keytabupn, keytab);
            sc.start();
        }
    }

    if (cifs2hdfs) {
        cifsClient = new CifsClient(cifsLogonTo, cifsUserId, cifsPwd, cifsDomain, Integer.valueOf(maxDepth),
                noNesting);
        SmbFile smbFileConn = cifsClient.createInitialConnection(cifsHost, cifsInputFolder);
        try {
            // Walk the CIFS tree and collect the files to transfer.
            cifsClient.traverse(smbFileConn, Integer.valueOf(maxDepth), ignoreTopFolder, cifsInputFile);
        } catch (IOException e) {
            e.printStackTrace();
        }
        cifsFileList = cifsClient.getFileList();
        int cifsCount = cifsFileList.size();
        // Spins up a thread per directory to allow some parallelism..
        // Theoretically this can be run as a Mapreduce job
        ThreadGroup cifsTg = new ThreadGroup("CifsThreadGroup");
        for (int i = 0; i < cifsCount; i++) {
            String fileName = cifsFileList.get(i);
            Cifs2HDFSThread sc = null;
            if (transferLimitTrue) {
                // Same activeCount()-based throttle as the hdfs2cifs branch.
                while (Integer.valueOf(transferLimit) == cifsTg.activeCount()) {
                    synchronized (objectWaiter) {
                        try {
                            objectWaiter.wait(10000L);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
            try {
                String threadName = "cifs" + i;
                sc = new Cifs2HDFSThread(cifsTg, threadName, new SmbFile(fileName, cifsClient.auth), hdfsOutputFolder,
                        cifsHost, cifsInputFolder, setKrb, keytabupn, keytab);
            } catch (MalformedURLException e) {
                // NOTE(review): sc stays null here, so sc.start() below will NPE.
                e.printStackTrace();
            }
            sc.start();
        }
    }
}
From source file:Main.java
/**
 * Interrupts and joins every live thread in the given thread group.
 *
 * Fix: activeCount() is only an estimate — threads may die between sizing
 * the array and enumerating it, leaving trailing null slots. The original
 * iterated the whole array and could pass null to the Thread overload;
 * now only the slots actually filled (per enumerate()'s return value)
 * are processed.
 *
 * @param threadGroup group whose member threads are interrupted and joined
 */
public static void interruptThreadAndJoin(ThreadGroup threadGroup) {
    Thread[] threads = new Thread[threadGroup.activeCount()];
    int count = threadGroup.enumerate(threads);
    for (int i = 0; i < count; i++) {
        interruptThreadAndJoin(threads[i]);
    }
}