Example usage for java.lang System nanoTime

List of usage examples for java.lang System nanoTime

Introduction

On this page you can find example usages of java.lang.System.nanoTime.

Prototype

@HotSpotIntrinsicCandidate
public static native long nanoTime();

Source Link

Document

Returns the current value of the running Java Virtual Machine's high-resolution time source, in nanoseconds.

Usage

From source file:com.nesscomputing.service.discovery.client.internal.ServiceDiscoveryReader.java

/**
 * Visits the current set of ZooKeeper child nodes, fetches each node's data
 * asynchronously, rebuilds the service-name -> service-information map, and
 * swaps a new set of consistent-ring groups into the state holder.
 *
 * NOTE(review): "now" comes from System.nanoTime(), so the badNodes penalty
 * timestamps are only comparable within this JVM instance — confirm badNodes
 * entries are never persisted or shared across processes.
 */
@Override
void visit(final List<String> childNodes, final ZooKeeper zookeeper, final long tick)
        throws InterruptedException {
    final Map<String, List<ServiceInformation>> serviceMap = new HashMap<String, List<ServiceInformation>>();

    if (!childNodes.isEmpty()) {
        final List<ServiceInformation> rawServices = new ArrayList<ServiceInformation>(childNodes.size());
        // One latch count per child: every child is either skipped (penalized
        // bad node) or eventually receives its getData callback.
        final CountDownLatch latch = new CountDownLatch(childNodes.size());

        final long now = System.nanoTime();

        for (final String child : childNodes) {

            final String childPath = getNodePath(child);

            if (badNodes.containsKey(childPath)) {
                final Long penaltyEndsTime = badNodes.get(childPath);
                if (penaltyEndsTime != null && penaltyEndsTime > now) {
                    // Decrement the countdown latch, because there will be no callback for this
                    // node.
                    latch.countDown();
                    // Ignore a bad node for a while.
                    continue;
                }
                LOG.info("Unmarking %s as a bad node!", childPath);
                badNodes.remove(childPath);
            }

            // Asynchronous read; the callback runs on ZooKeeper's event thread.
            zookeeper.getData(childPath, false, new DataCallback() {
                @Override
                public void processResult(final int rc, final String path, final Object ctx, final byte[] data,
                        final Stat stat) {

                    ServiceInformation si = null;
                    try {
                        if (data != null && data.length > 0) {
                            si = objectMapper.readValue(data, ServiceInformation.class);
                            LOG.trace("%s contains %s", path, si);
                        } else {
                            // This can sometimes happen if a node that we want to inspect
                            // disappears between callback post and callback processing.
                            LOG.trace("Got callback but no data!");
                        }

                    } catch (IOException ioe) {
                        LOG.debug(ioe, "While deserializing %s", new String(data, Charsets.UTF_8));
                        LOG.info("Marking %s as a bad node!", path);
                        // Put a bad node into the penalty box.
                        badNodes.put(path, now + penaltyTime);
                    } finally {
                        // Synchronized because late callbacks (after the await
                        // timeout) may race with the iteration further below.
                        synchronized (rawServices) {
                            if (si != null) {
                                rawServices.add(si);
                            }
                        }
                        latch.countDown();
                    }
                }
            }, null);
        }

        // Bounded wait: on timeout we proceed with whatever results arrived.
        if (!latch.await(discoveryConfig.getZookeeperTimeout().getMillis(), TimeUnit.MILLISECONDS)) {
            LOG.warn("Timeout waiting for callbacks, some nodes were not parsed.");
        }

        // Make sure that even with late callbacks, this will not throw spurious ConcurrentModificationExceptions
        synchronized (rawServices) {
            for (final ServiceInformation si : rawServices) {
                List<ServiceInformation> services = serviceMap.get(si.getServiceName());
                if (services == null) {
                    services = new ArrayList<ServiceInformation>();
                    serviceMap.put(si.getServiceName(), services);
                }
                services.add(si);
            }
        }
    }

    Map<String, ConsistentRingGroup> serviceGroups = Maps.newHashMap();
    for (Map.Entry<String, List<ServiceInformation>> entry : serviceMap.entrySet()) {
        ConsistentRingGroup currentGroup = stateHolder.getState().get(entry.getKey());
        //Rebuilding a group is kind of expensive, so reuse the old group if it hasn't changed
        if (currentGroup != null
                && Sets.newHashSet(entry.getValue()).equals(Sets.newHashSet(currentGroup.getAll()))) {
            serviceGroups.put(entry.getKey(), currentGroup);
        } else {
            serviceGroups.put(entry.getKey(), new ConsistentRingGroup(entry.getValue()));
        }
    }
    stateHolder.setState(serviceGroups);
}

From source file:org.arrow.data.neo4j.store.impl.ExecutionStoreImpl.java

/**
 * Creates and returns a new graph node representing an execution, keyed by a
 * nanoTime-derived id and tagged with the "Execution" and "_Execution" labels.
 */
public Node getExecution() {
    final Map<String, Object> nodeProps = new HashMap<>();
    nodeProps.put("id", String.valueOf(System.nanoTime()));

    final Node execution = template.createNode(nodeProps);
    for (final String labelName : new String[] { "Execution", "_Execution" }) {
        execution.addLabel(DynamicLabel.label(labelName));
    }

    return execution;
}

From source file:org.intelligentsia.utility.generator.DefaultGeneratorServiceTest.java

/**
 * Micro-benchmark/sanity test: generates MAX identifiers and asserts that the
 * generated values are strictly increasing past the initial value, printing
 * the total and per-identifier generation time.
 */
@Test
public void testMultipleInsert() {
    Assert.assertNotNull(generatorFactory);
    final Generator generator = generatorFactory.newGenerator(
            ((SessionFactoryImplementor) ((Session) entityManager.getDelegate()).getSessionFactory())
                    .getDialect(),
            300000L, 50L, "TEST_SEQUENCES_COMMON_ID", CommonIdIdentifier.class.getSimpleName());
    Assert.assertNotNull(generator);
    final Long init = generator.generate();
    final long start = System.nanoTime();
    Long value = 0L;
    for (int i = 0; i < MAX; i++) {
        value = generator.generate();
    }
    final long end = System.nanoTime();
    System.out.println("------------------------------------------------------");
    System.out.println("Total: " + (end - start) + " ns ");
    // BUGFIX: cast to double BEFORE dividing by MAX; the original computed
    // (end - start) / MAX in integer arithmetic first, truncating the
    // per-identifier average before the ms conversion.
    System.out.println((((double) (end - start) / MAX) / (1000 * 1000)) + "ms per identifier");
    System.out.println("------------------------------------------------------");
    Assert.assertTrue(init < value);
    Assert.assertTrue((init + MAX) <= value);
}

From source file:bd.gov.forms.web.ListBuilder.java

/**
 * Persists a list definition posted from the list-builder form.
 *
 * Requires an authenticated user (redirects otherwise) and rejects requests
 * with a missing name or values. Assigns a quasi-unique sysId built from
 * nanoTime plus a random long before delegating to the DAO.
 *
 * @throws RuntimeException if the list name or values are empty
 */
@RequestMapping(value = "/saveList", method = RequestMethod.POST)
public String saveForm(@ModelAttribute("listDataCmd") ListData listData, BindingResult result,
        HttpServletRequest request, ModelMap model) {

    String access = UserAccessChecker.check(request);
    if (access != null) {
        return access;
    }

    if (FormUtil.isEmpty(listData.getName()) || FormUtil.isEmpty(listData.getValues())) {
        throw new RuntimeException("Required value not found.");
    }

    model.put("listDataCmd", listData);
    model.put("formAction", "saveList");

    log.debug("listData->save");

    // FIX: drop the deprecated new Long(...) boxing constructor; concatenating
    // the primitive long yields the identical string representation.
    listData.setSysId(Long.toString(System.nanoTime()) + new Random().nextLong());
    listDao.saveListData(listData);

    model.put("message", "msg.form.submitted");
    model.put("msgType", "success");

    return "redirect:list.htm";
}

From source file:dk.statsbiblioteket.netark.dvenabler.Command.java

/**
 * Runs the docvalues conversion from {@code in} to {@code out}, printing the
 * adjustment fields (verbosely or as a count) and the elapsed wall-clock
 * seconds on completion.
 */
private static void convert(File in, File out, List<DVConfig> dvFields, boolean verbose) throws IOException {
    if (verbose) {
        System.out.println(String.format("Adjusting from %s to %s with adjustment fields", in, out));
        for (DVConfig dvConfig : dvFields) {
            System.out.println(dvConfig.toString(true));
        }
    } else {
        System.out.println(
                String.format("Adjusting from %s to %s with %d adjustment fields", in, out, dvFields.size()));
    }
    final long startNS = System.nanoTime();
    IndexUtils.convert(in, out, dvFields);
    final long spentNS = System.nanoTime() - startNS;
    System.out.println("Finished conversion successfully in " + (spentNS / 1000000 / 1000) + " seconds");
}

From source file:com.ibm.bi.dml.runtime.controlprogram.parfor.RemoteDPParForMR.java

/**
 * Configures and runs a data-partitioning parfor MR job: the mappers
 * partition the input matrix, the reducers execute the supplied CP program
 * blocks, and result variables plus runtime statistics are collected
 * afterwards.
 *
 * @param pfid parfor program id, appended to the job name
 * @param itervar name of the iteration variable
 * @param matrixvar name of the partitioned matrix variable
 * @param program serialized CP program blocks executed in the reducers
 * @param resultFile HDFS path for result variables (deleted in the finally block)
 * @param input matrix object to partition
 * @param dpf data partition format
 * @param oi output info for intermediates (binary block or binary cell)
 * @param tSparseCol transpose-sparse-column flag passed to the partitioning config
 * @param enableCPCaching enable/disable CP caching in the reducers
 * @param numReducers number of reduce tasks
 * @param replication dfs replication factor for the results
 * @param max_retry max retries per map task (currently unused; cluster config wins)
 * @return job return holding success flag, task/iteration counts and result variables
 * @throws DMLRuntimeException if job configuration, execution or result reading fails
 */
public static RemoteParForJobReturn runJob(long pfid, String itervar, String matrixvar, String program,
        String resultFile, MatrixObject input, PDataPartitionFormat dpf, OutputInfo oi, boolean tSparseCol, //config params
        boolean enableCPCaching, int numReducers, int replication, int max_retry) //opt params
        throws DMLRuntimeException {
    RemoteParForJobReturn ret = null;
    String jobname = "ParFor-DPEMR";
    long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;

    JobConf job;
    job = new JobConf(RemoteDPParForMR.class);
    job.setJobName(jobname + pfid);

    //maintain dml script counters
    Statistics.incrementNoOfCompiledMRJobs();

    try {
        /////
        //configure the MR job

        //set arbitrary CP program blocks that will perform in the reducers
        MRJobConfiguration.setProgramBlocks(job, program);

        //enable/disable caching
        MRJobConfiguration.setParforCachingConfig(job, enableCPCaching);

        //setup input matrix
        Path path = new Path(input.getFileName());
        long rlen = input.getNumRows();
        long clen = input.getNumColumns();
        int brlen = (int) input.getNumRowsPerBlock();
        int bclen = (int) input.getNumColumnsPerBlock();
        MRJobConfiguration.setPartitioningInfo(job, rlen, clen, brlen, bclen, InputInfo.BinaryBlockInputInfo,
                oi, dpf, 1, input.getFileName(), itervar, matrixvar, tSparseCol);
        job.setInputFormat(InputInfo.BinaryBlockInputInfo.inputFormatClass);
        FileInputFormat.setInputPaths(job, path);

        //set mapper and reducers classes
        job.setMapperClass(DataPartitionerRemoteMapper.class);
        job.setReducerClass(RemoteDPParWorkerReducer.class);

        //set output format
        job.setOutputFormat(SequenceFileOutputFormat.class);

        //set output path
        MapReduceTool.deleteFileIfExistOnHDFS(resultFile);
        FileOutputFormat.setOutputPath(job, new Path(resultFile));

        //set the output key, value schema

        //parfor partitioning outputs (intermediates)
        job.setMapOutputKeyClass(LongWritable.class);
        if (oi == OutputInfo.BinaryBlockOutputInfo)
            job.setMapOutputValueClass(PairWritableBlock.class);
        else if (oi == OutputInfo.BinaryCellOutputInfo)
            job.setMapOutputValueClass(PairWritableCell.class);
        else
            throw new DMLRuntimeException("Unsupported intermrediate output info: " + oi);
        //parfor exec output
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);

        //////
        //set optimization parameters

        //set the number of mappers and reducers 
        job.setNumReduceTasks(numReducers);

        //disable automatic tasks timeouts and speculative task exec
        job.setInt("mapred.task.timeout", 0);
        job.setMapSpeculativeExecution(false);

        //set up preferred custom serialization framework for binary block format
        if (MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION)
            MRJobConfiguration.addBinaryBlockSerializationFramework(job);

        //set up map/reduce memory configurations (if in AM context)
        DMLConfig config = ConfigurationManager.getConfig();
        DMLAppMasterUtils.setupMRJobRemoteMaxMemory(job, config);

        //disable JVM reuse
        job.setNumTasksToExecutePerJvm(1); //-1 for unlimited 

        //set the replication factor for the results
        job.setInt("dfs.replication", replication);

        //set the max number of retries per map task
        //note: currently disabled to use cluster config
        //job.setInt("mapreduce.map.maxattempts", max_retry);

        //set unique working dir
        MRJobConfiguration.setUniqueWorkingDir(job);

        /////
        // execute the MR job         
        RunningJob runjob = JobClient.runJob(job);

        // Process different counters 
        Statistics.incrementNoOfExecutedMRJobs();
        Group pgroup = runjob.getCounters().getGroup(ParForProgramBlock.PARFOR_COUNTER_GROUP_NAME);
        int numTasks = (int) pgroup.getCounter(Stat.PARFOR_NUMTASKS.toString());
        int numIters = (int) pgroup.getCounter(Stat.PARFOR_NUMITERS.toString());
        // JIT/GC/cache statistics only make sense on a real cluster, not in local mode
        if (DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode()) {
            Statistics.incrementJITCompileTime(pgroup.getCounter(Stat.PARFOR_JITCOMPILE.toString()));
            Statistics.incrementJVMgcCount(pgroup.getCounter(Stat.PARFOR_JVMGC_COUNT.toString()));
            Statistics.incrementJVMgcTime(pgroup.getCounter(Stat.PARFOR_JVMGC_TIME.toString()));
            Group cgroup = runjob.getCounters().getGroup(CacheableData.CACHING_COUNTER_GROUP_NAME.toString());
            CacheStatistics
                    .incrementMemHits((int) cgroup.getCounter(CacheStatistics.Stat.CACHE_HITS_MEM.toString()));
            CacheStatistics.incrementFSBuffHits(
                    (int) cgroup.getCounter(CacheStatistics.Stat.CACHE_HITS_FSBUFF.toString()));
            CacheStatistics
                    .incrementFSHits((int) cgroup.getCounter(CacheStatistics.Stat.CACHE_HITS_FS.toString()));
            CacheStatistics.incrementHDFSHits(
                    (int) cgroup.getCounter(CacheStatistics.Stat.CACHE_HITS_HDFS.toString()));
            CacheStatistics.incrementFSBuffWrites(
                    (int) cgroup.getCounter(CacheStatistics.Stat.CACHE_WRITES_FSBUFF.toString()));
            CacheStatistics.incrementFSWrites(
                    (int) cgroup.getCounter(CacheStatistics.Stat.CACHE_WRITES_FS.toString()));
            CacheStatistics.incrementHDFSWrites(
                    (int) cgroup.getCounter(CacheStatistics.Stat.CACHE_WRITES_HDFS.toString()));
            CacheStatistics
                    .incrementAcquireRTime(cgroup.getCounter(CacheStatistics.Stat.CACHE_TIME_ACQR.toString()));
            CacheStatistics
                    .incrementAcquireMTime(cgroup.getCounter(CacheStatistics.Stat.CACHE_TIME_ACQM.toString()));
            CacheStatistics
                    .incrementReleaseTime(cgroup.getCounter(CacheStatistics.Stat.CACHE_TIME_RLS.toString()));
            CacheStatistics
                    .incrementExportTime(cgroup.getCounter(CacheStatistics.Stat.CACHE_TIME_EXP.toString()));
        }

        // read all files of result variables and prepare for return
        LocalVariableMap[] results = readResultFile(job, resultFile);

        ret = new RemoteParForJobReturn(runjob.isSuccessful(), numTasks, numIters, results);
    } catch (Exception ex) {
        throw new DMLRuntimeException(ex);
    } finally {
        // remove created files 
        try {
            MapReduceTool.deleteFileIfExistOnHDFS(new Path(resultFile), job);
        } catch (IOException ex) {
            throw new DMLRuntimeException(ex);
        }
    }

    if (DMLScript.STATISTICS) {
        long t1 = System.nanoTime();
        Statistics.maintainCPHeavyHitters("MR-Job_" + jobname, t1 - t0);
    }

    return ret;
}

From source file:com.db.comserv.main.utilities.HttpCaller.java

/**
 * Executes an HTTP(S) request and packages status code, headers and body into
 * an HttpResult. Transparently handles gzip-encoded responses; responses
 * whose URL ends in an image extension (png/jpg/jpeg/gif) are converted to
 * greyscale, recompressed under a configured size cap and returned as a
 * base64 data-URI inside a JSON object.
 *
 * NOTE(review): the catch-all handler marks EVERY failure as a timeout
 * (setTimeout(true), HTTP 500), not only read/connect timeouts — confirm
 * callers rely on this.
 * NOTE(review): response/error streams are not closed on exception paths —
 * potential resource leak.
 */
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DM_DEFAULT_ENCODING")
public HttpResult runRequest(String type, String methodType, URL url, List<Map<String, String>> headers,
        String requestBody, String sslByPassOption, int connTimeOut, int readTimeout, HttpServletRequest req)
        throws KeyManagementException, NoSuchAlgorithmException, KeyStoreException,
        UnsupportedEncodingException, IOException, UnknownHostException, URISyntaxException {

    StringBuffer response = new StringBuffer();
    HttpResult httpResult = new HttpResult();
    boolean gzip = false;
    final long startNano = System.nanoTime();
    try {
        URL encodedUrl = new URL(Utility.encodeUrl(url.toString()));
        HttpURLConnection con = (HttpURLConnection) encodedUrl.openConnection();
        // optionally relax SSL host checking per sslByPassOption
        TrustModifier.relaxHostChecking(con, sslByPassOption);

        // connection timeout 5s
        con.setConnectTimeout(connTimeOut);

        // read timeout 10s, scaled by the request's query cost
        con.setReadTimeout(readTimeout * getQueryCost(req));

        methodType = methodType.toUpperCase();
        con.setRequestMethod(methodType);

        sLog.debug("Performing '{}' to '{}'", methodType, ServletUtil.filterUrl(url.toString()));

        // Get headers & set request property
        for (int i = 0; i < headers.size(); i++) {
            Map<String, String> header = headers.get(i);
            con.setRequestProperty(header.get("headerKey").toString(), header.get("headerValue").toString());
            sLog.debug("Setting Header '{}' with value '{}'", header.get("headerKey").toString(),
                    ServletUtil.filterHeaderValue(header.get("headerKey").toString(),
                            header.get("headerValue").toString()));
        }

        // advertise gzip support unless the caller already set Accept-Encoding
        if (con.getRequestProperty("Accept-Encoding") == null) {
            con.setRequestProperty("Accept-Encoding", "gzip");
        }

        if (requestBody != null && !requestBody.equals("")) {
            con.setDoOutput(true);
            DataOutputStream wr = new DataOutputStream(con.getOutputStream());
            wr.write(Utility.toUtf8Bytes(requestBody));
            wr.flush();
            wr.close();

        }

        // push response
        BufferedReader in = null;
        String inputLine;

        // detect gzip content encoding on the response
        List<String> contentEncoding = con.getHeaderFields().get("Content-Encoding");
        if (contentEncoding != null) {
            for (String val : contentEncoding) {
                if ("gzip".equalsIgnoreCase(val)) {
                    sLog.debug("Gzip enabled response");
                    gzip = true;
                    break;
                }
            }
        }

        sLog.debug("Response: '{} {}' with headers '{}'", con.getResponseCode(), con.getResponseMessage(),
                ServletUtil.buildHeadersForLog(con.getHeaderFields()));

        // non-2xx (other than 200/201): read the error stream if present
        if (con.getResponseCode() != 200 && con.getResponseCode() != 201) {
            if (con.getErrorStream() != null) {
                if (gzip) {
                    in = new BufferedReader(
                            new InputStreamReader(new GZIPInputStream(con.getErrorStream()), "UTF-8"));
                } else {
                    in = new BufferedReader(new InputStreamReader(con.getErrorStream(), "UTF-8"));
                }
            }
        } else {
            // image URLs get a special greyscale/base64 treatment and return early
            String[] urlParts = url.toString().split("\\.");
            if (urlParts.length > 1) {
                String ext = urlParts[urlParts.length - 1];
                if (ext.equalsIgnoreCase("png") || ext.equalsIgnoreCase("jpg") || ext.equalsIgnoreCase("jpeg")
                        || ext.equalsIgnoreCase("gif")) {
                    BufferedImage imBuff;
                    if (gzip) {
                        imBuff = ImageIO.read(new GZIPInputStream(con.getInputStream()));
                    } else {
                        BufferedInputStream bfs = new BufferedInputStream(con.getInputStream());
                        imBuff = ImageIO.read(bfs);
                    }
                    BufferedImage newImage = new BufferedImage(imBuff.getWidth(), imBuff.getHeight(),
                            BufferedImage.TYPE_3BYTE_BGR);

                    // converting image to greyScale
                    int width = imBuff.getWidth();
                    int height = imBuff.getHeight();
                    for (int i = 0; i < height; i++) {
                        for (int j = 0; j < width; j++) {
                            Color c = new Color(imBuff.getRGB(j, i));
                            int red = (int) (c.getRed() * 0.21);
                            int green = (int) (c.getGreen() * 0.72);
                            int blue = (int) (c.getBlue() * 0.07);
                            int sum = red + green + blue;
                            Color newColor = new Color(sum, sum, sum);
                            newImage.setRGB(j, i, newColor.getRGB());
                        }
                    }

                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    ImageIO.write(newImage, "jpg", out);
                    byte[] bytes = out.toByteArray();

                    byte[] encodedBytes = Base64.encodeBase64(bytes);
                    String base64Src = new String(encodedBytes);
                    // estimate decoded size in KB from base64 length
                    int imageSize = ((base64Src.length() * 3) / 4) / 1024;
                    int initialImageSize = imageSize;
                    int maxImageSize = Integer.parseInt(properties.getValue("Reduced_Image_Size"));
                    float quality = 0.9f;
                    if (!(imageSize <= maxImageSize)) {
                        //This means that image size is greater and needs to be reduced.
                        sLog.debug("Image size is greater than " + maxImageSize + " , compressing image.");
                        // recompress at decreasing JPEG quality until under the cap
                        // (or quality bottoms out at 0.1)
                        while (!(imageSize < maxImageSize)) {
                            base64Src = compress(base64Src, quality);
                            imageSize = ((base64Src.length() * 3) / 4) / 1024;
                            quality = quality - 0.1f;
                            DecimalFormat df = new DecimalFormat("#.0");
                            quality = Float.parseFloat(df.format(quality));
                            if (quality <= 0.1) {
                                break;
                            }
                        }
                    }
                    sLog.debug("Initial image size was : " + initialImageSize + " Final Image size is : "
                            + imageSize + "Url is : " + url + "quality is :" + quality);
                    String src = "data:image/" + urlParts[urlParts.length - 1] + ";base64,"
                            + new String(base64Src);
                    JSONObject joResult = new JSONObject();
                    joResult.put("Image", src);
                    out.close();
                    httpResult.setResponseCode(con.getResponseCode());
                    httpResult.setResponseHeader(con.getHeaderFields());
                    httpResult.setResponseBody(joResult.toString());
                    httpResult.setResponseMsg(con.getResponseMessage());
                    return httpResult;
                }
            }

            if (gzip) {
                in = new BufferedReader(
                        new InputStreamReader(new GZIPInputStream(con.getInputStream()), "UTF-8"));
            } else {
                in = new BufferedReader(new InputStreamReader(con.getInputStream(), "UTF-8"));
            }
        }
        if (in != null) {
            while ((inputLine = in.readLine()) != null) {
                response.append(inputLine);
            }
            in.close();
        }

        httpResult.setResponseCode(con.getResponseCode());
        httpResult.setResponseHeader(con.getHeaderFields());
        httpResult.setResponseBody(response.toString());
        httpResult.setResponseMsg(con.getResponseMessage());

    } catch (Exception e) {
        sLog.error("Failed to received HTTP response after timeout", e);

        httpResult.setTimeout(true);
        httpResult.setResponseCode(500);
        httpResult.setResponseMsg("Internal Server Error Timeout");
        return httpResult;
    }

    return httpResult;
}

From source file:OptimalPrimitives.java

/**
 * Benchmarks Shape-object rendering against the equivalent direct Graphics
 * primitive calls (line and rectangle), printing each timing to stdout and
 * painting it onto the component.
 */
protected void paintComponent(Graphics g) {
    Graphics2D g2d = (Graphics2D) g;

    g.setColor(Color.WHITE);
    g.fillRect(0, 0, getWidth(), getHeight());
    g.setColor(Color.BLACK);

    g.drawString("Bad vs. Good Primitive Rendering", 50, 20);
    g.drawString("(" + ITERATIONS + " iterations)", 100, 35);
    g.drawString("Bad: ", 10, BAD_Y + 30);
    g.drawString("Good: ", 10, GOOD_Y + 30);

    // Bad line: Shape object rendered through the Graphics2D pipeline
    Shape line = new Line2D.Double(LINE_X, BAD_Y, LINE_X + 50, BAD_Y + 50);
    long started = System.nanoTime();
    for (int i = 0; i < ITERATIONS; ++i) {
        g2d.draw(line);
    }
    report(g, "bad line", System.nanoTime() - started, LINE_X, BAD_Y);

    // Good line: direct primitive call
    started = System.nanoTime();
    for (int i = 0; i < ITERATIONS; ++i) {
        g.drawLine(LINE_X, GOOD_Y, LINE_X + 50, GOOD_Y + 50);
    }
    report(g, "good line", System.nanoTime() - started, LINE_X, GOOD_Y);

    // Bad rect: Shape object filled through the Graphics2D pipeline
    Shape rect = new Rectangle(RECT_X, BAD_Y, 50, 50);
    started = System.nanoTime();
    for (int i = 0; i < ITERATIONS; ++i) {
        g2d.fill(rect);
    }
    report(g, "bad rect", System.nanoTime() - started, RECT_X, BAD_Y);

    // Good rect: direct primitive call
    started = System.nanoTime();
    for (int i = 0; i < ITERATIONS; ++i) {
        g.fillRect(RECT_X, GOOD_Y, 50, 50);
    }
    report(g, "good rect", System.nanoTime() - started, RECT_X, GOOD_Y);
}

/** Prints the elapsed time in ms to stdout and paints it below the sample. */
private void report(Graphics g, String label, long elapsedNanos, int x, int y) {
    long totalTime = elapsedNanos / 1000000;
    System.out.println(label + " = " + totalTime);
    g.drawString(totalTime + " ms", x, y + 70);
}

From source file:my.school.spring.beans.ProfilingBeanPostProcessor.java

/**
 * Wraps beans registered in {@code profilingClasses} in a JDK dynamic proxy
 * that logs every interface call and, when profiling is enabled, the elapsed
 * wall time in nanoseconds.
 *
 * @param bean     the fully initialized bean instance
 * @param beanName the bean's registry name, used to look up its class
 * @return a logging/profiling proxy when the bean was registered for
 *         profiling, otherwise the bean unchanged
 */
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
    Class<?> beanClazz = profilingClasses.get(beanName);
    if (beanClazz != null) {
        Object proxyInstance = Proxy.newProxyInstance(beanClazz.getClassLoader(), beanClazz.getInterfaces(),
                (Object proxy, Method method, Object[] args) -> {
                    LOG.info("Call for {}::{}", proxy.getClass().getName(), method.getName());
                    Object ret;
                    if (profilingController.isEnabled()) {
                        long startTime = System.nanoTime();
                        ret = method.invoke(bean, args);
                        long endTime = System.nanoTime();
                        LOG.info("PROF:8150F577C514: {}", endTime - startTime);
                    } else {
                        ret = method.invoke(bean, args);
                    }
                    // BUGFIX: removed the stray "ret.getClass().getName();"
                    // statement, which threw NullPointerException whenever a
                    // proxied method returned null (or was void) and otherwise
                    // had no effect.
                    return ret;
                });
        LOG.info("Creating a PROXY for {}: {} over target of class {} (Defined as: {})", beanName,
                proxyInstance.getClass().getName(), bean.getClass().getName(), beanClazz.getName());
        return proxyInstance;
    }
    return bean;
}

From source file:de.hshannover.f4.trust.iron.mapserver.communication.http.BasicAccessAuthenticationTest.java

@Override
@Before
public void setUp() {

    // ugly, create a properties file "somewhere" for testing;
    // nanoTime keeps the name unique within this JVM run
    try {
        File f;
        do {
            testConf = "irond_test_" + System.nanoTime();
            f = new File(testConf);
        } while (f.exists());

        // NOTE(review): if write() throws, the streams leak; the
        // BufferedWriter is flushed but never closed (only the underlying
        // FileWriter is) — consider try-with-resources.
        FileWriter fw = new FileWriter(f);
        BufferedWriter bw = new BufferedWriter(fw);
        bw.write("test:test");
        bw.flush();
        fw.close();
    } catch (IOException e) {
        fail(e.getMessage());
    }

    // wire a BasicChannelAuth backed by a property-file auth provider
    mServerConf = StubProvider.getServerConfStub(testConf);
    BasicAuthProvider provider = null;
    try {
        provider = new BasicAuthProviderPropImpl(mServerConf);
    } catch (ProviderInitializationException e) {
        fail("Cannot initialize the provider!");
    }

    Socket s = new Socket();
    mBasicAuth = new BasicChannelAuth(s, provider);
}