Example usage for the java.lang.Object constructor Object()

List of usage examples for the java.lang.Object constructor Object()

Introduction

On this page you can find example usage for the java.lang.Object no-argument constructor, Object().

Prototype

@HotSpotIntrinsicCandidate
public Object() 

Document

Constructs a new object.
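
Before the project examples below, note that a plain Object has identity-based equals and hashCode, so new Object() is frequently used where only identity matters. The following is a minimal, self-contained sketch of that sentinel idiom; the class and names are hypothetical, not taken from any project listed here.

public class SentinelExample {
    // A unique marker value: no other object can ever be equal to it.
    private static final Object MISSING = new Object();

    public static void main(String[] args) {
        Object a = new Object();
        Object b = new Object();

        System.out.println(a.equals(b)); // false: Object.equals compares identity
        System.out.println(a.equals(a)); // true

        // Typical use: distinguish "no entry" from "entry whose value is null".
        Object value = lookup("key");
        if (value == MISSING) {
            System.out.println("no entry for key");
        }
    }

    private static Object lookup(String key) {
        // hypothetical lookup that returns MISSING when nothing is stored
        return MISSING;
    }
}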

Usage

From source file:gaderian.test.utilities.TestSpringLookupFactory.java

public void testBeanSpecificFactory() {
    SpringLookupFactory lf = new SpringLookupFactory();
    BeanFactory beanFactory = createMock(BeanFactory.class);

    ServiceImplementationFactoryParameters fp = createMock(ServiceImplementationFactoryParameters.class);

    SpringBeanParameter param = buildParameter("fred", beanFactory);

    Object fred = new Object();

    expect(beanFactory.getBean("fred", List.class)).andReturn(fred);

    expect(fp.getFirstParameter()).andReturn(param);

    expect(fp.getServiceInterface()).andReturn(List.class);

    replayAllRegisteredMocks();

    Object actual = lf.createCoreServiceImplementation(fp);

    assertSame(fred, actual);

    verifyAllRegisteredMocks();
}

From source file:org.commonjava.rwx.http.httpclient4.EStreamResponseHandler.java

@Override
public List<Event<?>> handleResponse(final HttpResponse resp) throws ClientProtocolException, IOException {
    final StatusLine status = resp.getStatusLine();
    System.out.println(status);
    if (status.getStatusCode() > 199 && status.getStatusCode() < 203) {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        IOUtils.copy(resp.getEntity().getContent(), baos);

        Logger logger = LoggerFactory.getLogger(getClass());

        File recording = null;
        FileOutputStream stream = null;
        try {
            recording = File.createTempFile("xml-rpc.response.", ".xml");
            stream = new FileOutputStream(recording);
            stream.write(baos.toByteArray());
        } catch (final IOException e) {
            logger.debug("Failed to record xml-rpc response to file.", e);
            // this is an auxiliary function; ignore errors.
        } finally {
            IOUtils.closeQuietly(stream);
            logger.info("\n\n\nRecorded response to: {}\n\n\n", recording);
        }

        try {
            logger.trace("Got response: \n\n{}", new Object() {
                @Override
                public String toString() {
                    try {
                        return new String(baos.toByteArray(), "UTF-8");
                    } catch (final UnsupportedEncodingException e) {
                        return new String(baos.toByteArray());
                    }
                }
            });

            final EventStreamParserImpl estream = new EventStreamParserImpl();
            new StaxParser(new ByteArrayInputStream(baos.toByteArray())).parse(new FaultAwareWrapper(estream));

            return estream.getEvents();
        } catch (final XmlRpcException e) {
            error = e;
            return null;
        }
    } else {
        error = new XmlRpcException("Invalid response status: '" + status + "'.");
        return null;
    }
}
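
In the handler above, the anonymous new Object() { ... } overrides toString() so that the response bytes are converted to a String only if TRACE logging is actually enabled, because SLF4J calls toString() on message arguments lazily. A minimal sketch of that idiom, with hypothetical names and assuming SLF4J on the classpath:

import java.nio.charset.StandardCharsets;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LazyToStringExample {
    private static final Logger LOG = LoggerFactory.getLogger(LazyToStringExample.class);

    public static void logBody(final byte[] body) {
        // The conversion runs only when the logger formats the message.
        LOG.trace("Got response:\n{}", new Object() {
            @Override
            public String toString() {
                return new String(body, StandardCharsets.UTF_8);
            }
        });
    }
}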

From source file:fuse.okuyamafs.OkuyamaFilesystem.java

public OkuyamaFilesystem(String okuyamaMasterNode, boolean singleMode) throws IOException {
    log.info("okuyama file system init start...");

    int files = 0;
    int dirs = 0;
    int blocks = 0;

    this.masterNodeStr = okuyamaMasterNode;

    statfs = new FuseStatfs();
    statfs.blocks = Integer.MAX_VALUE;
    statfs.blockSize = blockSize;
    statfs.blocksFree = Integer.MAX_VALUE;
    statfs.files = files + dirs;
    statfs.filesFree = Integer.MAX_VALUE;
    statfs.namelen = 2048;
    try {
        for (int idx = 0; idx < 100; idx++) {
            this.parallelDataAccessSync[idx] = new Object();
        }
        client = new OkuyamaClientWrapper(singleMode);
    } catch (Exception e) {
        throw new IOException(e);
    }
    log.info("okuyama file system init end...");
}

From source file:org.jax.maanova.plot.RenderChartImageTask.java

/**
 * Setter for the chart
 * @param chart the chart
 */
public void setChart(JFreeChart chart) {
    if (this.chart != null) {
        this.chart.removeChangeListener(this.chartChageListener);
    }

    this.chart = chart;

    if (chart != null) {
        chart.addChangeListener(this.chartChageListener);
    }

    this.renderRequestQueue.offer(new Object());
}
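
Here new Object() is offered to renderRequestQueue purely as an opaque token: its arrival, not its content, signals that the chart should be re-rendered. A rough sketch of that coalescing-token pattern, with hypothetical names that are not part of the project above:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class RenderLoop implements Runnable {
    // Each offered Object is just a "please render" signal.
    private final BlockingQueue<Object> renderRequests = new LinkedBlockingQueue<>();

    public void requestRender() {
        renderRequests.offer(new Object());
    }

    @Override
    public void run() {
        try {
            while (true) {
                renderRequests.take();  // block until at least one request arrives
                renderRequests.clear(); // coalesce any requests that piled up
                renderChart();          // one render covers the whole batch
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    private void renderChart() {
        // rendering work would go here
    }
}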

From source file:org.openspaces.eviction.test.FIFOSingleOrderTest.java

protected void assertMemoryShortageTest() {
    Assert.assertTrue("amount of objects in space is larger than cache size",
            gigaSpace.count(new Object()) <= cacheSize);
}

From source file:de.r2soft.empires.framework.test.ObjectTreeTest.java

@Test
public void testMove() {
    Object obj = new Object();
    tree.insert(new Vector2D(50, 50), obj);
    tree.move(new Vector2D(50, 50), new Vector2D(60, 60));
    Object objT = tree.search(new Vector2D(60, 60));
    Assert.assertEquals(obj, objT);
}

From source file:net.cpollet.jixture.fixtures.TestMappingFixture.java

@Test
public void entityPassesFilterReturnsTrueWhenNoFilterSet() {
    // GIVEN
    MappingFixture mappingFixture = new MappingFixture();

    // WHEN
    boolean actualValue = mappingFixture.filter(new Object());

    // THEN
    assertThat(actualValue).isTrue();
}

From source file:org.openspaces.eviction.test.ClassSpecificOrderTest.java

@Test
public void inClassLRUReadTest() throws Exception {

    logger.info("write an object");
    gigaSpace.write(new SilverMedal(0));

    logger.info("fill the space with more than cache size object and red the original in the middle");
    for (int i = 1; i <= cacheSize + 10; i++) {
        if (i == (cacheSize / 2))
            gigaSpace.read(new SilverMedal(0));
        else
            gigaSpace.write(new SilverMedal(i));
    }
    Assert.assertTrue("amount of objects in space is larger than cache size",
            gigaSpace.count(new Object()) == cacheSize);
    logger.info("assert the original object is still in cache");
    Assert.assertNotNull("silver medal 0 is not in space", gigaSpace.read(new SilverMedal(0)));
    logger.info("Test Passed");
}

From source file:hu.dolphio.tprttapi.service.RttServiceImpl.java

@Override
public Collection<ReportElementTO> loadRttTrackingsFromReport()
        throws URISyntaxException, IOException, ClientException {

    if ((projectId == null && reportId == null) || (projectId != null && reportId != null)) {
        throw new ClientException("Project ID or Report ID must be set!");
    }

    BasicCookieStore cookieStore = new BasicCookieStore();
    CloseableHttpClient httpclient = HttpClients.custom().setDefaultCookieStore(cookieStore)
            .setDefaultRequestConfig(config).build();

    HttpEntity httpEntity = new InputStreamEntity(new ByteArrayInputStream(
            new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(new Object() {
                public String username = propertyReader.getRttUserName();
                public String password = propertyReader.getRttPassword();
            }).getBytes("UTF-8")));

    HttpUriRequest login = RequestBuilder.post().setUri(new URI(propertyReader.getRttHost() + "/login"))
            .setEntity(httpEntity).setHeader("Accept-Language", "sk,en-US;q=0.8,en;q=0.6,hu;q=0.4")
            .setHeader("Content-Type", "application/json;charset=utf-8").build();
    CloseableHttpResponse loginResponse = httpclient.execute(login);
    LOG.debug("RTT login response: " + loginResponse);

    if (HttpResponseStatus
            .getStatusByCode(loginResponse.getStatusLine().getStatusCode()) != HttpResponseStatus.SUCCESS) {
        throw new ClientException(
                "[" + loginResponse.getStatusLine().getStatusCode() + "] Login to RTT failed!");
    }

    EntityUtils.consume(loginResponse.getEntity());

    StringBuilder postUriBuilder = new StringBuilder().append(propertyReader.getRttHost())
            .append(reportId == null ? propertyReader.getRttReportByProjectUrl()
                    : propertyReader.getRttReportByReportUrl())
            .append(reportId == null ? projectId : reportId).append("/json?startDate=").append(dateFrom)
            .append("&endDate=").append(dateTo);

    LOG.trace("RTT report query: " + postUriBuilder.toString());

    HttpGet get = new HttpGet(postUriBuilder.toString());
    CloseableHttpResponse rttResponse = httpclient.execute(get);

    if (HttpResponseStatus
            .getStatusByCode(rttResponse.getStatusLine().getStatusCode()) != HttpResponseStatus.SUCCESS) {
        throw new ClientException("[" + rttResponse.getStatusLine().getStatusCode()
                + "] Downloading tracking information from RTT failed!");
    }
    String trackingsJson = IOUtils.toString(rttResponse.getEntity().getContent(), "utf-8");

    Collection<ReportElementTO> fromJson = new ObjectMapper().readValue(trackingsJson,
            new TypeReference<Collection<ReportElementTO>>() {
            });

    return fromJson;
}

From source file:com.cloudera.livy.client.local.driver.RemoteDriver.java

private RemoteDriver(String[] args) throws Exception {
    this.activeJobs = Maps.newConcurrentMap();
    this.jcLock = new Object();
    this.shutdownLock = new Object();
    localTmpDir = Files.createTempDir();

    SparkConf conf = new SparkConf();
    String serverAddress = null;
    int serverPort = -1;
    for (int idx = 0; idx < args.length; idx += 2) {
        String key = args[idx];
        if (key.equals("--remote-host")) {
            serverAddress = getArg(args, idx);
        } else if (key.equals("--remote-port")) {
            serverPort = Integer.parseInt(getArg(args, idx));
        } else if (key.equals("--client-id")) {
            conf.set(LocalConf.SPARK_CONF_PREFIX + CLIENT_ID.key, getArg(args, idx));
        } else if (key.equals("--secret")) {
            conf.set(LocalConf.SPARK_CONF_PREFIX + CLIENT_SECRET.key, getArg(args, idx));
        } else if (key.equals("--conf")) {
            String[] val = getArg(args, idx).split("[=]", 2);
            conf.set(val[0], val[1]);
        } else {
            throw new IllegalArgumentException("Invalid command line: " + Joiner.on(" ").join(args));
        }
    }

    executor = Executors.newCachedThreadPool();

    LOG.info("Connecting to: {}:{}", serverAddress, serverPort);

    LocalConf livyConf = new LocalConf(null);
    for (Tuple2<String, String> e : conf.getAll()) {
        if (e._1().startsWith(LocalConf.SPARK_CONF_PREFIX)) {
            String key = e._1().substring(LocalConf.SPARK_CONF_PREFIX.length());
            livyConf.set(key, e._2());
            LOG.debug("Remote Driver config: {} = {}", key, e._2());
        }
    }

    String clientId = livyConf.get(CLIENT_ID);
    Preconditions.checkArgument(clientId != null, "No client ID provided.");
    String secret = livyConf.get(CLIENT_SECRET);
    Preconditions.checkArgument(secret != null, "No secret provided.");

    System.out.println("MAPCONF-->");
    System.out.println(livyConf);
    this.egroup = new NioEventLoopGroup(livyConf.getInt(RPC_MAX_THREADS),
            new ThreadFactoryBuilder().setNameFormat("Driver-RPC-Handler-%d").setDaemon(true).build());
    this.serializer = new Serializer();
    this.protocol = new DriverProtocol(this, jcLock);

    // The RPC library takes care of timing out this.
    this.clientRpc = Rpc.createClient(livyConf, egroup, serverAddress, serverPort, clientId, secret, protocol)
            .get();
    this.running = true;

    this.clientRpc.addListener(new Rpc.Listener() {
        @Override
        public void rpcClosed(Rpc rpc) {
            LOG.warn("Shutting down driver because RPC channel was closed.");
            shutdown(null);
        }
    });

    try {
        long t1 = System.currentTimeMillis();
        LOG.info("Starting Spark context at {}", t1);
        JavaSparkContext sc = new JavaSparkContext(conf);
        LOG.info("Spark context finished initialization in {}ms", System.currentTimeMillis() - t1);
        sc.sc().addSparkListener(new DriverSparkListener(this));
        synchronized (jcLock) {
            jc = new JobContextImpl(sc, localTmpDir);
            jcLock.notifyAll();
        }
    } catch (Exception e) {
        LOG.error("Failed to start SparkContext: " + e, e);
        shutdown(e);
        synchronized (jcLock) {
            jcLock.notifyAll();
        }
        throw e;
    }

    synchronized (jcLock) {
        for (JobWrapper<?> job : jobQueue) {
            job.submit(executor);
        }
        jobQueue.clear();
    }
}
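
The jcLock and shutdownLock fields above show the classic use of new Object() as a dedicated lock/monitor object. A stand-alone sketch of the wait/notifyAll handshake that RemoteDriver performs around its JobContext, using hypothetical names rather than the project's own classes:

public class ContextHolder {
    private final Object lock = new Object();
    private Object context; // published once initialization finishes

    // Called by the initializing thread.
    public void publish(Object ctx) {
        synchronized (lock) {
            context = ctx;
            lock.notifyAll(); // wake threads blocked in awaitContext()
        }
    }

    // Called by threads that need the context before it may exist.
    public Object awaitContext() throws InterruptedException {
        synchronized (lock) {
            while (context == null) {
                lock.wait(); // releases the lock until publish() calls notifyAll()
            }
            return context;
        }
    }
}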