Example usage for java.lang ThreadLocal ThreadLocal

List of usage examples for java.lang ThreadLocal ThreadLocal

Introduction

This page lists usage examples for the java.lang ThreadLocal() constructor.

Prototype

public ThreadLocal() 

Source Link

Document

Creates a thread local variable.

Usage

From source file:org.apache.lens.cube.metadata.UpdatePeriod.java

/**
 * Returns a per-thread {@link DateFormat} for quarterly timestamps.
 * SimpleDateFormat is not thread-safe, hence the ThreadLocal holder.
 *
 * <p>The method is {@code synchronized} because the original
 * check-then-assign lazy initialization was racy: two threads could each
 * observe {@code quarterlyFormat == null} and install separate holders.
 *
 * @return the quarterly SimpleDateFormat for the calling thread
 */
private static synchronized DateFormat getQuarterlyFormat() {
    if (quarterlyFormat == null) {
        // assumes quarterlyFormat is declared as ThreadLocal<DateFormat> — TODO confirm
        quarterlyFormat = ThreadLocal.withInitial(() -> new SimpleDateFormat(QUARTERLY.formatStr()));
    }
    return quarterlyFormat.get();
}

From source file:com.google.feedserver.tools.FeedServerClientTool.java

/**
 * Creates the tool with a per-thread indentation counter starting at 0.
 *
 * <p>Uses {@link ThreadLocal#withInitial} so that <em>every</em> thread
 * observes an initial value of 0. The original {@code set(0)} only
 * initialized the constructing thread; any other thread calling
 * {@code indentation.get()} would receive {@code null} and hit an NPE on
 * unboxing.
 */
public FeedServerClientTool() {
    indentation = ThreadLocal.withInitial(() -> 0);
}

From source file:org.apache.hive.service.cli.thrift.ThriftCLIService_bak_20170116.java

/**
 * Builds the Thrift CLI service, opening a Hive JDBC connection and
 * installing a {@link TServerEventHandler} that tracks open connections
 * via metrics and closes sessions that disconnect without cleanup.
 *
 * @param service     backing CLI service used to serve and close sessions
 * @param serviceName name passed to the superclass
 * @throws IllegalStateException if the Hive connection cannot be opened
 */
public ThriftCLIService_bak_20170116(CLIService service, String serviceName) {
    super(serviceName);
    // NOTE(review): host/port/database are hard-coded here; they should be
    // read from configuration rather than baked into the constructor.
    String host = "ip_instead_tmp";
    String port = "10000";
    String url = "jdbc:hive2://" + host + ":" + port + "/bill";
    Properties pro = new Properties();
    HiveConnection hc;

    try {
        hc = new HiveConnection(url, pro);
    } catch (SQLException e) {
        // Fail fast with the original cause. The previous code swallowed the
        // exception with printStackTrace() and then dereferenced the null
        // connection below (guaranteed NPE with the real cause lost).
        throw new IllegalStateException("Failed to open Hive connection to " + url, e);
    }
    this.cliService = service;
    this.cliClient = hc.client;

    // Holder for the per-thread server context consulted elsewhere.
    currentServerContext = new ThreadLocal<ServerContext>();
    serverEventHandler = new TServerEventHandler() {
        @Override
        public ServerContext createContext(TProtocol input, TProtocol output) {
            // Count the new connection; metrics failures are non-fatal.
            Metrics metrics = MetricsFactory.getInstance();
            if (metrics != null) {
                try {
                    metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                } catch (Exception e) {
                    LOG.warn("Error Reporting JDO operation to Metrics system", e);
                }
            }
            return new ThriftCLIServerContext();
        }

        @Override
        public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) {
            // Decrement the connection counter; metrics failures are non-fatal.
            Metrics metrics = MetricsFactory.getInstance();
            if (metrics != null) {
                try {
                    metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                } catch (Exception e) {
                    LOG.warn("Error Reporting JDO operation to Metrics system", e);
                }
            }
            // If the client dropped without closing its session, close it now
            // so server-side resources are released.
            ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext;
            SessionHandle sessionHandle = context.getSessionHandle();
            if (sessionHandle != null) {
                LOG.info("Session disconnected without closing properly, close it now");
                try {
                    cliService.closeSession(sessionHandle);
                } catch (HiveSQLException e) {
                    LOG.warn("Failed to close session: " + e, e);
                }
            }
        }

        @Override
        public void preServe() {
            // No pre-serve work required.
        }

        @Override
        public void processContext(ServerContext serverContext, TTransport input, TTransport output) {
            // Publish the current context to this worker thread.
            currentServerContext.set(serverContext);
        }
    };
}

From source file:org.apache.hive.service.cli.thrift.ThriftCLIService_bak_20170120.java

/**
 * Builds the Thrift CLI service, opening a Hive JDBC connection (with an
 * explicit metastore URI) and installing a {@link TServerEventHandler}
 * that tracks open connections via metrics and closes sessions that
 * disconnect without cleanup.
 *
 * @param service     backing CLI service used to serve and close sessions
 * @param serviceName name passed to the superclass
 * @throws IllegalStateException if the Hive connection cannot be opened
 */
public ThriftCLIService_bak_20170120(CLIService service, String serviceName) {
    super(serviceName);
    // NOTE(review): host/port/database/metastore URI are hard-coded; they
    // should be read from configuration rather than baked into the code.
    String host = "ip_instead_tmp";
    String port = "10000";
    String url = "jdbc:hive2://" + host + ":" + port + "/bill;hive.metastore.uris=thrift://ip_instead_tmp:9083";

    Properties pro = new Properties();
    HiveConnection hc;

    try {
        hc = new HiveConnection(url, pro);
    } catch (SQLException e) {
        // Fail fast with the original cause. The previous code swallowed the
        // exception with printStackTrace() and then dereferenced the null
        // connection below (guaranteed NPE with the real cause lost).
        throw new IllegalStateException("Failed to open Hive connection to " + url, e);
    }
    this.cliService = service;
    this.cliClient = hc.client;

    // Holder for the per-thread server context consulted elsewhere.
    currentServerContext = new ThreadLocal<ServerContext>();
    serverEventHandler = new TServerEventHandler() {
        @Override
        public ServerContext createContext(TProtocol input, TProtocol output) {
            // Count the new connection; metrics failures are non-fatal.
            Metrics metrics = MetricsFactory.getInstance();
            if (metrics != null) {
                try {
                    metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                } catch (Exception e) {
                    LOG.warn("Error Reporting JDO operation to Metrics system", e);
                }
            }
            return new ThriftCLIServerContext();
        }

        @Override
        public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) {
            // Decrement the connection counter; metrics failures are non-fatal.
            Metrics metrics = MetricsFactory.getInstance();
            if (metrics != null) {
                try {
                    metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                } catch (Exception e) {
                    LOG.warn("Error Reporting JDO operation to Metrics system", e);
                }
            }
            // If the client dropped without closing its session, close it now
            // so server-side resources are released.
            ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext;
            SessionHandle sessionHandle = context.getSessionHandle();
            if (sessionHandle != null) {
                LOG.info("Session disconnected without closing properly, close it now");
                try {
                    cliService.closeSession(sessionHandle);
                } catch (HiveSQLException e) {
                    LOG.warn("Failed to close session: " + e, e);
                }
            }
        }

        @Override
        public void preServe() {
            // No pre-serve work required.
        }

        @Override
        public void processContext(ServerContext serverContext, TTransport input, TTransport output) {
            // Publish the current context to this worker thread.
            currentServerContext.set(serverContext);
        }
    };
}

From source file:org.apache.lens.cube.metadata.UpdatePeriod.java

/**
 * Returns a per-thread {@link DateFormat} for yearly timestamps.
 * SimpleDateFormat is not thread-safe, hence the ThreadLocal holder.
 *
 * <p>The method is {@code synchronized} because the original
 * check-then-assign lazy initialization was racy: two threads could each
 * observe {@code yearlyFormat == null} and install separate holders.
 *
 * @return the yearly SimpleDateFormat for the calling thread
 */
private static synchronized DateFormat getYearlyFormat() {
    if (yearlyFormat == null) {
        // assumes yearlyFormat is declared as ThreadLocal<DateFormat> — TODO confirm
        yearlyFormat = ThreadLocal.withInitial(() -> new SimpleDateFormat(YEARLY.formatStr()));
    }
    return yearlyFormat.get();
}

From source file:com.google.feedserver.tools.FeedServerClientTool.java

/**
 * Creates the tool around an existing feed-server client, with a
 * per-thread indentation counter (used when printing results) starting
 * at 0.
 *
 * <p>Uses {@link ThreadLocal#withInitial} so that <em>every</em> thread
 * observes an initial value of 0. The original {@code set(0)} only
 * initialized the constructing thread; any other thread calling
 * {@code indentation.get()} would receive {@code null} and hit an NPE on
 * unboxing.
 *
 * @param feedServerClient client used for feed-server operations
 */
public FeedServerClientTool(TypelessFeedServerClient feedServerClient) {
    indentation = ThreadLocal.withInitial(() -> 0);
    this.feedServerClient = feedServerClient;
}

From source file:org.codice.alliance.transformer.nitf.image.NitfPreStoragePlugin.java

/**
 * Renders the first image segment of the NITF content item, or returns
 * {@code null} if the item is absent, has no stream, no image segment, or
 * the segment fails to render.
 *
 * @param contentItem the NITF content to render; may be {@code null}
 * @return the rendered first image segment, or {@code null}
 * @throws IOException         on stream errors from the parser flow
 * @throws ParseException      on NITF parse errors
 * @throws NitfFormatException on malformed NITF input
 */
private BufferedImage renderImage(ContentItem contentItem)
        throws IOException, ParseException, NitfFormatException {

    // One-element array as a mutable holder the lambda can write into.
    // The original used a ThreadLocal, which is overkill for a value that
    // never leaves this method and, since it was never remove()d, leaked
    // the rendered image into the calling thread's thread-local map.
    final BufferedImage[] rendered = new BufferedImage[1];

    if (contentItem != null && contentItem.getInputStream() != null) {
        NitfRenderer renderer = new NitfRenderer();

        new NitfParserInputFlow().inputStream(contentItem.getInputStream()).allData()
                .forEachImageSegment(segment -> {
                    // Only the first image segment is rendered; the rest are skipped.
                    if (rendered[0] == null) {
                        try {
                            rendered[0] = renderer.render(segment);
                        } catch (IOException e) {
                            // Best-effort: log and keep the result null.
                            LOGGER.error(e.getMessage(), e);
                        }
                    }
                }).end();
    }

    return rendered[0];
}

From source file:org.apache.pig.backend.hadoop.executionengine.tez.PigProcessor.java

// Initializes this Tez processor for a (possibly reused) container:
// resets static state left over from a previous task, deserializes the
// physical plan and contexts from the user payload, and wires up logging
// and reporting. The ordering below matters: static resets happen before
// any deserialization that might repopulate them.
@SuppressWarnings("unchecked")
@Override
public void initialize() throws Exception {
    // Reset any static variables to avoid conflict in container-reuse.
    sampleVertex = null;
    sampleMap = null;

    // Reset static variables cleared for avoiding OOM.
    // TODO: Figure out a cleaner way to do this. ThreadLocals actually can be avoided all together
    // for mapreduce/tez mode and just used for Local mode.
    PhysicalOperator.reporter = new ThreadLocal<PigProgressable>();
    PigMapReduce.sJobConfInternal = new ThreadLocal<Configuration>();

    // Rebuild the job configuration from the serialized user payload.
    UserPayload payload = getContext().getUserPayload();
    conf = TezUtils.createConfFromUserPayload(payload);
    PigContext.setPackageImportList(
            (ArrayList<String>) ObjectSerializer.deserialize(conf.get("udf.import.list")));
    PigContext pc = (PigContext) ObjectSerializer.deserialize(conf.get("pig.pigContext"));

    // To determine front-end in UDFContext
    conf.set(MRConfiguration.JOB_APPLICATION_ATTEMPT_ID, getContext().getUniqueIdentifier());
    UDFContext.getUDFContext().addJobConf(conf);
    UDFContext.getUDFContext().deserialize();

    // Deserialize the physical execution plan shipped in the conf.
    String execPlanString = conf.get(PLAN);
    execPlan = (PhysicalPlan) ObjectSerializer.deserialize(execPlanString);
    SchemaTupleBackend.initialize(conf, pc);
    PigMapReduce.sJobContext = HadoopShims.createJobContext(conf, new org.apache.hadoop.mapreduce.JobID());

    // Set the job conf as a thread-local member of PigMapReduce
    // for backwards compatibility with the existing code base.
    PigMapReduce.sJobConfInternal.set(conf);

    // Wire the status reporter and logger used by physical operators.
    boolean aggregateWarning = "true".equalsIgnoreCase(pc.getProperties().getProperty("aggregate.warning"));
    PigStatusReporter pigStatusReporter = PigStatusReporter.getInstance();
    pigStatusReporter.setContext(new TezTaskContext(getContext()));
    pigHadoopLogger = PigHadoopLogger.getInstance();
    pigHadoopLogger.setReporter(pigStatusReporter);
    pigHadoopLogger.setAggregate(aggregateWarning);
    PhysicalOperator.setPigLogger(pigHadoopLogger);

    // Give Tez-aware operators in the plan access to the processor context.
    LinkedList<TezTaskConfigurable> tezTCs = PlanHelper.getPhysicalOperators(execPlan,
            TezTaskConfigurable.class);
    for (TezTaskConfigurable tezTC : tezTCs) {
        tezTC.initialize(getContext());
    }
}

From source file:org.squale.jraf.provider.persistence.hibernate.HibernateFilter.java

/**
 * Pre-processing of an HTTP request: for each configured persistence
 * provider using both thread-local and long (HTTP-session-scoped)
 * sessions, restores any Hibernate session previously stashed in the
 * HTTP session into the provider's thread-local storage, so the rest of
 * the request runs against the same session.
 *
 * @param request  the incoming servlet request (must be an HttpServletRequest)
 * @param response the servlet response (unused here)
 * @throws IOException      declared for filter-chain compatibility
 * @throws ServletException declared for filter-chain compatibility
 */
public void preProcess(ServletRequest request, ServletResponse response) throws IOException, ServletException {

    SessionImpl session = null;
    Iterator iterator = getProvidersMap().entrySet().iterator();
    Map.Entry entry = null;
    PersistenceProviderImpl persistenceProvider = null;
    String providerName = null;
    HttpServletRequest httpRequest = (HttpServletRequest) request;
    HttpSession httpSession = null;

    // For each registered persistence provider.
    while (iterator.hasNext()) {

        entry = (Map.Entry) iterator.next();
        providerName = (String) entry.getKey();
        persistenceProvider = (PersistenceProviderImpl) entry.getValue();

        if (log.isDebugEnabled()) {
            log.debug("providerName=" + providerName);
        }

        // Case: thread-local session combined with a long (HTTP) session.
        if (persistenceProvider.isThreadLocalSession() && persistenceProvider.isLongSession()) {
            if (log.isDebugEnabled()) {
                log.debug("Cas thread local session et session longue...");
            }

            httpSession = httpRequest.getSession();

            if (log.isDebugEnabled()) {
                log.debug("Recuperation de la session...");
            }
            // Look up a Hibernate session stored under this provider's key.
            session = (SessionImpl) httpSession.getAttribute(SESSIONS_KEY + providerName);

            if (session != null) {
                if (log.isDebugEnabled()) {
                    log.debug("Session existante...");
                    log.debug("Mise de la session dans le thread local storage...");
                }

                // Existing session: hand it to the provider via a fresh
                // ThreadLocal so this thread reuses it.
                ThreadLocal tl = new ThreadLocal();
                tl.set(session);
                persistenceProvider.setThreadLocal(tl);
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("Session non existante...");
                    log.debug("Elle sera creee en cas d'appel...");
                }
                // No stored session: one will be created lazily on first use.
            }

        }

    }

    // Nothing else to do by default.

}

From source file:lux.solr.XQueryComponent.java

/**
 * Creates the component with an empty per-thread holder for the XQuery
 * evaluator and a class-scoped SLF4J logger.
 */
public XQueryComponent() {
    evalHolder = new ThreadLocal<>();
    logger = LoggerFactory.getLogger(XQueryComponent.class);
}