Example usage for java.util.logging Logger log

List of usage examples for java.util.logging Logger log

Introduction

On this page you can find example usages of java.util.logging Logger.log.

Prototype

public void log(Level level, Throwable thrown, Supplier<String> msgSupplier) 

Document

Log a lazily constructed message, with associated Throwable information.
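
Note that none of the examples below uses this exact Supplier-based overload; they call the message/parameter and message/Throwable variants instead. For reference, here is a minimal, self-contained sketch of the lazily constructed message form (the class and helper names are illustrative, not taken from any of the projects listed). The Supplier is evaluated only if the given level is actually loggable for the logger, so the message construction is skipped otherwise.

import java.util.logging.Level;
import java.util.logging.Logger;

public class LazyLogExample {

    private static final Logger LOGGER = Logger.getLogger(LazyLogExample.class.getName());

    public static void main(String[] args) {
        try {
            Integer.parseInt("not a number");
        } catch (NumberFormatException ex) {
            // The Supplier<String> lambda runs only when FINE is loggable for this
            // logger, so the string concatenation below is avoided otherwise.
            LOGGER.log(Level.FINE, ex, () -> "Failed to parse input: " + expensiveDiagnostics());
        }
    }

    private static String expensiveDiagnostics() {
        // Stand-in for a costly computation that should not run unless the
        // message is actually going to be logged.
        return "input=\"not a number\", length=12";
    }
}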

Usage

From source file:edu.emory.cci.aiw.i2b2etl.dest.I2b2QueryResultsHandler.java

private void truncateTable(Connection conn, String tableName) throws SQLException {
    Logger logger = I2b2ETLUtil.logger();
    String queryId = query.getName();
    String sql = "TRUNCATE TABLE " + tableName;
    if (logger.isLoggable(Level.FINE)) {
        logger.log(Level.FINE, "Executing the following SQL for query {0}: {1}", new Object[] { queryId, sql });
    }
    try (final Statement st = conn.createStatement()) {
        st.execute(sql);
        logger.log(Level.FINE, "Done executing SQL for query {0}", queryId);
    } catch (SQLException ex) {
        logger.log(Level.SEVERE, "An error occurred truncating the tables for query " + queryId, ex);
        throw ex;
    }
}

From source file:org.archive.crawler.frontier.AbstractFrontier.java

/**
 * Take note of any processor-local errors that have been entered into the
 * CrawlURI.
 * 
 * @param curi the CrawlURI whose nonfatal errors should be logged and cleared
 */
protected void logNonfatalErrors(CrawlURI curi) {
    if (curi.containsDataKey(A_NONFATAL_ERRORS)) {
        Collection<Throwable> x = curi.getNonFatalFailures();
        Logger le = loggerModule.getNonfatalErrors();
        for (Throwable e : x) {
            le.log(Level.WARNING, curi.toString(), new Object[] { curi, e });
        }
        // once logged, discard
        curi.getData().remove(A_NONFATAL_ERRORS);
    }
}

From source file:net.openhft.chronicle.logger.jul.JulTestBase.java

protected void testBinaryAppender(String testId, Chronicle chronicle) throws IOException {

    final String threadId = "thread-" + Thread.currentThread().getId();
    final long timestamp = System.currentTimeMillis();

    setupLogManager(testId);
    final Logger logger = Logger.getLogger(testId);

    for (ChronicleLogLevel level : LOG_LEVELS) {
        log(logger, level, "level is {0}", level);
    }

    ExcerptTailer tailer = chronicle.createTailer().toStart();
    ChronicleLogEvent evt = null;

    for (ChronicleLogLevel level : LOG_LEVELS) {
        assertTrue(tailer.nextIndex());

        evt = ChronicleLogHelper.decodeBinary(tailer);
        assertNotNull(evt);
        assertEquals(evt.getVersion(), ChronicleLog.VERSION);
        assertTrue(evt.getTimeStamp() >= timestamp);
        assertEquals(level, evt.getLevel());
        assertEquals(threadId, evt.getThreadName());
        assertEquals(testId, evt.getLoggerName());
        assertEquals("level is {0}", evt.getMessage());
        assertNotNull(evt.getArgumentArray());
        assertEquals(1, evt.getArgumentArray().length);
        assertEquals(level, evt.getArgumentArray()[0]);
        assertNull(evt.getThrowable());

        tailer.finish();
    }

    logger.log(Level.FINE, "Throwable test 1", new UnsupportedOperationException());
    logger.log(Level.FINE, "Throwable test 2", new UnsupportedOperationException("Exception message"));

    assertTrue(tailer.nextIndex());
    evt = ChronicleLogHelper.decodeBinary(tailer);
    assertEquals("Throwable test 1", evt.getMessage());
    assertNotNull(evt.getThrowable());
    assertTrue(evt.getThrowable() instanceof UnsupportedOperationException);
    assertNull(evt.getThrowable().getMessage());

    assertTrue(tailer.nextIndex());
    evt = ChronicleLogHelper.decodeBinary(tailer);
    assertEquals("Throwable test 2", evt.getMessage());
    assertNotNull(evt.getThrowable());
    assertTrue(evt.getThrowable() instanceof UnsupportedOperationException);
    assertEquals("Exception message", evt.getThrowable().getMessage());

    tailer.close();

    chronicle.close();
    chronicle.clear();
}

From source file:net.daboross.bukkitdev.skywars.world.WorldUnzipper.java

public void doWorldUnzip(Logger logger) throws StartupFailedException {
    Validate.notNull(logger, "Logger cannot be null");
    Path outputDir = Bukkit.getWorldContainer().toPath().resolve(Statics.BASE_WORLD_NAME);
    if (Files.exists(outputDir)) {
        return;
    }
    try {
        Files.createDirectories(outputDir);
    } catch (IOException e) {
        throw new StartupFailedException("Couldn't create directory " + outputDir.toAbsolutePath() + ".");
    }

    InputStream fis = WorldUnzipper.class.getResourceAsStream(Statics.ZIP_FILE_PATH);
    if (fis == null) {
        throw new StartupFailedException("Couldn't get resource.\nError creating world. Please delete "
                + Statics.BASE_WORLD_NAME + " and restart server.");
    }
    try {
        try (ZipInputStream zis = new ZipInputStream(fis)) {
            ZipEntry ze = zis.getNextEntry();
            while (ze != null) {
                String fileName = ze.getName();
                Path newFile = outputDir.resolve(fileName);
                Path parent = newFile.getParent();
                if (parent != null) {
                    Files.createDirectories(parent);
                }
                if (ze.isDirectory()) {
                    logger.log(Level.FINER, "Making dir {0}", newFile);
                    Files.createDirectories(newFile);
                } else if (Files.exists(newFile)) {
                    logger.log(Level.FINER, "Already exists {0}", newFile);
                } else {
                    logger.log(Level.FINER, "Copying {0}", newFile);
                    try (FileOutputStream fos = new FileOutputStream(newFile.toFile())) {
                        try {
                            int next;
                            while ((next = zis.read()) != -1) {
                                fos.write(next);
                            }
                            fos.flush();
                        } catch (IOException ex) {
                            logger.log(Level.WARNING, "Error copying file from zip", ex);
                            throw new StartupFailedException("Error creating world. Please delete "
                                    + Statics.BASE_WORLD_NAME + " and restart server.");
                        }
                        fos.close();
                    }
                }
                try {
                    ze = zis.getNextEntry();
                } catch (IOException ex) {
                    throw new StartupFailedException(
                            "Error getting next zip entry\nError creating world. Please delete "
                                    + Statics.BASE_WORLD_NAME + " and restart server.",
                            ex);
                }
            }
        }
    } catch (IOException | RuntimeException ex) {
        throw new StartupFailedException(
                "\nError unzipping world. Please delete " + Statics.BASE_WORLD_NAME + " and restart server.",
                ex);
    }
}

From source file:org.protempa.backend.dsb.relationaldb.EventResultProcessor.java

@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<Event> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    //boolean hasRefs = entitySpec.getInboundRefSpecs().length > 0;
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    JDBCPositionFormat positionParser = entitySpec.getPositionParser();

    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }

        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
            }
            continue;
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);

        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    continue;
                }
            }
        } else {
            i++;
        }

        ColumnSpec finishTimeSpec = entitySpec.getFinishTimeSpec();
        Granularity gran = entitySpec.getGranularity();
        Interval interval = null;
        if (finishTimeSpec == null) {
            Long d = null;
            try {
                d = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
                i++;
            } catch (SQLException e) {
                logger.log(Level.WARNING, "Could not parse timestamp. Leaving the start time/timestamp unset.",
                        e);
            }
            interval = intervalFactory.getInstance(d, gran);
        } else {
            Long start = null;
            try {
                start = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
            } catch (SQLException e) {
                logger.log(Level.WARNING, "Could not parse start time. Leaving the start time/timestamp unset.",
                        e);
            } finally {
                i++;
            }
            Long finish = null;
            try {
                finish = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
            } catch (SQLException e) {
                logger.log(Level.WARNING, "Could not parse start time. Leaving the finish time unset.", e);
            } finally {
                i++;
            }
            if (finish != null && start != null && finish.compareTo(start) < 0) {
                logger.log(Level.WARNING, "Finish {0} is before start {1}: Leaving time unset",
                        new Object[] { finish, start });
                interval = intervalFactory.getInstance(null, gran, null, gran);
            } else {
                interval = intervalFactory.getInstance(start, gran, finish, gran);
            }
        }

        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);

        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }

        Event event = new Event(propId, uniqueId);
        event.setSourceSystem(dsType);
        event.setInterval(interval);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            event.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        logger.log(Level.FINEST, "Created event {0}", event);
        results.add(keyId, event);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total",
                "Retrieved {0} records total");
    }
}

From source file:jp.ikedam.jenkins.plugins.ldap_sasl.SearchUserDnResolver.java

/**
 * Resolve the user DN by querying the LDAP directory.
 *
 * @param ctx LDAP context, already authenticated.
 * @param username the username the user authenticated with.
 * 
 * @return the DN of the user.
 * @see jp.ikedam.jenkins.plugins.ldap_sasl.UserDnResolver#getUserDn(javax.naming.ldap.LdapContext, java.lang.String)
 */
@Override
public String getUserDn(LdapContext ctx, String username) {
    Logger logger = getLogger();
    if (StringUtils.isBlank(getSearchQueryTemplate())) {
        // not configured.
        logger.severe("Not configured.");

        return null;
    }

    try {
        SearchControls searchControls = new SearchControls();
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);
        logger.fine(String.format("Searching users base=%s, username=%s", getSearchBase(), username));
        String query = expandUsername(getSearchQueryTemplate(), username);
        NamingEnumeration<SearchResult> entries = ctx.search((getSearchBase() != null) ? getSearchBase() : "",
                query, searchControls);
        if (!entries.hasMore()) {
            // no entry.
            logger.severe(String.format("User not found: %s", username));
            return null;
        }

        String userDn = entries.next().getNameInNamespace();

        if (entries.hasMore()) {
            // more than one entry.
            logger.severe(String.format("User found more than one: %s", username));
            return null;
        }
        entries.close();

        return userDn;
    } catch (NamingException e) {
        logger.log(Level.SEVERE, "Failed to search a user", e);
        return null;
    }
}

From source file:alma.acs.nc.sm.generic.AcsScxmlEngine.java

/**
 * @param scxmlFileName The qualified xml file name, e.g. "/alma/acs/nc/sm/EventSubscriberStates.xml",
 *                      in the form that {@link Class#getResource(String)} can use to load the scxml
 *                      definition file from the classpath. 
 * @param logger
 * @param actionDispatcher 
 * @param signalType enum class, needed to convert signal names to enum values.
 * @throws IllegalArgumentException if any of the args are <code>null</code> or if the <code>actionDispatcher</code>
 *                                  is not complete for all possible actions.
 */
public AcsScxmlEngine(String scxmlFileName, Logger logger, AcsScxmlActionDispatcher<A> actionDispatcher,
        Class<S> signalType) {

    this.logger = logger;
    this.actionDispatcher = actionDispatcher;
    this.signalType = signalType;

    // TODO decide if we want to insist here, or let the user check this beforehand
    if (!actionDispatcher.isActionMappingComplete()) {
        throw new IllegalArgumentException("actionDispatcher is not complete.");
    }

    errorTracer = new Tracer(); // create error tracer
    exprEvaluator = new JexlEvaluator(); // Evaluator evaluator = new ELEvaluator();
    eventDispatcher = new SimpleDispatcher(); // create event dispatcher
    exprContext = new JexlContext(); // set new context

    // Adding AcsScxmlActionDispatcher to the SM root context 
    // so that the generated action classes can get it from there and can delegate action calls.
    exprContext.set(AcsScxmlActionDispatcher.class.getName(), actionDispatcher);

    try {
        // load the scxml model
        loadModel(scxmlFileName);

        startExecution();

    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Failed to load or start the state machine.", ex); // TODO
    }

}

From source file:edu.emory.cci.aiw.i2b2etl.dest.I2b2QueryResultsHandler.java

@Override
public void finish() throws QueryResultsHandlerProcessingException {

    Logger logger = I2b2ETLUtil.logger();
    logger.log(Level.FINE, "Beginning finish for query {0}", this.query.getName());
    String queryId = this.query.getName();

    SQLException exception = null;

    try {
        if (this.factHandlers != null) {
            for (Iterator<FactHandler> itr = this.factHandlers.iterator(); itr.hasNext();) {
                FactHandler factHandler = itr.next();
                factHandler.close();
                itr.remove();
            }
        }
    } catch (SQLException ex) {
        exception = ex;
    }

    if (this.dataSchemaConnection != null) {
        try {
            this.dataSchemaConnection.close();
            this.dataSchemaConnection = null;
        } catch (SQLException ex) {
            if (exception == null) {
                exception = ex;
            }
        }
    }

    if (this.patientDimensionFactory != null) {
        try {
            // persist Patients & Visits.
            this.patientDimensionFactory.close();
            this.patientDimensionFactory = null;
        } catch (SQLException ex) {
            if (exception == null) {
                exception = ex;
            }
        }
    }

    logger.log(Level.INFO, "Populating dimensions for query {0}", queryId);

    if (exception == null) {
        try (Connection conn = openDataDatabaseConnection();
                CallableStatement mappingCall = conn
                        .prepareCall("{ call EUREKA.EK_INSERT_PID_MAP_FROMTEMP(?, ?) }")) {
            conn.setAutoCommit(true);
            logger.log(Level.INFO, "Populating patient dimension for query {0}", queryId);
            mappingCall.setString(1, tempPatientMappingTableName());
            mappingCall.setInt(2, UPLOAD_ID);
            mappingCall.execute();
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (this.visitDimensionFactory != null) {
        try {
            this.visitDimensionFactory.close();
            this.visitDimensionFactory = null;
        } catch (SQLException ex) {
            if (exception == null) {
                exception = ex;
            }
        }
    }

    if (exception == null) {
        try (Connection conn = openDataDatabaseConnection();
                CallableStatement mappingCall = conn
                        .prepareCall("{ call EUREKA.EK_INSERT_EID_MAP_FROMTEMP(?, ?) }")) {
            conn.setAutoCommit(true);
            mappingCall.setString(1, tempEncounterMappingTableName());
            mappingCall.setInt(2, UPLOAD_ID);
            mappingCall.execute();
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception == null) {
        try (Connection conn = openDataDatabaseConnection()) {
            try (CallableStatement call = conn.prepareCall("{ call EUREKA.EK_INS_PATIENT_FROMTEMP(?, ?) }")) {
                conn.setAutoCommit(true);
                call.setString(1, tempPatientTableName());
                call.setInt(2, UPLOAD_ID);
                call.execute();
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception == null) {
        try (Connection conn = openDataDatabaseConnection();
                CallableStatement call = conn.prepareCall("{ call EUREKA.EK_INS_ENC_VISIT_FROMTEMP(?, ?) }")) {
            conn.setAutoCommit(true);
            logger.log(Level.INFO, "Populating visit dimension for query {0}", queryId);
            call.setString(1, tempVisitTableName());
            call.setInt(2, UPLOAD_ID);
            call.execute();
            //commit and rollback are called by the stored procedure.
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (this.providerDimensionFactory != null) {
        try {
            // find Provider root. gather its leaf nodes. persist Providers.
            this.providerDimensionFactory.close();
            this.providerDimensionFactory = null;
        } catch (SQLException ex) {
            if (exception == null) {
                exception = ex;
            }
        }
    }

    if (exception == null) {
        try {
            logger.log(Level.INFO, "Populating provider dimension for query {0}", queryId);
            try (Connection conn = openDataDatabaseConnection()) {
                conn.setAutoCommit(true);
                try (CallableStatement call = conn
                        .prepareCall("{ call EUREKA.EK_INS_PROVIDER_FROMTEMP(?, ?) }")) {
                    call.setString(1, tempProviderTableName());
                    call.setInt(2, UPLOAD_ID);
                    call.execute();
                }
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception == null) {
        try {
            // flush hot concepts out of the tree. persist Concepts.
            logger.log(Level.INFO, "Populating concept dimension for query {0}", this.query.getName());
            new ConceptDimensionLoader(this.conceptDimensionHandler).execute(this.metadata.getAllRoots());
        } catch (SQLException ex) {
            exception = ex;
        }
    }
    if (this.conceptDimensionHandler != null) {
        try {
            this.conceptDimensionHandler.close();
            this.conceptDimensionHandler = null;
        } catch (SQLException ex) {
            if (exception == null) {
                exception = ex;
            }
        }
    }

    if (exception == null) {
        try {
            try (Connection conn = openDataDatabaseConnection()) {
                conn.setAutoCommit(true);
                try (CallableStatement call = conn
                        .prepareCall("{ call EUREKA.EK_INS_CONCEPT_FROMTEMP(?, ?) }")) {
                    call.setString(1, tempConceptTableName());
                    call.setInt(2, UPLOAD_ID);
                    call.execute();
                }
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception == null) {
        try {
            logger.log(Level.INFO, "Populating modifier dimension for query {0}", this.query.getName());
            new ModifierDimensionLoader(this.modifierDimensionHandler)
                    .execute(this.metadata.getModifierRoots());
        } catch (SQLException ex) {
            exception = ex;
        }
    }
    if (this.modifierDimensionHandler != null) {
        try {
            this.modifierDimensionHandler.close();
            this.modifierDimensionHandler = null;
        } catch (SQLException ex) {
            if (exception == null) {
                exception = ex;
            }
        }
    }

    if (exception == null) {
        try (Connection conn = openDataDatabaseConnection()) {
            conn.setAutoCommit(true);
            try (CallableStatement call = conn.prepareCall("{ call EUREKA.EK_INS_MODIFIER_FROMTEMP(?, ?) }")) {
                call.setString(1, tempModifierTableName());
                call.setInt(2, UPLOAD_ID);
                call.execute();
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception == null) {
        try {
            logger.log(Level.INFO, "Done populating dimensions for query {0}", queryId);

            try (Connection conn = openDataDatabaseConnection()) {
                conn.setAutoCommit(true);
                logger.log(Level.INFO, "Populating observation_fact from temporary table");
                try (CallableStatement call = conn
                        .prepareCall("{ call EUREKA.EK_UPDATE_OBSERVATION_FACT(?, ?, ?, ?) }")) {
                    call.setString(1, tempObservationFactTableName());
                    call.setString(2, tempObservationFactCompleteTableName());
                    call.setLong(3, UPLOAD_ID);
                    call.setLong(4,
                            (this.query.getQueryMode() == QueryMode.UPDATE && this.settings.getMergeOnUpdate())
                                    ? 1
                                    : 0); // appendFlag
                    call.execute();
                }
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    try {
        // re-enable the indexes now that we're done populating the table
        enableObservationFactIndexes();
    } catch (SQLException ex) {
        if (exception == null) {
            exception = ex;
        }
    }

    try {
        // execute post-hook
        executePostHook();
    } catch (SQLException ex) {
        if (exception == null) {
            exception = ex;
        }
    }

    List<String> cFullNames = new ArrayList<>();
    if (exception == null && this.metadataConnectionSpec != null) {
        logger.log(Level.INFO, "Querying TABLE_ACCESS for full names");
        try (Connection conn = openMetadataDatabaseConnection()) {
            try (Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT DISTINCT C_FULLNAME FROM TABLE_ACCESS")) {
                while (rs.next()) {
                    cFullNames.add(rs.getString(1));
                }
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception == null && !cFullNames.isEmpty()) {
        for (String cFullName : cFullNames) {
            logger.log(Level.INFO, "Getting number of records loaded for {0}", cFullName);
            fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_START,
                    getClass(), new Date(), "Count of " + cFullName));
            String countQuery = "SELECT count(*) FROM " + tempObservationFactCompleteTableName() + " obx join "
                    + tempConceptTableName()
                    + " tc ON (obx.concept_cd=tc.concept_cd) WHERE tc.concept_path like '" + cFullName
                    + "%' AND obx.modifier_cd='@' AND obx.patient_num IS NOT NULL AND obx.encounter_num IS NOT NULL";
            int count = -1;
            try (Connection conn = openDataDatabaseConnection();
                    Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery(countQuery)) {
                if (rs.next()) {
                    count = rs.getInt(1);
                }
            } catch (SQLException ex) {
                exception = ex;
                break;
            }
            fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_STOP,
                    getClass(), new Date(), "Count of " + cFullName));
            if (exception == null) {
                logger.log(Level.INFO, "{0} {1} record(s) loaded", new Object[] { count, cFullName });
                fireProtempaEvent(
                        new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT,
                                getClass(), new Date(), "Count of " + cFullName + ": " + count));
            } else {
                fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO,
                        ProtempaEvent.Type.QRH_STEP_RESULT, getClass(), new Date(),
                        "Count of " + cFullName + ": ERROR (" + exception.getMessage() + ")"));
            }
        }
    }

    if (exception == null) {
        logger.log(Level.INFO, "Getting number of patient records loaded");
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_START,
                getClass(), new Date(), "Count of patient records"));
        int count = -1;
        try (Connection conn = openDataDatabaseConnection();
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tempPatientTableName())) {
            if (rs.next()) {
                count = rs.getInt(1);
            }
        } catch (SQLException ex) {
            exception = ex;
        }
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_STOP,
                getClass(), new Date(), "Count of patient records"));
        if (exception == null) {
            logger.log(Level.INFO, "{0} patient record(s) loaded", count);
            fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT,
                    getClass(), new Date(), "Count of patient records: " + count));
        } else {
            fireProtempaEvent(
                    new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT, getClass(),
                            new Date(), "Count of patient records: ERROR (" + exception.getMessage() + ")"));
        }
    }

    if (exception == null) {
        logger.log(Level.INFO, "Getting number of visit records loaded");
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_START,
                getClass(), new Date(), "Count of visit records"));
        int count = -1;
        try (Connection conn = openDataDatabaseConnection();
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tempVisitTableName())) {
            if (rs.next()) {
                count = rs.getInt(1);
            }
        } catch (SQLException ex) {
            exception = ex;
        }
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_STOP,
                getClass(), new Date(), "Count of visit records"));
        if (exception == null) {
            logger.log(Level.INFO, "{0} visit record(s) loaded", count);
            fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT,
                    getClass(), new Date(), "Count of visit records: " + count));
        } else {
            fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT,
                    getClass(), new Date(), "Count of visit records: ERROR (" + exception.getMessage() + ")"));
        }
    }

    if (exception == null) {
        logger.log(Level.INFO, "Getting number of provider records loaded");
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_START,
                getClass(), new Date(), "Count of provider records"));
        int count = -1;
        try (Connection conn = openDataDatabaseConnection();
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tempProviderTableName())) {
            if (rs.next()) {
                count = rs.getInt(1);
            }
        } catch (SQLException ex) {
            exception = ex;
        }
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_STOP,
                getClass(), new Date(), "Count of provider records"));
        if (exception == null) {
            logger.log(Level.INFO, "{0} provider record(s) loaded", count);
            fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT,
                    getClass(), new Date(), "Count of provider records: " + count));
        } else {
            fireProtempaEvent(
                    new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT, getClass(),
                            new Date(), "Count of provider records: ERROR (" + exception.getMessage() + ")"));
        }
    }

    if (exception == null) {
        logger.log(Level.INFO, "Getting number of concept records loaded");
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_START,
                getClass(), new Date(), "Count of concept records"));
        int count = -1;
        try (Connection conn = openDataDatabaseConnection();
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tempConceptTableName())) {
            if (rs.next()) {
                count = rs.getInt(1);
            }
        } catch (SQLException ex) {
            exception = ex;
        }
        fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_STOP,
                getClass(), new Date(), "Count of concept records"));
        if (exception == null) {
            logger.log(Level.INFO, "{0} concept record(s) loaded", count);
            fireProtempaEvent(new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT,
                    getClass(), new Date(), "Count of concept records: " + count));
        } else {
            fireProtempaEvent(
                    new ProtempaEvent(ProtempaEvent.Level.INFO, ProtempaEvent.Type.QRH_STEP_RESULT, getClass(),
                            new Date(), "Count of concept records: ERROR (" + exception.getMessage() + ")"));
        }
    }

    if (exception == null) {
        try {
            logger.log(Level.INFO, "Done populating observation fact table for query {0}", queryId);
            if (this.metadataConnectionSpec != null) {
                logger.log(Level.INFO, "Populating metadata tables for query {0}", queryId);
                String tableName = this.settings.getMetaTableName();
                try (MetaTableConceptHandler metaTableHandler = new MetaTableConceptHandler(
                        this.metadataConnectionSpec, tableName)) {
                    MetaTableConceptLoader metaTableConceptLoader = new MetaTableConceptLoader(
                            metaTableHandler);
                    metaTableConceptLoader.execute(this.metadata.getAllRoots());
                    logger.log(Level.INFO, "Done populating metadata tables for query {0}", queryId);
                }
            } else {
                logger.log(Level.INFO, "Skipping metadata tables for query {0}", queryId);
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception == null && this.settings.getManageCTotalNum()) {
        try (Connection conn = openMetadataDatabaseConnection()) {
            conn.setAutoCommit(true);
            try (Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT DISTINCT C_TABLE_NAME FROM TABLE_ACCESS")) {
                while (rs.next()) {
                    String tableName = rs.getString(1);
                    try (CallableStatement mappingCall = conn
                            .prepareCall("{ call EUREKA.EK_UPDATE_C_TOTALNUM(?) }")) {
                        logger.log(Level.INFO, "Updating C_TOTALNUM for query {0}", this.query.getName());
                        mappingCall.setString(1, tableName);
                        mappingCall.execute();
                        //commit and rollback are called by stored procedure.
                    }
                }
            }
        } catch (SQLException ex) {
            exception = ex;
        }
    }

    if (exception != null) {
        logger.log(Level.SEVERE, "Load into i2b2 failed for query " + queryId, exception);
        throw new QueryResultsHandlerProcessingException("Load into i2b2 failed for query " + queryId,
                exception);
    }
}

From source file:nl.strohalm.cyclos.utils.logging.LoggingHandlerImpl.java

@Override
public void trace(final TraceLogDTO params) {
    final Logger logger = getTraceLogger();
    boolean isError = params.getError() != null;
    final Level detailed = TraceLevel.DETAILED.getLevel();
    final boolean detailedLoggable = logger.isLoggable(detailed);
    boolean logParameters = detailedLoggable;
    Level logLevel;
    if (isError) {
        final Level error = TraceLevel.ERRORS.getLevel();
        final boolean errorLoggable = logger.isLoggable(error);
        logLevel = errorLoggable ? error : null;
    } else {
        if (traceWritesOnly && !params.isHasDatabaseWrites()) {
            return;
        }
        final Level normal = TraceLevel.SIMPLE.getLevel();
        final boolean normalLoggable = logger.isLoggable(normal);
        logLevel = detailedLoggable ? detailed : normalLoggable ? normal : null;
    }
    if (logLevel != null) {
        final String message = buildActionString(params, logParameters);
        try {
            logger.log(logLevel, message, params.getError());
        } catch (final Exception e) {
            System.out.println("Error generating log on " + settingsService.getLogSettings().getTraceFile());
        }
    }

}

From source file:de.jaetzold.philips.hue.HueBridgeComm.java

static List<HueBridge> discover() {
    final Logger log = Logger.getLogger(HueBridge.class.getName());
    final SimpleServiceDiscovery serviceDiscovery = new SimpleServiceDiscovery();
    int attempted = 0;
    int maxAttempts = Math.min(4, Math.max(1, HueBridge.discoveryAttempts));
    Map<String, URL> foundBriges = new HashMap<>();
    // if nothing is found the first time try up to maxAttempts times with increasing timeouts
    while (foundBriges.isEmpty() && attempted < maxAttempts) {
        serviceDiscovery.setSearchMx(1 + attempted);
        serviceDiscovery.setSocketTimeout(500 + attempted * 1500);
        final List<? extends SimpleServiceDiscovery.Response> responses = serviceDiscovery
                .discover(SimpleServiceDiscovery.SEARCH_TARGET_ROOTDEVICE);
        try {
            for (SimpleServiceDiscovery.Response response : responses) {
                String urlBase = null;
                final String usn = response.getHeader("USN");
                if (usn != null && usn.matches("uuid:[-\\w]+")) {
                    if (!foundBriges.containsKey(usn)) {
                        final String server = response.getHeader("SERVER");
                        if (server != null && server.contains("IpBridge")) {
                            final String location = response.getHeader("LOCATION");
                            if (location != null && location.endsWith(".xml")) {
                                DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
                                DocumentBuilder db = dbf.newDocumentBuilder();
                                Document doc = db.parse(new URL(location).openStream());
                                final NodeList modelNames = doc.getElementsByTagName("modelName");
                                for (int i = 0; i < modelNames.getLength(); i++) {
                                    final Node item = modelNames.item(i);
                                    if (item.getParentNode().getNodeName().equals("device") && item
                                            .getTextContent().matches("(?i).*philips\\s+hue\\s+bridge.*")) {
                                        final NodeList urlBases = doc.getElementsByTagName("URLBase");
                                        if (urlBases.getLength() > 0) {
                                            urlBase = urlBases.item(0).getTextContent();
                                            break;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                if (urlBase != null) {
                    foundBriges.put(usn, new URL(urlBase));
                }
            }
        } catch (Exception e) {
            HueBridge.lastDiscoveryException = e;
            log.log(Level.INFO, "Exception when discovering devices", e);
        }
        attempted++;
    }
    List<HueBridge> result = new ArrayList<>();
    for (Map.Entry<String, URL> entry : foundBriges.entrySet()) {
        final HueBridge bridge = new HueBridge(entry.getValue(), null);
        bridge.UDN = entry.getKey();
        result.add(bridge);
    }
    return result;
}