Example usage for java.io IOException getClass

List of usage examples for java.io IOException getClass

Introduction

In this page you can find the example usage for java.io IOException getClass.

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Source Link

Document

Returns the runtime class of this Object.

Usage

From source file:org.apache.synapse.transport.nhttp.ServerHandler.java

/**
 * Handles I/O errors raised while reading from or writing to the underlying
 * channel, logs them at an appropriate level, updates the receive-fault
 * metrics, and shuts the connection down with a descriptive message.
 *
 * @param conn the connection being processed
 * @param e the exception encountered
 */
public void exception(NHttpServerConnection conn, IOException e) {
    String errMsg = "I/O error : " + e.getMessage();

    if (e instanceof ConnectionClosedException
            || (e.getMessage() != null && (e.getMessage().contains("Connection reset by peer")
                    || e.getMessage().contains("forcibly closed")))) {
        // Probably a closed keep-alive connection: benign, log only at debug.
        // Build the message unconditionally so shutdownConnection() receives
        // the descriptive text even when debug logging is disabled (the
        // original only constructed it inside the isDebugEnabled() branch).
        errMsg = "I/O error (Probably the keepalive connection " + "was closed):" + e.getMessage();
        if (log.isDebugEnabled()) {
            log.debug(errMsg);
        }
    } else if (e.getMessage() != null) {
        // Case-insensitive match on "broken" (e.g. "broken pipe") — the
        // remote party likely closed the connection.
        String lowerMsg = e.getMessage().toLowerCase();
        if (lowerMsg.contains("broken")) {
            errMsg = "I/O error (Probably the connection " + "was closed by the remote party):"
                    + e.getMessage();
            log.warn(errMsg);
        } else {
            errMsg = "I/O error: " + e.getMessage();
            log.error(errMsg, e);
        }
        if (metrics != null) {
            metrics.incrementFaultsReceiving();
        }
    } else {
        // No message available — report the exception class instead.
        log.error("Unexpected I/O error: " + e.getClass().getName(), e);
        if (metrics != null) {
            metrics.incrementFaultsReceiving();
        }
        errMsg = "Unexpected I/O error: " + e.getMessage();
    }

    shutdownConnection(conn, true, errMsg);
}

From source file:edu.ku.brc.specify.toycode.BuildSearchIndex.java

/**
 * Builds the Lucene full-text search index from the collectionobject table.
 * Each result row of {@link #createQuery()} becomes one Lucene Document with
 * indexed fields for id, catalog number ("cn"), collection year ("yr"),
 * collector ("ag"), locality ("lc"), taxon ("tx"), a cross-reference string
 * ("xref") and the combined searchable contents ("cs"). Existing index
 * directories are deleted first; progress is printed every 1000 records and
 * the index is optimized every 100000 records.
 */
public void index() {
    // Column layout of the SQL produced by createQuery() (0-based positions):
    //    0            1           2              3                4               5      6     7
    // CatalogNumber, CountAmt, StartDate, StationFieldNumber TypeStatusName, FullName, Name, RankID,
    //    8          9            10            11            12          13       14     15       16        17         18           19
    // Latitude1, Longitude1, LocalityName, MaxElevation, MinElevation, FullName, Name, RankID, LastName, FirstName, MiddleInitial, Text1
    //         20              21            22              23           24           25         26          27          28
    //collectionObjectId, DeterminationID, TaxonID, CollectingEventID, LocalityID, GeographyID, AgentID, tx.ParentID, geo.ParentID

    // colToTblId: table id that owns each result column.
    // includeCol: 1 = fold the column's value into the searchable contents.
    int[] colToTblId = { 1, 1, 10, 10, 4, 4, 4, 4, 2, 2, 2, 2, 2, 3, 3, 3, 5, 5, 5, 1, 1, 9, 4, 10, 2, 3, 5, 4,
            3 };
    int[] includeCol = { 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0 };

    // Indices for the ResultSet (1-based, i.e. one more than the array index)
    int idIndex = 20;
    int taxIndex = 23;
    int geoIndex = 26;
    int ceIndex = 24;
    int geoNameIndex = 15;
    int taxNameIndex = 7;
    int collDateIndex = 3;

    int taxParentIndex = 28;
    int geoParentIndex = 29;

    Calendar cal = Calendar.getInstance();

    long startTime = System.currentTimeMillis();

    IndexWriter[] writers = null;
    try {
        // Recreate the index directories from scratch.
        for (int i = 0; i < analyzers.length; i++) {
            files[i] = new File(fileNames[i]);
            analyzers[i] = new StandardAnalyzer(Version.LUCENE_30);
            FileUtils.deleteDirectory(files[i]);
        }

        System.out.println("Indexing to directory '" + INDEX_DIR + "'...");

        long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM collectionobject");
        long procRecs = 0;

        Statement stmt = null;
        Statement stmt2 = null;
        Statement stmt3 = null;
        try {
            writers = new IndexWriter[analyzers.length];
            for (int i = 0; i < files.length; i++) {
                writers[i] = new IndexWriter(FSDirectory.open(files[i]), analyzers[i], true,
                        IndexWriter.MaxFieldLength.LIMITED);
            }

            System.out.println("Total Records: " + totalRecs);

            // Streaming result set: forward-only + Integer.MIN_VALUE fetch
            // size so the driver does not buffer the entire table in memory.
            stmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            stmt2 = dbConn2.createStatement();

            stmt3 = dbConn3.createStatement();
            stmt3.setFetchSize(Integer.MIN_VALUE);

            String sql = createQuery();
            System.out.println(sql);

            ResultSet rs = stmt.executeQuery(sql);

            StringBuilder indexStr = new StringBuilder();
            StringBuilder contents = new StringBuilder();
            StringBuilder sb = new StringBuilder();
            while (rs.next()) {
                String id = rs.getString(idIndex + 1);
                Document doc = new Document();

                doc.add(new Field("id", id, Field.Store.YES, Field.Index.ANALYZED));

                indexStr.setLength(0);
                contents.setLength(0);
                sb.setLength(0);

                // Fold the included columns into one tab-separated string.
                for (int i = 0; i < idIndex; i++) {
                    if (includeCol[i] == 1) {
                        String val = rs.getString(i + 1);
                        if (i == 0 && val != null) {
                            // Strip leading zeros from the catalog number
                            // (null-guarded: a NULL catalog number would
                            // otherwise NPE here).
                            val = val.replaceFirst("^0+(?!$)", "");
                        }
                        contents.append(StringUtils.isNotEmpty(val) ? val : " ");
                        contents.append('\t');
                    }
                }

                indexStr.append(contents);

                // Collection year, indexed and stored separately as "yr".
                Date collDate = rs.getDate(collDateIndex);
                if (collDate != null) {
                    cal.setTime(collDate);
                    String yearStr = Integer.toString(cal.get(Calendar.YEAR));
                    indexStr.append(yearStr);
                    indexStr.append('\t');
                    doc.add(new Field("yr", yearStr, Field.Store.YES, Field.Index.ANALYZED));
                }

                // Cross-reference string: comma-separated "tableId=recordId"
                // pairs for the trailing id columns.
                sb.setLength(0);
                for (int i = idIndex; i < colToTblId.length; i++) {
                    if (i > idIndex) {
                        sb.append(',');
                    }
                    sb.append(String.format("%d=%d", colToTblId[i], rs.getInt(i + 1)));
                }
                doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));

                ///////////////////////////////////////////////
                // Catalog Number
                ///////////////////////////////////////////////
                String catNum = rs.getString(1);
                if (StringUtils.isNotEmpty(catNum)) {
                    doc.add(new Field("cn", catNum, Field.Store.YES, Field.Index.ANALYZED));
                }

                ///////////////////////////////////////////////
                // Image Name in Text1
                ///////////////////////////////////////////////
                // The preparation.Text1 image-name lookup is currently
                // disabled; keep the placeholder column so the layout of the
                // contents string stays stable.
                contents.append(" ");
                contents.append('\t');

                ///////////////////////////////////////////////
                // Collector  (Agent)
                ///////////////////////////////////////////////
                String dataStr = buildStr(rs, sb, 17, 18, 19);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("ag", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }

                ///////////////////////////////////////////////
                // Locality
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 9, 10, 11, 12, 13, 14);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("lc", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }

                ///////////////////////////////////////////////
                // Taxon
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 5, 6);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("tx", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }

                // Related record ids; rs.wasNull() after each getInt() tells
                // whether the id (and its parent, where applicable) was
                // actually present in the row.
                int taxId = rs.getInt(taxIndex);
                boolean taxOK = !rs.wasNull();
                int taxPId = rs.getInt(taxParentIndex);
                taxOK = taxOK && !rs.wasNull();

                int geoId = rs.getInt(geoIndex);
                boolean geoOK = !rs.wasNull();
                int geoPId = rs.getInt(geoParentIndex);
                geoOK = geoOK && !rs.wasNull();

                int ceId = rs.getInt(ceIndex);
                boolean ceOK = !rs.wasNull();

                if (taxOK) {
                    addHigherTaxa(stmt2, doc, indexStr, taxId, taxPId, rs.getInt(taxNameIndex + 1),
                            rs.getString(taxNameIndex));
                    addAuthor(stmt2, doc, indexStr, taxId);
                }

                if (geoOK) {
                    addCountry(stmt2, doc, indexStr, geoId, geoPId, rs.getInt(geoNameIndex + 1),
                            rs.getString(geoNameIndex));
                }

                if (ceOK) {
                    addHost(stmt2, doc, indexStr, ceId);
                }

                doc.add(new Field("cs", indexStr.toString(), Field.Store.NO, Field.Index.ANALYZED));
                writers[0].addDocument(doc);

                procRecs++;
                if (procRecs % 1000 == 0) {
                    System.out.println(procRecs);
                }

                if (procRecs % 100000 == 0) {
                    System.out.println("Optimizing...");
                    writers[0].optimize();
                }
            }
            rs.close();

        } catch (SQLException sqlex) {
            sqlex.printStackTrace();

        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("IOException adding Lucene Document: " + e.getMessage());

        } finally {
            // Close each statement independently so a failure closing one
            // doesn't leak the others (the original closed stmt2/stmt3 only
            // when stmt was non-null, and leaked them if stmt.close() threw).
            for (Statement s : new Statement[] { stmt, stmt2, stmt3 }) {
                if (s != null) {
                    try {
                        s.close();
                    } catch (SQLException e) {
                        e.printStackTrace();
                    }
                }
            }
        }

    } catch (IOException e) {
        e.printStackTrace();

        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());

    } finally {
        // Defensive null checks: an IOException during setup can leave
        // 'writers' null (or either array partially populated), which made
        // the original cleanup throw NullPointerException.
        if (analyzers != null) {
            for (Analyzer a : analyzers) {
                if (a != null) {
                    a.close();
                }
            }
            analyzers = null;
        }

        if (writers != null) {
            for (IndexWriter writer : writers) {
                if (writer == null) {
                    continue;
                }
                try {
                    System.out.println("Optimizing...");
                    writer.optimize();
                    writer.close();
                    System.out.println("Done Optimizing.");

                } catch (CorruptIndexException e) {
                    e.printStackTrace();

                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        long endTime = System.currentTimeMillis();
        System.out.println("Time: " + (endTime - startTime) / 1000);
    }
}

From source file:org.gege.caldavsyncadapter.syncadapter.SyncAdapter.java

/**
 * Performs one CalDAV synchronisation pass for the given account: reads the
 * calendar list stored on the device, fetches the calendar list from the
 * CalDAV server, synchronises events for every calendar whose CTag changed,
 * pushes dirty local events back, deletes device calendars that no longer
 * exist on the server, notifies the ContentResolver for changed URIs and
 * logs the sync statistics. Failures are counted in {@code syncResult} and
 * surfaced to the user via {@code NotificationsHelper}.
 */
@Override
public void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient provider,
        SyncResult syncResult) {
    boolean bolError = false;

    String url = mAccountManager.getUserData(account, AuthenticatorActivity.USER_DATA_URL_KEY);
    this.mCountPerformSync += 1;
    Log.v(TAG, "onPerformSync() count:" + String.valueOf(this.mCountPerformSync) + " on " + account.name
            + " with URL " + url);

    CalendarList serverCalList;

    // Snapshot of the calendars currently known to the Android provider.
    CalendarList androidCalList = new CalendarList(account, provider, CalendarSource.Android, url);
    androidCalList.readCalendarFromClient();
    ArrayList<Uri> notifyList = new ArrayList<Uri>();

    try {
        // Older account data versions stored the account name as the
        // username; newer versions keep an explicit username entry.
        String Username = "";
        String UserDataVersion = mAccountManager.getUserData(account, AuthenticatorActivity.USER_DATA_VERSION);
        if (UserDataVersion == null) {
            Username = account.name;
        } else {
            Username = mAccountManager.getUserData(account, AuthenticatorActivity.USER_DATA_USERNAME);
        }

        CaldavFacade facade = new CaldavFacade(Username, mAccountManager.getPassword(account), url);
        facade.setAccount(account);
        facade.setProvider(provider);
        facade.setVersion(mVersion);
        serverCalList = facade.getCalendarList(this.getContext());
        //String davProperties = facade.getLastDav();
        Log.i(TAG, String.valueOf(androidCalList.getCalendarList().size()) + " calendars found at android");

        for (DavCalendar serverCalendar : serverCalList.getCalendarList()) {
            Log.i(TAG, "Detected calendar name=" + serverCalendar.getCalendarDisplayName() + " URI="
                    + serverCalendar.getURI());

            Uri androidCalendarUri = serverCalendar.checkAndroidCalendarList(androidCalList, this.getContext());

            // check if the adapter was able to get an existing calendar or create a new one
            if (androidCalendarUri != null) {
                // the provider seems to work correct, reset the counter
                mCountProviderFailed = 0;
                DavCalendar androidCalendar = androidCalList.getCalendarByAndroidUri(androidCalendarUri);

                // Only synchronise when the server CTag differs from the one
                // stored locally (or none is stored yet).
                //if ((FORCE_SYNCHRONIZE) || (androidCalendar.getcTag() == null) || (!androidCalendar.getcTag().equals(serverCalendar.getcTag()))) {
                if ((androidCalendar.getcTag() == null)
                        || (!androidCalendar.getcTag().equals(serverCalendar.getcTag()))) {
                    Log.d(TAG, "CTag has changed, something to synchronise");
                    if (serverCalendar.readCalendarEvents(facade)) {
                        this.synchroniseEvents(androidCalendar, serverCalendar, syncResult.stats, notifyList);

                        Log.d(TAG, "Updating stored CTag");
                        //serverCalendar.updateAndroidCalendar(androidCalendarUri, Calendar.CTAG, serverCalendar.getcTag());
                        androidCalendar.setCTag(serverCalendar.getcTag(), true);
                    } else {
                        Log.d(TAG, "unable to read events from server calendar");
                    }
                } else {
                    Log.d(TAG, "CTag has not changed, nothing to do");

                    /* this is unnecessary. "SkippedEntries" are:
                     * Counter for tracking how many entries, either from the server or the local store, 
                     * were ignored during the sync operation. This could happen if the SyncAdapter detected 
                     * some unparsable data but decided to skip it and move on rather than failing immediately. 
                     */

                    /*long CalendarID = ContentUris.parseId(androidCalendarUri);
                    String selection = "(" + Events.CALENDAR_ID + " = ?)";
                    String[] selectionArgs = new String[] {String.valueOf(CalendarID)}; 
                    Cursor countCursor = provider.query(Events.CONTENT_URI, new String[] {"count(*) AS count"},
                       selection,
                       selectionArgs,
                       null);
                            
                      countCursor.moveToFirst();
                      int count = countCursor.getInt(0);
                      syncResult.stats.numSkippedEntries += count;
                      countCursor.close();*/

                }

                // Push locally-modified (dirty) events back to the server
                // regardless of whether the CTag changed.
                this.checkDirtyAndroidEvents(provider, account, androidCalendarUri, facade,
                        serverCalendar.getURI(), syncResult.stats, notifyList);
            } else {
                // this happens if the data provider fails to get an existing or create a new calendar
                mCountProviderFailed += 1;
                Log.e(TAG, "failed to get an existing or create a new calendar");
                syncResult.stats.numIoExceptions += 1;
                if (mCountProviderFailed >= mCountProviderFailedMax) {
                    // see issue #96
                    NotificationsHelper.signalSyncErrors(this.getContext(),
                            "Caldav sync error (provider failed)",
                            "are you using CyanogenMod in Incognito Mode?");
                } else {
                    NotificationsHelper.signalSyncErrors(this.getContext(),
                            "Caldav sync error (provider failed)",
                            "the provider failed to get an existing or create a new calendar");
                }
                bolError = true;
            }
        }

        if (!bolError) {
            // check whether a calendar is not synced -> delete it at android
            androidCalList.deleteCalendarOnClientSideOnly(this.getContext());
        }

        // notify the ContentResolver about every URI touched during this pass
        for (Uri uri : androidCalList.getNotifyList()) {
            this.getContext().getContentResolver().notifyChange(uri, null);
        }
        for (Uri uri : serverCalList.getNotifyList()) {
            this.getContext().getContentResolver().notifyChange(uri, null);
        }
        for (Uri uri : notifyList) {
            this.getContext().getContentResolver().notifyChange(uri, null);
        }

        //Log.i(TAG,"Statistics for Calendar: " + serverCalendar.getURI().toString());
        //Log.i(TAG,"Statistics for AndroidCalendar: " + androidCalendar.getAndroidCalendarUri().toString());
        Log.i(TAG, "Entries:                       " + String.valueOf(syncResult.stats.numEntries));
        Log.i(TAG, "Rows inserted:                 " + String.valueOf(syncResult.stats.numInserts));
        Log.i(TAG, "Rows updated:                  " + String.valueOf(syncResult.stats.numUpdates));
        Log.i(TAG, "Rows deleted:                  " + String.valueOf(syncResult.stats.numDeletes));
        Log.i(TAG, "Rows skipped:                  " + String.valueOf(syncResult.stats.numSkippedEntries));
        Log.i(TAG, "Io Exceptions:                 " + String.valueOf(syncResult.stats.numIoExceptions));
        Log.i(TAG, "Parse Exceptions:              " + String.valueOf(syncResult.stats.numParseExceptions));
        Log.i(TAG, "Auth Exceptions:               " + String.valueOf(syncResult.stats.numAuthExceptions));
        Log.i(TAG, "Conflict Detected Exceptions:  "
                + String.valueOf(syncResult.stats.numConflictDetectedExceptions));

        /*} catch (final AuthenticatorException e) {
              syncResult.stats.numParseExceptions++;
              Log.e(TAG, "AuthenticatorException", e);*/
        /*} catch (final OperationCanceledException e) {
            Log.e(TAG, "OperationCanceledExcetpion", e);*/
    } catch (final IOException e) {
        Log.e(TAG, "IOException", e);
        syncResult.stats.numIoExceptions++;
        NotificationsHelper.signalSyncErrors(this.getContext(), "Caldav sync error (IO)", e.getMessage());
        //NotificationsHelper.getCurrentSyncLog().addException(e);
        /*} catch (final AuthenticationException e) {
        //mAccountManager.invalidateAuthToken(Constants.ACCOUNT_TYPE, authtoken);
        syncResult.stats.numAuthExceptions++;
        Log.e(TAG, "AuthenticationException", e);*/
    } catch (final ParseException e) {
        syncResult.stats.numParseExceptions++;
        Log.e(TAG, "ParseException", e);
        NotificationsHelper.signalSyncErrors(this.getContext(), "Caldav sync error (parsing)", e.getMessage());
        //NotificationsHelper.getCurrentSyncLog().addException(e);
        /*} catch (final JSONException e) {
            syncResult.stats.numParseExceptions++;
            Log.e(TAG, "JSONException", e);*/
    } catch (Exception e) {
        // Catch-all boundary: any other failure is counted as a parse error
        // and reported, but never crashes the sync framework.
        Log.e(TAG, "Updating calendar exception " + e.getClass().getName(), e);
        syncResult.stats.numParseExceptions++;
        NotificationsHelper.signalSyncErrors(this.getContext(), "Caldav sync error (general)", e.getMessage());
        //NotificationsHelper.getCurrentSyncLog().addException(e);
        //throw new RuntimeException(e);
    }
}

From source file:edu.ku.brc.specify.tools.webportal.BuildSearchIndex.java

/**
 * /*  w  w w. j  a v  a 2 s . co m*/
 */
public void index() {
    //    0            1           2              3                4               5      6     7
    // CatalogNumber, CountAmt, StartDate, StationFieldNumber TypeStatusName, FullName, Name, RankID,
    //    8          9            10            11            12          13       14     15       16        17         18           19
    // Latitude1, Longitude1, LocalityName, MaxElevation, MinElevation, FullName, Name, RankID, LastName, FirstName, MiddleInitial, Text1
    //         20              21            22              23           24           25         26          27          28
    //collectionObjectId, DeterminationID, TaxonID, CollectingEventID, LocalityID, GeographyID, AgentID, tx.ParentID, geo.ParentID

    //      0            1              2                3               4           5           6          7               8         9          10        11
    // CatalogNumber, StartDate, StationFieldNumber TypeStatusName, tx.FullName, Latitude1, Longitude1, LocalityName, geo.FullName, LastName, FirstName, MiddleInitial
    //                  0  1   2   3  4  5  6  7  8  9  0  1  2  3  4  5  6  7  8  9   20  1  2   3  4  5  6  7  8
    int[] colToTblId = { 1, 1, 10, 10, 4, 4, 4, 4, 2, 2, 2, 2, 2, 3, 3, 3, 5, 5, 5, 1, 1, 9, 4, 10, 2, 3, 5, 4,
            3 };
    int[] includeCol = { 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0 };

    // Index for ResultSet (which is one more than the array index)
    int idIndex = 20;
    int taxIndex = 23;
    int geoIndex = 26;
    int ceIndex = 24;
    int geoNameIndex = 15;
    int taxNameIndex = 7;
    int collDateIndex = 3;

    int taxParentIndex = 28;
    int geoParentIndex = 29;

    Calendar cal = Calendar.getInstance();

    long startTime = System.currentTimeMillis();

    IndexWriter[] writers = null;
    try {
        for (int i = 0; i < analyzers.length; i++) {
            files[i] = new File(fileNames[i]);
            analyzers[i] = new StandardAnalyzer(Version.LUCENE_30);
            FileUtils.deleteDirectory(files[i]);
        }

        System.out.println("Indexing to directory '" + INDEX_DIR + "'...");

        long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM collectionobject");
        long procRecs = 0;

        Statement stmt = null;
        Statement stmt2 = null;
        Statement stmt3 = null;
        //PreparedStatement pStmt = null;
        try {
            writers = new IndexWriter[analyzers.length];
            for (int i = 0; i < files.length; i++) {
                writers[i] = new IndexWriter(FSDirectory.open(files[i]), analyzers[i], true,
                        IndexWriter.MaxFieldLength.LIMITED);
            }

            System.out.println("Total Records: " + totalRecs);

            stmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            stmt2 = dbConn2.createStatement();

            stmt3 = dbConn3.createStatement();
            stmt3.setFetchSize(Integer.MIN_VALUE);

            //pStmt = dbConn3.prepareStatement("SELECT Text1 FROM preparation WHERE CollectionObjectID = ? AND Text1 IS NOT NULL");

            String sql = createQuery();
            System.out.println(sql);

            ResultSet rs = stmt.executeQuery(sql);
            ResultSetMetaData md = rs.getMetaData();

            StringBuilder indexStr = new StringBuilder();
            StringBuilder contents = new StringBuilder();
            StringBuilder sb = new StringBuilder();
            while (rs.next()) {
                String id = rs.getString(idIndex + 1);
                Document doc = new Document();

                doc.add(new Field("id", id.toString(), Field.Store.YES, Field.Index.ANALYZED));

                indexStr.setLength(0);
                contents.setLength(0);
                sb.setLength(0);

                int cnt = 0;
                for (int i = 0; i < idIndex; i++) {
                    if (includeCol[i] == 1) {
                        String val = rs.getString(i + 1);
                        if (i == 0) {
                            val = val.replaceFirst("^0+(?!$)", "");
                        }

                        //System.out.println(i+" "+cnt+"  "+md.getColumnName(i+1)+" ["+(StringUtils.isNotEmpty(val) ? val : " ")+"] ");
                        contents.append(StringUtils.isNotEmpty(val) ? val : " ");
                        contents.append('\t');
                        cnt++;
                    }
                }

                indexStr.append(contents);

                Date collDate = rs.getDate(collDateIndex);
                if (collDate != null) {
                    cal.setTime(collDate);
                    String yearStr = Integer.toString(cal.get(Calendar.YEAR));
                    indexStr.append(yearStr);
                    indexStr.append('\t');
                    doc.add(new Field("yr", yearStr, Field.Store.YES, Field.Index.ANALYZED));
                }

                sb.setLength(0);
                for (int i = idIndex; i < colToTblId.length; i++) {
                    //if (i>idIndex) sb.append(',');
                    //sb.append(String.format("%d=%d", colToTblId[i], rs.getInt(i+1)));
                    doc.add(new Field(Integer.toString(colToTblId[i]), Integer.toString(rs.getInt(i + 1)),
                            Field.Store.YES, Field.Index.NOT_ANALYZED));
                }
                doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                //writers[0].addDocument(doc);

                ///////////////////////////////////////////////
                // Catalog Number
                ///////////////////////////////////////////////
                String catNum = rs.getString(1);
                if (StringUtils.isNotEmpty(catNum)) {
                    doc.add(new Field("cn", catNum, Field.Store.YES, Field.Index.ANALYZED));
                }

                ///////////////////////////////////////////////
                // Image Name in Text1
                ///////////////////////////////////////////////
                boolean hasName = false;
                /*try
                {
                int idd = Integer.parseInt(id);
                //pStmt.setInt(1, idd);
                //ResultSet rsp = pStmt.executeQuery();
                ResultSet rsp = stmt3.executeQuery(String.format("SELECT Text1 FROM preparation WHERE CollectionObjectID = %d AND Text1 IS NOT NULL", idd));
                if (rsp.next())
                {
                    String imgName = rsp.getString(1);
                    if (StringUtils.isNotEmpty(imgName))
                    {
                        String nm = FilenameUtils.getName(imgName);
                        doc.add(new Field("im", nm, Field.Store.NO, Field.Index.ANALYZED));
                        contents.append(nm);
                        hasName = true;
                    }
                }
                rsp.close();
                } catch (SQLException e) {e.printStackTrace();}
                */
                if (!hasName) {
                    contents.append(" ");
                }
                contents.append('\t');

                ///////////////////////////////////////////////
                // Collector  (Agent)
                ///////////////////////////////////////////////
                String dataStr = buildStr(rs, sb, 17, 18, 19);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("ag", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Locality 
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 9, 10, 11, 12, 13, 14);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("lc", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[2].addDocument(doc);

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Taxon
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 5, 6);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("tx", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[3].addDocument(doc);

                int taxId = rs.getInt(taxIndex);
                boolean taxOK = !rs.wasNull();
                int taxPId = rs.getInt(taxParentIndex);
                taxOK = taxOK && !rs.wasNull();

                int geoId = rs.getInt(geoIndex);
                boolean geoOK = !rs.wasNull();
                int geoPId = rs.getInt(geoParentIndex);
                geoOK = geoOK && !rs.wasNull();

                int ceId = rs.getInt(ceIndex);
                boolean ceOK = !rs.wasNull();

                if (taxOK) {
                    addHigherTaxa(stmt2, doc, indexStr, taxId, taxPId, rs.getInt(taxNameIndex + 1),
                            rs.getString(taxNameIndex));
                    addAuthor(stmt2, doc, indexStr, taxId);
                }

                if (geoOK) {
                    addCountry(stmt2, doc, indexStr, geoId, geoPId, rs.getInt(geoNameIndex + 1),
                            rs.getString(geoNameIndex));
                }

                if (ceOK) {
                    addHost(stmt2, doc, indexStr, ceId);
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                doc.add(new Field("cs", indexStr.toString(), Field.Store.NO, Field.Index.ANALYZED));
                doc.add(new Field("contents", contents.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                writers[0].addDocument(doc);

                //System.out.println(procRecs+" "+rs.getString(1));
                procRecs++;
                if (procRecs % 1000 == 0) {
                    System.out.println(procRecs);
                }

                if (procRecs % 100000 == 0) {
                    System.out.println("Optimizing...");
                    writers[0].optimize();
                }
            }
            rs.close();

        } catch (SQLException sqlex) {
            sqlex.printStackTrace();

        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("IOException adding Lucene Document: " + e.getMessage());

        } finally {

            if (stmt != null) {
                try {
                    if (stmt != null)
                        stmt.close();
                    if (stmt2 != null)
                        stmt2.close();
                    if (stmt3 != null)
                        stmt3.close();

                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }

        }

    } catch (IOException e) {
        e.printStackTrace();

        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());

    } finally {
        for (Analyzer a : analyzers) {
            a.close();
        }
        analyzers = null;

        for (IndexWriter writer : writers) {
            try {
                System.out.println("Optimizing...");
                writer.optimize();
                writer.close();
                System.out.println("Done Optimizing.");

            } catch (CorruptIndexException e) {
                e.printStackTrace();

            } catch (IOException e) {
                e.printStackTrace();
            }
            writer = null;
        }

        long endTime = System.currentTimeMillis();
        System.out.println("Time: " + (endTime - startTime) / 1000);
    }
}

From source file:de.decoit.visa.topology.TopologyStorage.java

/**
 * Layout the topology by using the de.decoit.visa.gridlayout.GridLayout
 * class. By default, the 'neato' executable will be used for layouting. If
 * any nodes with fixed positions are detected, the 'fdp' executable will be
 * used to get better results. All existing switches and VMs will be used as
 * nodes, all cables as edges. Already positioned nodes will not be moved.
 *//*from   w ww  .j a va 2  s  .  c o  m*/
public void layoutTopology() {
    try {
        // Remember which non-global groups were laid out so their group
        // objects can later be placed on the base layer as well.
        HashSet<ComponentGroup> doneGroups = new HashSet<>();

        // Pass 1: lay out the sub-grid of every component group except
        // the global group 0.0.0.0.
        for (Map.Entry<String, ComponentGroup> entry : storage.entrySet()) {
            ComponentGroup group = entry.getValue();
            if (group.isGlobalGroup()) {
                continue;
            }

            // 'neato' is the default layouter; switch to 'fdp' as soon as
            // any node with a fixed position is encountered, as it gives
            // better results in that case.
            String layouter = "neato";
            GridLayout subGridLayout = new GridLayout(group.subGridDimensions);

            for (NetworkComponent component : group.componentList) {
                if (!subGridLayout.addComponent(component)) {
                    layouter = "fdp";
                }
            }

            for (NetworkCable cable : group.cables) {
                subGridLayout.addCable(cable);
            }

            for (Map.Entry<String, GroupSwitch> switchEntry : group.groupSwitches.entrySet()) {
                if (!subGridLayout.addGroupSwitch(switchEntry.getValue())) {
                    layouter = "fdp";
                }
            }

            subGridLayout.run(layouter);
            doneGroups.add(group);
        }

        // Pass 2: lay out the base layer (global group 0.0.0.0).
        String layouter = "neato";
        GridLayout baseLayout = new GridLayout(TEBackend.getGridDimensions());

        for (NetworkComponent component : getComponentGroupByName("0.0.0.0").componentList) {
            if (!baseLayout.addComponent(component)) {
                layouter = "fdp";
            }
        }

        // Only cables whose ends belong to the same group are routed on
        // the base layer.
        for (NetworkCable cable : getComponentGroupByName("0.0.0.0").cables) {
            if (cable.getLeft().getComponentGroup().equals(cable.getRight().getComponentGroup())) {
                baseLayout.addCable(cable);
            }
        }

        // Place the group objects of all groups laid out in pass 1.
        for (ComponentGroup group : doneGroups) {
            if (!baseLayout.addComponentGroup(group)) {
                layouter = "fdp";
            }
        }

        baseLayout.run(layouter);
    } catch (IOException ex) {
        // Log the exception class and message; the stack trace only at
        // debug level to keep the error log compact.
        StringBuilder msg = new StringBuilder("Caught: [");
        msg.append(ex.getClass().getSimpleName());
        msg.append("] ");
        msg.append(ex.getMessage());
        log.error(msg.toString());

        if (log.isDebugEnabled()) {
            for (StackTraceElement ste : ex.getStackTrace()) {
                log.debug(ste.toString());
            }
        }
    }
}

From source file:org.opendaylight.sfc.bootstrap.SfcProviderBootstrapRestAPITest.java

@Test
public void testReadJsonFiles() {
    // NOTE: the file path differs slightly from the real bootstrap location
    // because the original path in bootstrapDataDir does not exist here.
    String testConfigPath = "sfc-provider/src/test/resources/SfcProviderConfig/sfc_provider_config_test.json";
    byte[] rawBytes = null;
    String configJson = null;
    JSONObject bootstrapJson = null;

    Path configPath = Paths.get(testConfigPath);

    // Read the test config file into a UTF-8 string.
    try {
        rawBytes = Files.readAllBytes(configPath);
        configJson = new String(rawBytes, StandardCharsets.UTF_8);
    } catch (IOException e) {
        LOG.error("Failed to...", e);
    }

    // Parse the raw string into a JSON object.
    if (rawBytes != null) {
        try {
            bootstrapJson = new JSONObject(configJson);
        } catch (JSONException e) {
            LOG.error("Error instantiating {}", configJson, e);
        }
    }

    // Extract the "bootstrap" section used by the code under test.
    if (bootstrapJson != null) {
        try {
            bootstrapJson = bootstrapJson.getJSONObject("bootstrap");
        } catch (JSONException e) {
            LOG.error("Error retrieving bootstrap object", e);
        }
    }

    // Stub the config lookup: the first stub pretends the (non-existent)
    // json file was found, the second hands back the object built above.
    PowerMockito.stub(PowerMockito.method(SfcProviderConfig.class, "readConfigFile")).toReturn(true);
    PowerMockito.stub(PowerMockito.method(SfcProviderConfig.class, "getJsonBootstrapObject"))
            .toReturn(bootstrapJson);

    /*
     * Actual test. If both json files were read successfully and the rest
     * json file was created, it is PUT to a url location - which needs a
     * running sfc-karaf (or any server). Since none is running, the call
     * should throw ClientHandlerException; catching and checking that
     * exception means all earlier steps (reading json etc.) succeeded and
     * the test passes. Any other exception (or none) fails the test.
     */
    try {
        SfcProviderBootstrapRestAPI sfcProviderBootstrapRestAPI = new SfcProviderBootstrapRestAPI(new Object[0],
                new Class[0], "param");
        sfcProviderBootstrapRestAPI.putBootstrapData();
    } catch (Exception e) {
        if (e.getClass() == ClientHandlerException.class) {
            assertEquals("Must be equal", e.getClass(), (ClientHandlerException.class));
            assertTrue("Must be true", e.getCause().getMessage().toLowerCase().contains("connection refused"));
        } else
            // test is ok in IDE, build throws null pointer, don't know why
            assertEquals("Must be equal", e.getClass(), (NullPointerException.class));
    }
}

From source file:org.fhaes.fhfilereader.FHFile.java

/**
 * Initializes all properties of a new FHFile.
 *//*from  w  ww.  j  av  a  2 s . co m*/
private void init() {

    log.debug("Initialising file: " + this.getName());
    isInitialised = true;
    isFileValid = false;

    // Temp file receiving the stage-2 checker output; deleted again in the
    // finally block below. Stays null if creation fails.
    File outputFile = null;
    try {
        outputFile = File.createTempFile("fhaes", ".tmp");
        outputFile.deleteOnExit();
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    File[] inputFileArr = new File[1];
    inputFileArr[0] = this;

    // First pass: parse the legacy data file with DendroFileIO (Tricycle).
    try {
        // Create a new converter
        tricycleReader = new FHX2Reader();
        log.debug("Checking file using DendroFileIO...");

        tricycleReader.loadFile(super.getAbsolutePath());
        log.debug("DendroFileIO is happy with file");
        isFileValid = true;
    } catch (IOException e) {
        // Standard IO Exception - the file could not be opened at all
        log.info("IO Exception in DendroFileIO...  " + e.getLocalizedMessage());
        errorMessage = "Unable to open file";
        isFileValid = false;

    } catch (InvalidDendroFileException e) {
        // Fatal error interpreting file; try to extract the offending
        // line number for display purposes.
        log.info(e.getLocalizedMessage());
        errorMessage = e.getLocalizedMessage();
        isFileValid = false;

        if (e.getPointerType().equals(PointerType.LINE) && e.getPointerNumber() != null) {
            try {
                lineNumberError = Integer.parseInt(e.getPointerNumber());
            } catch (NumberFormatException ex) {
                // Do nothing!
            }
        }
    }

    log.debug("DendroFileIO was happy with file, but let's make sure that FHAES parser is happy too...");

    // Second pass: run the FHAES stage-2 file checker.
    fhaesReader = new FHX2FileReader(this);
    try {
        FHFileChecker checker = new FHFileChecker();
        isFileValid = checker.doCheck(null, inputFileArr, outputFile, false, true);
        report = checker.getReport();
    } catch (Exception e) {
        log.error("Elena's file checker crashed.  Cannot read file.");
        errorMessage = "Error parsing file using FHAES stage 2 parser.  Unknown error";
        report = "An unhandled error was encountered when checking this file.\nPlease contact the developers for further information. Technical details are as follows:\n\nException type:  "
                + e.getClass().getSimpleName() + "\nError:           " + e.getLocalizedMessage();

        e.printStackTrace();
        return;
    } finally {
        // FIX: outputFile is null when createTempFile failed above; the
        // previous unconditional delete() threw a NullPointerException here.
        if (outputFile != null) {
            outputFile.delete();
        }
    }

    // NOTE(review): the reader is re-created after the check, presumably to
    // reset its internal state after doCheck consumed the file — confirm
    // whether this second instantiation is really needed.
    fhaesReader = new FHX2FileReader(this);

    if (isFileValid) {
        log.debug("File checker is happy with file");
        return;
    } else {
        log.debug("File checker found an error");
        errorMessage = "FHAES stage 2 parser found an error with this file.  See summary tab for more information.";
    }
}

From source file:org.cast.cwm.service.UserSpreadsheetReader.java

/**
 * Read spreadsheet of user information and generate potential users.
 * Returns true if all was successful and users could be created as specified.
 * /*from  w  w w. jav a2 s.  co m*/
 * This method does NOT modify the datastore.
 * 
 * @param stream the input stream of CSV data
 * @return true if no errors encountered.
 */
@Override
public boolean readInput(InputStream stream) {
    potentialUsers = new ArrayList<PotentialUserSave>();
    potentialSites = new HashMap<String, Site>();
    potentialPeriods = new HashMap<Site, Map<String, Period>>();

    CSVParser parser;
    try {
        parser = CSVFormat.EXCEL.withHeader().withIgnoreEmptyLines().withIgnoreSurroundingSpaces()
                .parse(new InputStreamReader(new BOMInputStream(stream), "UTF-8"));
    } catch (IOException e) {
        log.error(e.getMessage());
        globalError = e.getMessage();
        return false;
    }

    try {
        // Make our own secondary mapping of header names to fields, by
        // lowercasing and removing spaces from all header names
        headerMap = parser.getHeaderMap();
        for (String hdr : new HashSet<String>(headerMap.keySet())) {
            String normalized = hdr.toLowerCase().replaceAll("\\s", "");
            if (!normalized.equals(hdr)) {
                headerMap.put(normalized, headerMap.get(hdr));
            }
        }

        globalError = checkRequiredHeaders(headerMap);
        if (!Strings.isEmpty(globalError))
            return false;

        // Read the CSV file, create PotentialUserSave objects, record error messages, add to potentialUsers List
        try {
            boolean errors = false; // have errors been encountered?
            for (CSVRecord record : parser) {

                try {
                    User user = createUserObject(record);
                    String messages = populateUserObject(user, record);
                    if (Strings.isEmpty(messages))
                        messages = validateUser(user);

                    // Add a PotentialUserSave to the list.
                    potentialUsers.add(new PotentialUserSave(modelProvider.modelOf(user), messages, record));
                    if (!Strings.isEmpty(messages))
                        errors = true;

                } catch (ArrayIndexOutOfBoundsException e) {
                    // This can happen if the last row is missing values; Excel doesn't fill them out to the last column
                    log.error("Caught exception importing line {}: {}", parser.getCurrentLineNumber(),
                            e.getClass());
                    potentialUsers.add(new PotentialUserSave(null, "Data missing from CSV.\n", record));
                    errors = true;
                } catch (Exception e) {
                    e.printStackTrace();
                    log.error("Caught exception importing line {}: {}", parser.getCurrentLineNumber(),
                            e.getClass());
                    potentialUsers.add(new PotentialUserSave(null, "Error: " + e, record));
                    errors = true;
                }
            }

            // If CSV file has only one line, it is either empty or has unrecognized LF/CR values.
            if (parser.getCurrentLineNumber() == 1) {
                potentialUsers.add(
                        new PotentialUserSave(null, "Empty or Corrupted File.  Note: Save as Windows CSV.", null));
                globalError = "Empty or Corrupted File - LF/CR values may be invalid!";
                throw new CharacterCodingException();
            }
            return (!errors);

        } catch (CharacterCodingException e) {
            log.error("Empty or Corrupted File - only 1 line found - CR/LF issue?. {}", e.getClass());
            return false;
        }

    } finally {
        // FIX: the parser (and the underlying reader/stream) was never
        // closed, leaking a handle on every invocation - including the
        // early "return false" paths above.
        try {
            parser.close();
        } catch (IOException e) {
            log.error("Error closing CSV parser: {}", e.getMessage());
        }
    }
}

From source file:com.clustercontrol.agent.filecheck.FileCheck.java

/**
 * Scans the monitored directory, compares the files found against the cached
 * timestamps and sizes, and kicks the file-check jobs whose create / delete /
 * modify conditions match. (Original comment was garbled mojibake.)
 */
public void run() {
    m_log.debug("check start. directory=" + m_directory);

    // Job triggers collected during this scan; fired in step 5 below.
    ArrayList<JobFileCheck> kickList = new ArrayList<JobFileCheck>();

    // 1. Enumerate the files in the monitored directory.
    File directory = new File(m_directory);
    if (!directory.isDirectory()) {
        m_log.warn(m_directory + " is not directory");
        return;
    }
    File[] files = directory.listFiles();
    if (files == null) {
        m_log.warn(m_directory + " does not have a reference permission");
        return;
    }
    ArrayList<File> fileList = new ArrayList<File>();

    for (File file : files) {
        if (!file.isFile()) {
            m_log.debug(file.getName() + " is not file");
            continue;
        }
        fileList.add(file);
    }

    // 2. Detect deleted files: cached names that are no longer on disk.
    // A HashSet gives O(1) membership tests instead of List.contains.
    HashSet<String> filenameSet = new HashSet<String>();
    for (File file : fileList) {
        filenameSet.add(file.getName());
    }
    // FIX: iterate over a snapshot of the key set. Removing entries from
    // the cache while iterating its live keySet() view throws
    // ConcurrentModificationException on a non-concurrent map.
    for (String filename : new ArrayList<String>(fileTimestampCache.keySet())) {
        if (!filenameSet.contains(filename)) {
            fileTimestampCache.remove(filename);
            fileTimestampFlagCache.remove(filename);
            fileSizeCache.remove(filename);
            fileSizeFlagCache.remove(filename);
            for (JobFileCheck check : m_jobFileCheckList) {
                if (check.getEventType() == FileCheckConstant.TYPE_DELETE && matchFile(check, filename)) {
                    m_log.info("kickList.add [" + filename + "] (delete)");
                    JobFileCheck kick = getCopy(check);
                    kick.setFileName(filename);
                    kickList.add(kick);
                }
            }
        }
    }

    // 3. Detect created files and timestamp changes.
    for (File file : fileList) {
        String filename = file.getName();
        Long newTimestamp = file.lastModified();
        Long oldTimestamp = fileTimestampCache.get(filename);
        if (oldTimestamp == null) {
            // First time this file is seen: prime the cache and kick
            // any matching "create" checks.
            fileTimestampCache.put(filename, newTimestamp);
            fileTimestampFlagCache.put(filename, false);
            for (JobFileCheck check : m_jobFileCheckList) {
                if (check.getEventType() == FileCheckConstant.TYPE_CREATE && matchFile(check, filename)) {
                    m_log.info("kickList.add [" + filename + "] (create)");
                    JobFileCheck kick = getCopy(check);
                    kick.setFileName(filename);
                    kickList.add(kick);
                }
            }
        } else if (!oldTimestamp.equals(newTimestamp)) {
            // Timestamp changed: remember it and flag the file, but wait
            // until the next scan (when it is stable) before kicking.
            m_log.info("timestamp : " + oldTimestamp + "->" + newTimestamp + " (" + filename + ")");
            fileTimestampCache.put(filename, newTimestamp);
            fileTimestampFlagCache.put(filename, true);
        } else {
            if (fileTimestampFlagCache.get(filename) != null && fileTimestampFlagCache.get(filename)) {
                // Timestamp is stable again after a change: the
                // modification is considered finished, kick now.
                for (JobFileCheck check : m_jobFileCheckList) {
                    if (check.getEventType() == FileCheckConstant.TYPE_MODIFY
                            && check.getModifyType() == FileCheckConstant.TYPE_MODIFY_TIMESTAMP
                            && matchFile(check, filename)) {
                        m_log.info("kickList.add [" + filename + "] (timestamp)");
                        JobFileCheck kick = getCopy(check);
                        kick.setFileName(filename);
                        kickList.add(kick);
                    }
                }
            }
            fileTimestampFlagCache.put(filename, false);
        }
    }

    // 4. Detect file size changes (same settle-then-kick scheme as step 3).
    for (File file : fileList) {
        String filename = file.getName();
        RandomAccessFileWrapper fr = null;
        try {
            fr = new RandomAccessFileWrapper(file, "r");
            Long newSize = fr.length();
            Long oldSize = fileSizeCache.get(filename);
            if (oldSize == null) {
                fileSizeCache.put(filename, newSize);
                fileSizeFlagCache.put(filename, false);
            } else if (!oldSize.equals(newSize)) {
                m_log.info("size : " + oldSize + "->" + newSize + " (" + filename + ")");
                fileSizeCache.put(filename, newSize);
                fileSizeFlagCache.put(filename, true);
            } else {
                if (fileSizeFlagCache.get(filename) != null && fileSizeFlagCache.get(filename)) {
                    // Size is stable again after a change: kick matching
                    // "modify by filesize" checks.
                    for (JobFileCheck check : m_jobFileCheckList) {
                        if (check.getEventType() == FileCheckConstant.TYPE_MODIFY
                                && check.getModifyType() == FileCheckConstant.TYPE_MODIFY_FILESIZE
                                && matchFile(check, filename)) {
                            m_log.info("kickList.add [" + filename + "] (filesize)");
                            JobFileCheck kick = getCopy(check);
                            kick.setFileName(filename);
                            kickList.add(kick);
                        }
                    }
                }
                fileSizeFlagCache.put(filename, false);
            }
        } catch (IOException e) {
            m_log.info("run() : IOException: " + e.getMessage());
        } catch (Exception e) {
            m_log.warn("run() : IOException: " + e.getMessage());
        } finally {
            if (fr != null) {
                try {
                    fr.close();
                } catch (final Exception e) {
                    m_log.debug("run() : " + e.getMessage());
                }
            }
        }
    }

    // Skip kicking on the very first scan: the caches were only just primed.
    if (initFlag) {
        initFlag = false;
        return;
    }

    // 5. Kick the collected jobs, honoring each check's calendar.
    for (JobFileCheck jobFileCheck : kickList) {
        m_log.info("kick " + jobFileCheck.getId());
        String calendarId = jobFileCheck.getCalendarId();
        CalendarInfo calendarInfo = jobFileCheck.getCalendarInfo();
        boolean run = true;
        if (calendarId != null && calendarInfo == null) {
            m_log.info("unknown error : id=" + jobFileCheck.getId() + "calendarId=" + calendarId);
        }
        if (calendarInfo != null) {
            run = CalendarWSUtil.isRun(calendarInfo);
        }

        if (!run) {
            m_log.info("not exec(calendar) : id=" + jobFileCheck.getId() + "calendarId=" + calendarId);
            continue;
        }
        try {
            String sessionId = jobFileCheckResultRetry(jobFileCheck);
            String jobunitId = jobFileCheck.getJobunitId();
            String jobId = jobFileCheck.getJobId();
            m_log.info("jobFileCheckResult sessionId=" + sessionId + ", jobunitId=" + jobunitId + ", jobId="
                    + jobId);
        } catch (Exception e) {
            m_log.warn("run(jobFileCheckResult) : " + e.getClass().getSimpleName() + ", " + e.getMessage(), e);
        }
    }
}

From source file:com.github.wellcomer.query3.core.Autocomplete.java

/**
 * Builds the autocomplete data: for each field of the scanned queries,
 * collects the distinct values into a sorted TreeSet and writes one file per
 * field. (Original comment was garbled mojibake.)
 *
 * @param queryList the queries to scan
 * @param scanModifiedOnly if true, scan only queries modified since the last run
 * @param mergePrevious if true, merge the previously written values into the new set
 */
public void autolearn(QueryList queryList, boolean scanModifiedOnly, boolean mergePrevious) throws IOException {

    FileTime timestamp;
    long modifiedSince = 0;
    Path timestampFilePath = Paths.get(filePath, ".timestamp");

    if (scanModifiedOnly) { // only pick up queries changed since the last run
        try { // read the marker file left behind by the previous run
            timestamp = Files.getLastModifiedTime(timestampFilePath);
            modifiedSince = timestamp.toMillis();
        } catch (IOException e) { // marker missing: create it and scan everything
            Files.createFile(timestampFilePath);
        }
    }

    // field name (lowercased) -> sorted set of distinct values
    HashMap<String, TreeSet<String>> fields = new HashMap<>();
    Iterator<Query> queryIterator = queryList.iterator(modifiedSince);

    String k, v;

    while (queryIterator.hasNext()) {

        Query query = queryIterator.next();

        for (Map.Entry<String, String> entry : query.entrySet()) {

            k = entry.getKey().toLowerCase();
            v = entry.getValue().trim();

            // skip values too short to be useful for autocompletion
            if (v.length() < 2)
                continue;

            if (!fields.containsKey(k)) {

                TreeSet<String> treeSet = new TreeSet<>();

                try {
                    if (mergePrevious) { // seed with the previously written values
                        List<String> lines = Files.readAllLines(Paths.get(filePath, k), charset);
                        treeSet.addAll(lines);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }

                fields.put(k, treeSet);
            }
            TreeSet<String> treeSet = fields.get(k);
            treeSet.add(v);
        }
    }

    // Write one file per field, one value per line.
    for (Map.Entry<String, TreeSet<String>> entry : fields.entrySet()) {

        k = entry.getKey();
        ArrayList<String> lines = new ArrayList<>(fields.get(k));

        // FIX: write with the same charset used for reading (FileWriter used
        // the platform default, corrupting the mergePrevious round-trip) and
        // use try-with-resources so the writer is closed even on error.
        try (java.io.Writer fileWriter = Files.newBufferedWriter(Paths.get(filePath, k), charset)) {
            fileWriter.write(StringUtils.join(lines, System.getProperty("line.separator")));
        }
    }

    // Update the marker so the next scanModifiedOnly run starts from here.
    try {
        Files.setLastModifiedTime(timestampFilePath, FileTime.fromMillis(System.currentTimeMillis()));
    } catch (IOException e) {
        if (e.getClass().getSimpleName().equals("NoSuchFileException"))
            Files.createFile(timestampFilePath);
        e.printStackTrace();
    }
}