List of usage examples for java.util.HashMap.values()
public Collection<V> values()
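Before the real-world examples below, a minimal, self-contained sketch (not taken from any of the listed sources) of the basic values() contract: the returned Collection is a live view backed by the map, so removing through it removes the corresponding entries, and a copy is needed when the collection must outlive later map changes.

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;

public class ValuesBasics {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);
        counts.put("c", 2);

        // values() returns a view backed by the map, not a copy.
        Collection<Integer> values = counts.values();
        values.remove(2);                  // removes one entry whose value is 2 from the map itself
        System.out.println(counts.size()); // 2

        // Take an independent snapshot when the collection must outlive later map changes.
        List<Integer> snapshot = new ArrayList<>(values);
        System.out.println(snapshot.size()); // 2
    }
}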
From source file:ddf.catalog.impl.CatalogFrameworkImpl.java
@Override public UpdateResponse update(UpdateStorageRequest streamUpdateRequest) throws IngestException, SourceUnavailableException { validateUpdateStorageRequest(streamUpdateRequest); setFlagsOnRequest(streamUpdateRequest); if (fanoutEnabled) { throw new IngestException(FANOUT_MESSAGE); }/* ww w . ja v a 2 s . co m*/ if (Requests.isLocal(streamUpdateRequest) && (!sourceIsAvailable(catalog) || !storageIsAvailable(storage))) { SourceUnavailableException sourceUnavailableException = new SourceUnavailableException( "Local provider is not available, cannot perform create operation."); if (INGEST_LOGGER.isWarnEnabled()) { INGEST_LOGGER.warn("Error on create operation, local provider not available.", sourceUnavailableException); } throw sourceUnavailableException; } Map<String, Metacard> metacardMap = new HashMap<>(); List<ContentItem> contentItems = new ArrayList<>(streamUpdateRequest.getContentItems().size()); HashMap<String, Path> tmpContentPaths = new HashMap<>(streamUpdateRequest.getContentItems().size()); generateMetacardAndContentItems(streamUpdateRequest, streamUpdateRequest.getContentItems(), metacardMap, contentItems, tmpContentPaths); streamUpdateRequest.getProperties().put(CONTENT_PATHS, tmpContentPaths); UpdateResponse updateResponse; UpdateStorageRequest updateStorageRequest = null; try { if (contentItems.size() > 0) { updateStorageRequest = new UpdateStorageRequestImpl(contentItems, streamUpdateRequest.getId(), streamUpdateRequest.getProperties()); for (final PreUpdateStoragePlugin plugin : frameworkProperties.getPreUpdateStoragePlugins()) { try { updateStorageRequest = plugin.process(updateStorageRequest); } catch (PluginExecutionException e) { LOGGER.warn("Plugin processing failed. This is allowable. Skipping to next plugin.", e); } } UpdateStorageResponse updateStorageResponse; try { updateStorageResponse = storage.update(updateStorageRequest); updateStorageResponse.getProperties().put(CONTENT_PATHS, tmpContentPaths); } catch (StorageException e) { throw new IngestException("Could not store content items. Removed created metacards.", e); } for (final PostUpdateStoragePlugin plugin : frameworkProperties.getPostUpdateStoragePlugins()) { try { updateStorageResponse = plugin.process(updateStorageResponse); } catch (PluginExecutionException e) { LOGGER.warn("Plugin processing failed. This is allowable. 
Skipping to next plugin.", e); } } for (ContentItem contentItem : updateStorageResponse.getUpdatedContentItems()) { metacardMap.put(contentItem.getId(), contentItem.getMetacard()); } } UpdateRequestImpl updateRequest = new UpdateRequestImpl(Iterables.toArray( metacardMap.values().stream().map(Metacard::getId).collect(Collectors.toList()), String.class), new ArrayList<>(metacardMap.values())); updateRequest.setProperties(streamUpdateRequest.getProperties()); updateResponse = update(updateRequest); } catch (Exception e) { if (updateStorageRequest != null) { try { storage.rollback(updateStorageRequest); } catch (StorageException e1) { LOGGER.error("Unable to remove temporary content for id: " + streamUpdateRequest.getId(), e1); } } throw new IngestException("Unable to store products for request: " + streamUpdateRequest.getId(), e); } finally { if (updateStorageRequest != null) { try { storage.commit(updateStorageRequest); } catch (StorageException e) { LOGGER.error("Unable to commit content changes for id: " + updateStorageRequest.getId(), e); try { storage.rollback(updateStorageRequest); } catch (StorageException e1) { LOGGER.error("Unable to remove temporary content for id: " + updateStorageRequest.getId(), e1); } } } tmpContentPaths.values().stream().forEach(path -> FileUtils.deleteQuietly(path.toFile())); tmpContentPaths.clear(); } return updateResponse; }
From source file:ddf.catalog.impl.CatalogFrameworkImpl.java
@Override public CreateResponse create(CreateStorageRequest streamCreateRequest) throws IngestException, SourceUnavailableException { validateCreateStorageRequest(streamCreateRequest); setFlagsOnRequest(streamCreateRequest); if (fanoutEnabled) { throw new IngestException(FANOUT_MESSAGE); }//from w w w . java 2s . c o m if (Requests.isLocal(streamCreateRequest) && (!sourceIsAvailable(catalog) || !storageIsAvailable(storage))) { SourceUnavailableException sourceUnavailableException = new SourceUnavailableException( "Local provider is not available, cannot perform create operation."); if (INGEST_LOGGER.isWarnEnabled()) { INGEST_LOGGER.warn("Error on create operation, local provider not available.", sourceUnavailableException); } throw sourceUnavailableException; } Map<String, Metacard> metacardMap = new HashMap<>(); List<ContentItem> contentItems = new ArrayList<>(streamCreateRequest.getContentItems().size()); HashMap<String, Path> tmpContentPaths = new HashMap<>(streamCreateRequest.getContentItems().size()); generateMetacardAndContentItems(streamCreateRequest, streamCreateRequest.getContentItems(), metacardMap, contentItems, tmpContentPaths); streamCreateRequest.getProperties().put(CONTENT_PATHS, tmpContentPaths); // Get attributeOverrides, apply them and then remove them from the streamCreateRequest so they are not exposed to plugins Map<String, String> attributeOverrideHeaders = (HashMap<String, String>) streamCreateRequest.getProperties() .get(Constants.ATTRIBUTE_OVERRIDES_KEY); applyAttributeOverridesToMetacardMap(attributeOverrideHeaders, metacardMap); streamCreateRequest.getProperties().remove(Constants.ATTRIBUTE_OVERRIDES_KEY); CreateStorageRequest createStorageRequest = null; CreateResponse createResponse; try { if (contentItems.size() > 0) { createStorageRequest = new CreateStorageRequestImpl(contentItems, streamCreateRequest.getId(), streamCreateRequest.getProperties()); for (final PreCreateStoragePlugin plugin : frameworkProperties.getPreCreateStoragePlugins()) { try { createStorageRequest = plugin.process(createStorageRequest); } catch (PluginExecutionException e) { LOGGER.warn("Plugin processing failed. This is allowable. Skipping to next plugin.", e); } } CreateStorageResponse createStorageResponse; try { createStorageResponse = storage.create(createStorageRequest); createStorageResponse.getProperties().put(CONTENT_PATHS, tmpContentPaths); } catch (StorageException e) { throw new IngestException("Could not store content items.", e); } for (final PostCreateStoragePlugin plugin : frameworkProperties.getPostCreateStoragePlugins()) { try { createStorageResponse = plugin.process(createStorageResponse); } catch (PluginExecutionException e) { LOGGER.warn("Plugin processing failed. This is allowable. 
Skipping to next plugin.", e); } } for (ContentItem contentItem : createStorageResponse.getCreatedContentItems()) { if (contentItem.getMetacard().getResourceURI() == null) { contentItem.getMetacard() .setAttribute(new AttributeImpl(Metacard.RESOURCE_URI, contentItem.getUri())); contentItem.getMetacard().setAttribute( new AttributeImpl(Metacard.RESOURCE_SIZE, String.valueOf(contentItem.getSize()))); } metacardMap.put(contentItem.getId(), contentItem.getMetacard()); } } CreateRequest createRequest = new CreateRequestImpl(new ArrayList<>(metacardMap.values()), streamCreateRequest.getProperties()); createResponse = create(createRequest); } catch (Exception e) { if (createStorageRequest != null) { try { storage.rollback(createStorageRequest); } catch (StorageException e1) { LOGGER.error("Unable to remove temporary content for id: " + createStorageRequest.getId(), e1); } } throw new IngestException("Unable to store products for request: " + streamCreateRequest.getId(), e); } finally { if (createStorageRequest != null) { try { storage.commit(createStorageRequest); } catch (StorageException e) { LOGGER.error("Unable to commit content changes for id: " + createStorageRequest.getId(), e); try { storage.rollback(createStorageRequest); } catch (StorageException e1) { LOGGER.error("Unable to remove temporary content for id: " + createStorageRequest.getId(), e1); } } } tmpContentPaths.values().stream().forEach(path -> FileUtils.deleteQuietly(path.toFile())); tmpContentPaths.clear(); } return createResponse; }
From source file:de.tudarmstadt.ukp.dariah.IO.DARIAHWriter.java
private void convert(JCas aJCas, PrintWriter aOut) { int paragraphId = 0, sentenceId = 0, tokenId = 0; Map<Token, Collection<NamedEntity>> neCoveringMap = JCasUtil.indexCovering(aJCas, Token.class, NamedEntity.class); Map<Token, Collection<Chunk>> chunksCoveringMap = JCasUtil.indexCovering(aJCas, Token.class, Chunk.class); Map<Token, Collection<Section>> sectionCoveringMap = JCasUtil.indexCovering(aJCas, Token.class, Section.class); Map<Token, Collection<DirectSpeech>> directSpeechCoveringMap = JCasUtil.indexCovering(aJCas, Token.class, DirectSpeech.class); Map<Token, Collection<SemanticPredicate>> predIdx = JCasUtil.indexCovered(aJCas, Token.class, SemanticPredicate.class); Map<SemanticPredicate, Collection<Token>> pred2TokenIdx = JCasUtil.indexCovering(aJCas, SemanticPredicate.class, Token.class); Map<SemanticArgument, Collection<Token>> argIdx = JCasUtil.indexCovered(aJCas, SemanticArgument.class, Token.class); //Coreference Map<Token, Collection<CoreferenceLink>> corefLinksCoveringMap = JCasUtil.indexCovering(aJCas, Token.class, CoreferenceLink.class); HashMap<CoreferenceLink, CoreferenceChain> linkToChainMap = new HashMap<>(); HashMap<CoreferenceChain, Integer> corefChainToIntMap = new HashMap<>(); int corefChainId = 0; for (CoreferenceChain chain : JCasUtil.select(aJCas, CoreferenceChain.class)) { CoreferenceLink link = chain.getFirst(); int count = 0; while (link != null) { linkToChainMap.put(link, chain); link = link.getNext();/*from w w w . j av a2 s. c o m*/ count++; } if (count > 0) { corefChainToIntMap.put(chain, corefChainId); corefChainId++; } } HashMap<Token, Row> ctokens = new LinkedHashMap<Token, Row>(); Collection<Paragraph> paragraphs = select(aJCas, Paragraph.class); Collection<Sentence> sentences = select(aJCas, Sentence.class); TreeSet<Integer> sentenceEnds = new TreeSet<>(); for (Sentence sentence : sentences) { sentenceEnds.add(sentence.getEnd()); } for (Paragraph paragraph : paragraphs) { sentenceEnds.add(paragraph.getEnd()); } for (Paragraph para : select(aJCas, Paragraph.class)) { for (Sentence sentence : selectCovered(Sentence.class, para)) { // Tokens List<Token> tokens = selectCovered(Token.class, sentence); // Check if we should try to include the morphology in output List<Morpheme> morphologies = selectCovered(Morpheme.class, sentence); boolean useMorphology = tokens.size() == morphologies.size(); // Check if we should try to include the morphology in output List<Hyphenation> hyphenations = selectCovered(Hyphenation.class, sentence); boolean useHyphenation = tokens.size() == hyphenations.size(); //Parsing information String[] parseFragments = null; List<ROOT> root = selectCovered(ROOT.class, sentence); if (root.size() == 1) { PennTreeNode rootNode = PennTreeUtils.convertPennTree(root.get(0)); if ("ROOT".equals(rootNode.getLabel())) { rootNode.setLabel("TOP"); } parseFragments = toPrettyPennTree(rootNode); } boolean useParseFragements = (parseFragments != null && parseFragments.length == tokens.size()); List<SemanticPredicate> preds = selectCovered(SemanticPredicate.class, sentence); for (int i = 0; i < tokens.size(); i++) { Row row = new Row(); row.paragraphId = paragraphId; row.sentenceId = sentenceId; row.tokenId = tokenId; row.token = tokens.get(i); row.args = new SemanticArgument[preds.size()]; if (useParseFragements) { row.parseFragment = parseFragments[i]; } if (useMorphology) { row.morphology = morphologies.get(i); } if (useHyphenation) { row.hyphenation = hyphenations.get(i); } // Section ID Collection<Section> section = 
sectionCoveringMap.get(row.token); if (section.size() > 0) row.sectionId = section.toArray(new Section[0])[0].getValue(); // Named entities Collection<NamedEntity> ne = neCoveringMap.get(row.token); if (ne.size() > 0) row.ne = ne.toArray(new NamedEntity[0])[0]; // Chunk Collection<Chunk> chunks = chunksCoveringMap.get(row.token); if (chunks.size() > 0) row.chunk = chunks.toArray(new Chunk[0])[0]; //Quote annotation Collection<DirectSpeech> ds = directSpeechCoveringMap.get(row.token); if (ds.size() > 0) row.directSpeech = ds.toArray(new DirectSpeech[0])[0]; //Coref Collection<CoreferenceLink> corefLinks = corefLinksCoveringMap.get(row.token); row.corefChains = UNUSED; if (corefLinks.size() > 0) { String[] chainIds = new String[corefLinks.size()]; // StringBuilder chainIdsStr = new StringBuilder(); int k = 0; for (CoreferenceLink link : corefLinks) { CoreferenceChain chain = linkToChainMap.get(link); int chainId = corefChainToIntMap.get(chain); //chainIds[k++] = chainId; String BIOMarker = "I"; if (link.getCoveredText().substring(0, row.token.getCoveredText().length()) .equals(row.token.getCoveredText())) { BIOMarker = "B"; } chainIds[k++] = BIOMarker + "-" + chainId; } //Sort without the BIO marker Arrays.sort(chainIds, new Comparator<String>() { public int compare(String idx1, String idx2) { Integer id1 = new Integer(idx1.substring(2)); Integer id2 = new Integer(idx2.substring(2)); return Integer.compare(id1, id2); } }); StringBuilder chainIdsStr = new StringBuilder(); for (String chainId : chainIds) { chainIdsStr.append(chainId + ","); } row.corefChains = chainIdsStr.substring(0, chainIdsStr.length() - 1); } //Predicate Collection<SemanticPredicate> predsForToken = predIdx.get(row.token); if (predsForToken != null && !predsForToken.isEmpty()) { row.pred = predsForToken.iterator().next(); } ctokens.put(row.token, row); tokenId++; } // Dependencies for (Dependency rel : selectCovered(Dependency.class, sentence)) { ctokens.get(rel.getDependent()).deprel = rel; } // Semantic arguments for (int p = 0; p < preds.size(); p++) { FSArray args = preds.get(p).getArguments(); //Set the column position info Collection<Token> tokensOfPredicate = pred2TokenIdx.get(preds.get(p)); for (Token t : tokensOfPredicate) { Row row = ctokens.get(t); row.semanticArgIndex = p; } //Set the arguments information for (SemanticArgument arg : select(args, SemanticArgument.class)) { for (Token t : argIdx.get(arg)) { Row row = ctokens.get(t); row.args[p] = arg; } } } sentenceId++; } paragraphId++; } // Write to output file int maxPredArguments = 0; for (Row row : ctokens.values()) { maxPredArguments = Math.max(maxPredArguments, row.args.length); } aOut.printf("%s\n", StringUtils.join(getHeader(maxPredArguments), "\t").trim()); for (Row row : ctokens.values()) { String[] output = getData(ctokens, maxPredArguments, row); aOut.printf("%s\n", StringUtils.join(output, "\t").trim()); } }
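The writer above relies on iterating ctokens.values() twice over the same LinkedHashMap: once to find the widest row (maxPredArguments) and once to emit every row in insertion order. A minimal sketch of that two-pass idiom, assuming a hypothetical Row with a variable-width argument array:

import java.util.LinkedHashMap;
import java.util.Map;

public class TwoPassOverValues {
    // Hypothetical stand-in for the writer's Row.
    static class Row {
        final String token;
        final String[] args;
        Row(String token, String... args) { this.token = token; this.args = args; }
    }

    public static void main(String[] args) {
        // LinkedHashMap keeps insertion order, so values() yields rows in token order.
        Map<String, Row> rows = new LinkedHashMap<>();
        rows.put("t1", new Row("The", "A0"));
        rows.put("t2", new Row("cat", "A0", "A1"));

        // Pass 1: find the widest row to size the header.
        int maxArgs = 0;
        for (Row r : rows.values()) {
            maxArgs = Math.max(maxArgs, r.args.length);
        }

        // Pass 2: emit each row padded to that width.
        for (Row r : rows.values()) {
            StringBuilder line = new StringBuilder(r.token);
            for (int i = 0; i < maxArgs; i++) {
                line.append('\t').append(i < r.args.length ? r.args[i] : "_");
            }
            System.out.println(line);
        }
    }
}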
From source file:org.apache.geode.internal.cache.Oplog.java
/** * This method is called by the async value recovery task to recover the values from the crf if * the keys were recovered from the krf. *//*from w ww.j a v a 2 s . c o m*/ public void recoverValuesIfNeeded(Map<Long, DiskRecoveryStore> diskRecoveryStores) { // Early out if we start closing the parent. if (getParent().isClosing()) { return; } List<KRFEntry> sortedLiveEntries; HashMap<Long, DiskRegionInfo> targetRegions = new HashMap<Long, DiskRegionInfo>(this.regionMap); synchronized (diskRecoveryStores) { Iterator<DiskRecoveryStore> itr = diskRecoveryStores.values().iterator(); while (itr.hasNext()) { DiskRecoveryStore store = itr.next(); if (isLruValueRecoveryDisabled(store) || store.lruLimitExceeded()) { itr.remove(); } } // Get the a sorted list of live entries from the target regions targetRegions.keySet().retainAll(diskRecoveryStores.keySet()); } sortedLiveEntries = getSortedLiveEntries(targetRegions.values()); if (sortedLiveEntries == null) { // There are no live entries in this oplog to recover. return; } final ByteArrayDataInput in = new ByteArrayDataInput(); for (KRFEntry entry : sortedLiveEntries) { // Early out if we start closing the parent. if (getParent().isClosing()) { return; } DiskEntry diskEntry = entry.getDiskEntry(); DiskRegionView diskRegionView = entry.getDiskRegionView(); long diskRegionId = diskRegionView.getId(); // TODO DAN ok, here's what we need to do // 1) lock and obtain the correct RegionEntry that we are recovering too. // this will likely mean obtaining the correct DiskRecoveryStore, since // with // that we can find the region entry I believe. // 2) Make sure that the lru limit is not exceeded // 3) Update the region entry with the value from disk, assuming the value // from // disk is still valid. That is going to be something like synchronized (diskRecoveryStores) { DiskRecoveryStore diskRecoveryStore = diskRecoveryStores.get(diskRegionId); if (diskRecoveryStore == null) { continue; } // Reset the disk region view because it may have changed // due to the region being created. diskRegionView = diskRecoveryStore.getDiskRegionView(); if (diskRegionView == null) { continue; } if (diskRecoveryStore.lruLimitExceeded()) { diskRecoveryStores.remove(diskRegionId); continue; } if (diskRegionView.isEntriesMapIncompatible()) { // Refetch the disk entry because it may have changed due to copying // an incompatible region map diskEntry = (DiskEntry) diskRecoveryStore.getRegionMap().getEntryInVM(diskEntry.getKey()); if (diskEntry == null) { continue; } } synchronized (diskEntry) { // Make sure the entry hasn't been modified if (diskEntry.getDiskId() != null && diskEntry.getDiskId().getOplogId() == oplogId) { // dear lord, this goes through a lot of layers. Maybe we should // skip some? // * specifically, this could end up faulting in from a different // oplog, causing // us to seek. // * Also, there may be lock ordering issues here, Really, I guess I // want // a flavor of faultInValue that only faults in from this oplog. // * We could have some churn here, opening and closing this oplog // * We also might not be buffering adjacent entries? Not sure about // that one // * Ideally, this would fault the thing in only if it were in this // oplog and the lru limit wasn't hit // and it would return a status if the lru limit was hit to make us // remove the store. try { DiskEntry.Helper.recoverValue(diskEntry, getOplogId(), diskRecoveryStore, in); } catch (RegionDestroyedException ignore) { // This region has been destroyed, stop recovering from it. 
diskRecoveryStores.remove(diskRegionId); } } } } } }
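Two values() idioms appear in the recovery code above: entries are removed through the values() view's iterator (which removes them from the backing map), and targetRegions.values() is handed straight to a method that only needs a Collection. A minimal sketch, with hypothetical names in place of the Geode types:

import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;

public class RemoveViaValuesIterator {
    public static void main(String[] args) {
        HashMap<Long, String> storesByRegionId = new HashMap<>();
        storesByRegionId.put(1L, "recoverable");
        storesByRegionId.put(2L, "over-limit");

        // Removing through the values() iterator removes the entry from the map itself.
        Iterator<String> it = storesByRegionId.values().iterator();
        while (it.hasNext()) {
            if (it.next().equals("over-limit")) {
                it.remove();
            }
        }
        System.out.println(storesByRegionId.keySet()); // [1]

        // values() can be passed directly wherever only a Collection is needed.
        process(storesByRegionId.values());
    }

    static void process(Collection<String> stores) {
        stores.forEach(System.out::println);
    }
}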
From source file:im.neon.contacts.ContactsManager.java
/** * List the local contacts.//from w ww. j a v a 2 s. c o m */ public void refreshLocalContactsSnapshot() { boolean isPopulating; synchronized (LOG_TAG) { isPopulating = mIsPopulating; } // test if there is a population is in progress if (isPopulating) { return; } synchronized (LOG_TAG) { mIsPopulating = true; } // refresh the contacts list in background Thread t = new Thread(new Runnable() { public void run() { long t0 = System.currentTimeMillis(); ContentResolver cr = mContext.getContentResolver(); HashMap<String, Contact> dict = new HashMap<>(); // test if the user allows to access to the contact if (isContactBookAccessAllowed()) { // get the names Cursor namesCur = null; try { namesCur = cr.query(ContactsContract.Data.CONTENT_URI, new String[] { ContactsContract.Contacts.DISPLAY_NAME_PRIMARY, ContactsContract.CommonDataKinds.StructuredName.CONTACT_ID, ContactsContract.Contacts.PHOTO_THUMBNAIL_URI }, ContactsContract.Data.MIMETYPE + " = ?", new String[] { ContactsContract.CommonDataKinds.StructuredName.CONTENT_ITEM_TYPE }, null); } catch (Exception e) { Log.e(LOG_TAG, "## refreshLocalContactsSnapshot(): Exception - Contact names query Msg=" + e.getMessage()); } if (namesCur != null) { try { while (namesCur.moveToNext()) { String displayName = namesCur.getString( namesCur.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME_PRIMARY)); String contactId = namesCur.getString(namesCur.getColumnIndex( ContactsContract.CommonDataKinds.StructuredName.CONTACT_ID)); String thumbnailUri = namesCur.getString(namesCur.getColumnIndex( ContactsContract.CommonDataKinds.StructuredName.PHOTO_THUMBNAIL_URI)); if (null != contactId) { Contact contact = dict.get(contactId); if (null == contact) { contact = new Contact(contactId); dict.put(contactId, contact); } if (null != displayName) { contact.setDisplayName(displayName); } if (null != thumbnailUri) { contact.setThumbnailUri(thumbnailUri); } } } } catch (Exception e) { Log.e(LOG_TAG, "## refreshLocalContactsSnapshot(): Exception - Contact names query2 Msg=" + e.getMessage()); } namesCur.close(); } // get the phonenumbers Cursor phonesCur = null; try { phonesCur = cr.query(ContactsContract.CommonDataKinds.Phone.CONTENT_URI, new String[] { ContactsContract.CommonDataKinds.Phone.NUMBER, ContactsContract.CommonDataKinds.Phone.NORMALIZED_NUMBER, ContactsContract.CommonDataKinds.Phone.CONTACT_ID }, null, null, null); } catch (Exception e) { Log.e(LOG_TAG, "## refreshLocalContactsSnapshot(): Exception - Phone numbers query Msg=" + e.getMessage()); } if (null != phonesCur) { try { while (phonesCur.moveToNext()) { final String pn = phonesCur.getString( phonesCur.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER)); final String pnE164 = phonesCur.getString(phonesCur .getColumnIndex(ContactsContract.CommonDataKinds.Phone.NORMALIZED_NUMBER)); if (!TextUtils.isEmpty(pn)) { String contactId = phonesCur.getString(phonesCur .getColumnIndex(ContactsContract.CommonDataKinds.Phone.CONTACT_ID)); if (null != contactId) { Contact contact = dict.get(contactId); if (null == contact) { contact = new Contact(contactId); dict.put(contactId, contact); } contact.addPhoneNumber(pn, pnE164); } } } } catch (Exception e) { Log.e(LOG_TAG, "## refreshLocalContactsSnapshot(): Exception - Phone numbers query2 Msg=" + e.getMessage()); } phonesCur.close(); } // get the emails Cursor emailsCur = null; try { emailsCur = cr .query(ContactsContract.CommonDataKinds.Email.CONTENT_URI, new String[] { ContactsContract.CommonDataKinds.Email.DATA, // actual email 
ContactsContract.CommonDataKinds.Email.CONTACT_ID }, null, null, null); } catch (Exception e) { Log.e(LOG_TAG, "## refreshLocalContactsSnapshot(): Exception - Emails query Msg=" + e.getMessage()); } if (emailsCur != null) { try { while (emailsCur.moveToNext()) { String email = emailsCur.getString( emailsCur.getColumnIndex(ContactsContract.CommonDataKinds.Email.DATA)); if (!TextUtils.isEmpty(email)) { String contactId = emailsCur.getString(emailsCur .getColumnIndex(ContactsContract.CommonDataKinds.Email.CONTACT_ID)); if (null != contactId) { Contact contact = dict.get(contactId); if (null == contact) { contact = new Contact(contactId); dict.put(contactId, contact); } contact.addEmailAdress(email); } } } } catch (Exception e) { Log.e(LOG_TAG, "## refreshLocalContactsSnapshot(): Exception - Emails query2 Msg=" + e.getMessage()); } emailsCur.close(); } } synchronized (LOG_TAG) { mContactsList = new ArrayList<>(dict.values()); mIsPopulating = false; } if (0 != mContactsList.size()) { long delta = System.currentTimeMillis() - t0; VectorApp.sendGAStats(VectorApp.getInstance(), VectorApp.GOOGLE_ANALYTICS_STATS_CATEGORY, VectorApp.GOOGLE_ANALYTICS_STARTUP_CONTACTS_ACTION, mContactsList.size() + " contacts in " + delta + " ms", delta); } // define the PIDs listener PIDsRetriever.getInstance().setPIDsRetrieverListener(mPIDsRetrieverListener); // trigger a PIDs retrieval // add a network listener to ensure that the PIDS will be retreived asap a valid network will be found. MXSession defaultSession = Matrix.getInstance(VectorApp.getInstance()).getDefaultSession(); if (null != defaultSession) { defaultSession.getNetworkConnectivityReceiver().addEventListener(mNetworkConnectivityReceiver); // reset the PIDs retriever statuses mIsRetrievingPids = false; mArePidsRetrieved = false; // the PIDs retrieval is done on demand. } if (null != mListeners) { Handler handler = new Handler(Looper.getMainLooper()); handler.post(new Runnable() { @Override public void run() { for (ContactsManagerListener listener : mListeners) { try { listener.onRefresh(); } catch (Exception e) { Log.e(LOG_TAG, "refreshLocalContactsSnapshot : onRefresh failed" + e.getMessage()); } } } }); } } }); t.setPriority(Thread.MIN_PRIORITY); t.start(); }
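The background thread above builds the contacts in a local HashMap and then, under a lock, publishes new ArrayList<>(dict.values()) so readers never see the mutable map. A minimal sketch of that publish-a-snapshot idiom (simplified, not the Android contacts API):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class PublishValuesSnapshot {
    private final Object lock = new Object();
    private List<String> contacts = new ArrayList<>();

    void refresh() {
        // Build privately; no other thread ever sees this map.
        HashMap<String, String> byId = new HashMap<>();
        byId.put("42", "Alice");
        byId.put("43", "Bob");

        // Publish an independent copy of the values under the lock.
        synchronized (lock) {
            contacts = new ArrayList<>(byId.values());
        }
    }

    public static void main(String[] args) {
        new PublishValuesSnapshot().refresh();
    }
}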
From source file:edu.ku.brc.specify.conversion.ConvertVerifier.java
/** * @param databaseNameSource/*from w w w .j a va 2s .c om*/ * @param databaseNameDest * @throws Exception */ public void verifyDB(final String databaseNameSource, final String databaseNameDest) throws Exception { String path = UIRegistry.getUserHomeDir() + File.separator + "verify"; convLogger.initialize(path, databaseNameDest); String title = "From " + databaseNameSource + " to " + databaseNameDest; System.out.println("************************************************************"); System.out.println(title); System.out.println("************************************************************"); HibernateUtil.shutdown(); Properties initPrefs = BuildSampleDatabase.getInitializePrefs(databaseNameDest); String driverNameSource = ""; String databaseHostSource = ""; DatabaseDriverInfo driverInfoSource = null; String driverNameDest = ""; String databaseHostDest = ""; DatabaseDriverInfo driverInfoDest = null; log.debug("Running an non-custom MySQL convert, using old default login creds"); driverNameSource = initPrefs.getProperty("initializer.driver", "MySQL"); databaseHostSource = initPrefs.getProperty("initializer.host", "localhost"); driverNameDest = initPrefs.getProperty("initializer.driver", "MySQL"); databaseHostDest = initPrefs.getProperty("initializer.host", "localhost"); log.debug("Custom Convert Source Properties ----------------------"); log.debug("databaseNameSource: " + databaseNameSource); log.debug("driverNameSource: " + driverNameSource); log.debug("databaseHostSource: " + databaseHostSource); log.debug("Custom Convert Destination Properties ----------------------"); log.debug("databaseNameDest: " + databaseNameDest); log.debug("driverNameDest: " + driverNameDest); log.debug("databaseHostDest: " + databaseHostDest); driverInfoSource = DatabaseDriverInfo.getDriver(driverNameSource); driverInfoDest = DatabaseDriverInfo.getDriver(driverNameDest); if (driverInfoSource == null) { throw new RuntimeException( "Couldn't find Source DB driver by name [" + driverInfoSource + "] in driver list."); } if (driverInfoDest == null) { throw new RuntimeException( "Couldn't find Destination driver by name [" + driverInfoDest + "] in driver list."); } if (driverNameDest.equals("MySQL")) BasicSQLUtils.myDestinationServerType = BasicSQLUtils.SERVERTYPE.MySQL; else if (driverNameDest.equals("SQLServer")) BasicSQLUtils.myDestinationServerType = BasicSQLUtils.SERVERTYPE.MS_SQLServer; if (driverNameSource.equals("MySQL")) BasicSQLUtils.mySourceServerType = BasicSQLUtils.SERVERTYPE.MySQL; else if (driverNameSource.equals("SQLServer")) BasicSQLUtils.mySourceServerType = BasicSQLUtils.SERVERTYPE.MS_SQLServer; else { log.error("Error setting ServerType for destination database for conversion. 
Could affect the" + " way that SQL string are generated and executed on differetn DB egnines"); } String destConnectionString = driverInfoDest.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostDest, "", itUsrPwd.first, itUsrPwd.second, driverNameDest); log.debug("attempting login to destination: " + destConnectionString); // This will log us in and return true/false // This will connect without specifying a DB, which allows us to create the DB if (!UIHelper.tryLogin(driverInfoDest.getDriverClassName(), driverInfoDest.getDialectClassName(), databaseNameDest, destConnectionString, itUsrPwd.first, itUsrPwd.second)) { log.error("Failed connection string: " + driverInfoSource.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostDest, databaseNameDest, itUsrPwd.first, itUsrPwd.second, driverNameDest)); throw new RuntimeException( "Couldn't login into [" + databaseNameDest + "] " + DBConnection.getInstance().getErrorMsg()); } convLogger.setIndexTitle(databaseNameDest + " Verify " + (new SimpleDateFormat("yyy-MM-dd hh:mm:ss")).format(Calendar.getInstance().getTime())); //MEG WHY IS THIS COMMENTED OUT??? //DataBuilder.setSession(HibernateUtil.getNewSession()); log.debug("DESTINATION driver class: " + driverInfoDest.getDriverClassName()); log.debug("DESTINATION dialect class: " + driverInfoDest.getDialectClassName()); log.debug("DESTINATION Connection String: " + driverInfoDest.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostDest, databaseNameDest, itUsrPwd.first, itUsrPwd.second, driverNameDest)); // This will log us in and return true/false if (!UIHelper.tryLogin(driverInfoDest.getDriverClassName(), driverInfoDest.getDialectClassName(), databaseNameDest, driverInfoDest.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostDest, databaseNameDest, itUsrPwd.first, itUsrPwd.second, driverNameDest), itUsrPwd.first, itUsrPwd.second)) { throw new RuntimeException( "Couldn't login into [" + databaseNameDest + "] " + DBConnection.getInstance().getErrorMsg()); } String srcConStr = driverInfoSource.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostSource, databaseNameSource, itUsrPwd.first, itUsrPwd.second, driverNameSource); DBConnection oldDB = DBConnection.createInstance(driverInfoSource.getDriverClassName(), null, databaseNameSource, srcConStr, itUsrPwd.first, itUsrPwd.second); oldDBConn = oldDB.getConnection(); if (oldDBConn == null) { throw new RuntimeException(oldDB.getErrorMsg()); } newDBConn = DBConnection.getInstance().createConnection(); newDBStmt = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); oldDBStmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); IdMapperMgr.getInstance().setDBs(oldDBConn, newDBConn); long startTime = System.currentTimeMillis(); String[] tableNames = { "CollectingEvent", "CollectingEvent", "Locality", "Locality" }; for (int i = 0; i < tableNames.length; i += 2) { verifyTableCounts(tableNames[i].toLowerCase(), tableNames[i + 1].toLowerCase()); } progressFrame = new ProgressFrame("Checking Catalog Objects...."); progressFrame.adjustProgressFrame(); String cntSQL = compareTo6DBs ? 
"SELECT COUNT(*) FROM collectionobject" : "SELECT COUNT(*) FROM collectionobjectcatalog WHERE CollectionObjectTypeID > 8 && CollectionObjectTypeID < 20"; Integer numColObjs = BasicSQLUtils.getCount(oldDBConn, cntSQL); progressFrame.setProcess(0, numColObjs); //progressFrame.setDesc("Checking Catalog Objects...."); progressFrame.setOverall(0, numColObjs * 4); progressFrame.setOverall(0); progressFrame.setDesc(""); UIHelper.centerAndShow(progressFrame); SwingUtilities.invokeLater(new Runnable() { public void run() { UIHelper.centerAndShow(progressFrame); } }); HashMap<Integer, TableWriter> tblWriterHash = new HashMap<Integer, TableWriter>(); for (int i = 1; i < labels.length - 1; i++) { tblWriter = convLogger.getWriter(labels[i] + ".html", labels[i]); //printVerifyHeader(labels[i]); tblWriter.startTable(); tblWriter.logHdr("ID", "Desc"); tblWriterHash.put(codes[i], tblWriter); System.out.println(codes[i] + " - " + labels[i]); } boolean nullCEOk = false; File ceFile = new File(databaseNameDest + ".ce_all"); if (ceFile.exists()) { nullCEOk = true; //ceFile.delete(); } nullCEOk = true; // For Debug coOptions = DO_CO_ALL; //if (coOptions > NO_OPTIONS) { int i = 0; Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); String sql = compareTo6DBs ? "SELECT CatalogNumber FROM collectionobject ORDER BY CatalogNumber ASC" : "SELECT CatalogNumber FROM collectionobjectcatalog WHERE CollectionObjectTypeID > 8 && CollectionObjectTypeID < 20 AND SubNumber >= 0 ORDER BY CatalogNumber ASC"; ResultSet rs = stmt.executeQuery(sql); while (rs.next()) { int oldCatNum = rs.getInt(1); String newCatNum = convertCatNum(oldCatNum); //if (oldCatNum < 1643) continue; if (isCOOn(DO_CO_DETERMINER)) { tblWriter = tblWriterHash.get(DO_CO_DETERMINER); if (!verifyDeterminer(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_DETERMINER]++; } } if (isCOOn(DO_CO_CATLOGER)) { tblWriter = tblWriterHash.get(DO_CO_CATLOGER); if (!verifyCataloger(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_CATLOGER]++; } } if (isCOOn(DO_CO_COLLECTORS)) { tblWriter = tblWriterHash.get(DO_CO_COLLECTORS); if (!verifyCollector(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_COLLECTORS]++; } } if (isCOOn(DO_CO_GEO)) { tblWriter = tblWriterHash.get(DO_CO_GEO); if (!verifyGeography(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_GEO]++; } } if (isCOOn(DO_CO_CE)) { tblWriter = tblWriterHash.get(DO_CO_CE); if (!verifyCollectingEvent(oldCatNum, newCatNum, nullCEOk)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_CE]++; } } if (isCOOn(DO_CO_TAXON)) { tblWriter = tblWriterHash.get(DO_CO_TAXON); if (!verifyTaxon(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_TAXON]++; } } if (isCOOn(DO_CO_LOCALITY)) { tblWriter = tblWriterHash.get(DO_CO_LOCALITY); if (!verifyCOToLocality(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_LOCALITY]++; } } if (isCOOn(DO_CO_PREPARATION)) { tblWriter = tblWriterHash.get(DO_CO_PREPARATION); if (!verifyPreparation(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_PREPARATION]++; } } if (isCOOn(DO_CO_PREPARER)) { tblWriter = tblWriterHash.get(DO_CO_PREPARER); if (!verifyPreparer(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_CO_PREPARER]++; } } if (isCOOn(DO_TAXON_CIT)) { tblWriter = 
tblWriterHash.get(DO_TAXON_CIT); if (!verifyTaxonCitations(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_TAXON_CIT]++; } } if (isCOOn(DO_OTHER_IDENT)) { tblWriter = tblWriterHash.get(DO_OTHER_IDENT); if (!verifyOtherIdentifier(oldCatNum, newCatNum)) { catNumsInErrHash.put(newCatNum, oldCatNum); errorCnts[DO_OTHER_IDENT]++; } } if ((i % 100) == 0) { System.out.println(i + " " + oldCatNum); progressFrame.setProcess(i); progressFrame.setOverall(i); } if ((i % 1000) == 0) { for (TableWriter tw : tblWriterHash.values()) { tw.flush(); } } i++; } rs.close(); stmt.close(); } for (int i = 0; i < errorCnts.length; i++) { if (errorCnts[i] > 0) { System.out.println(i + " -> " + errorCnts[i]); } } progressFrame.setProcess(numColObjs); if (isCOOn(DO_COLLECTORS)) { tblWriter = tblWriterHash.get(DO_COLLECTORS); //verifyCollectors(); } if (isCOOn(DO_AGENTS)) { tblWriter = tblWriterHash.get(DO_AGENTS); verifyAgents(); } progressFrame.setOverall(numColObjs * 2); if (isCOOn(DO_COLLEVENTS)) { tblWriter = tblWriterHash.get(DO_COLLEVENTS); verifyCEs(); } //progressFrame.setOverall(numColObjs*2); if (isCOOn(DO_COLLEVENTS)) { tblWriter = tblWriterHash.get(DO_COLLEVENTS); verifyShipments(); } if (isCOOn(DO_LOANS)) { tblWriter = tblWriterHash.get(DO_LOANS); verifyLoans(); verifyGifts(); verifyLoanRetPreps(); } for (TableWriter tw : tblWriterHash.values()) { tw.endTable(); } progressFrame.setOverall(numColObjs * 3); tblWriter = convLogger.getWriter("CatalogNumberSummary.html", "Catalog Nummber Summary"); tblWriter.startTable(); tblWriter.logHdr("Number", "Description"); tblWriter.logErrors(Integer.toString(numErrors), "All Errors"); tblWriter.logErrors(Integer.toString(catNumsInErrHash.size()), "Catalog Number with Errors"); tblWriter.endTable(); tblWriter.println("<BR>"); tblWriter.println("Catalog Summary:<BR>"); Vector<String> catNumList = new Vector<String>(catNumsInErrHash.keySet()); Collections.sort(catNumList); for (String catNum : catNumList) { tblWriter.println(catNum + "<BR>"); } tblWriter.println("<BR>"); numErrors = 0; //----------------------------------------------------------------------------------------------------------- // Accessions //----------------------------------------------------------------------------------------------------------- // For Debug acOptions = DO_AC_ALL; HashMap<Long, TableWriter> accTblWriterHash = new HashMap<Long, TableWriter>(); for (int i = 1; i < accLabels.length; i++) { long id = (long) Math.pow(2, i - 1); id = Math.max(id, 1); tblWriter = convLogger.getWriter("accession_" + accLabels[i] + ".html", "Accession " + accLabels[i]); tblWriter.startTable(); tblWriter.logHdr("ID", "Desc"); accTblWriterHash.put(id, tblWriter); } if (acOptions > NO_OPTIONS) { int i = 0; Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet rs = stmt.executeQuery("SELECT Number FROM accession ORDER BY Number ASC"); while (rs.next()) { String oldAccNum = rs.getString(1); String newAccNum = oldAccNum; if (isACOn(DO_ACCESSIONS)) { tblWriter = accTblWriterHash.get(DO_ACCESSIONS); if (!verifyAccessions(oldAccNum, newAccNum)) { log.error("Accession Num: " + oldAccNum); accNumsInErrHash.put(newAccNum, oldAccNum); } //log.error("New SQL: "+newSQL); //log.error("Old SQL: "+oldSQL); //break; } if (isACOn(DO_AC_AGENTS)) { tblWriter = accTblWriterHash.get(DO_AC_AGENTS); if (!verifyAccessionAgents(oldAccNum, newAccNum)) { log.error("Accession Num: " + oldAccNum); accNumsInErrHash.put(newAccNum, oldAccNum); } 
//log.error("New SQL: "+newSQL); //log.error("Old SQL: "+oldSQL); //break; } if ((i % 100) == 0) { System.out.println(i + " " + oldAccNum); } i++; } rs.close(); stmt.close(); } progressFrame.setOverall(numColObjs * 4); newDBConn.close(); oldDBConn.close(); for (TableWriter tw : accTblWriterHash.values()) { tw.endTable(); } printAccessionTotal("Accession"); File indexFile = convLogger.closeAll(); long endTime = System.currentTimeMillis(); int convertTimeInSeconds = (int) ((endTime - startTime) / 1000.0); //ConvertStatSender sender = new ConvertStatSender("verify.php"); //sender.senConvertInfo(databaseNameDest, numColObjs, convertTimeInSeconds); log.info("Done."); progressFrame.setVisible(false); AttachmentUtils.openURI(indexFile.toURI()); System.exit(0); }
From source file:com.bstek.dorado.idesupport.initializer.CommonRuleTemplateInitializer.java
protected Collection<AutoPropertyTemplate> getProperties(Class<?> type, XmlNodeInfo xmlNodeInfo, InitializerContext initializerContext) throws Exception { HashMap<String, AutoPropertyTemplate> properties = new LinkedHashMap<String, AutoPropertyTemplate>(); RuleTemplateManager ruleTemplateManager = initializerContext.getRuleTemplateManager(); if (xmlNodeInfo != null) { if (xmlNodeInfo.isInheritable()) { AutoPropertyTemplate propertyTemplate = new AutoPropertyTemplate("impl"); propertyTemplate.setPrimitive(true); properties.put(propertyTemplate.getName(), propertyTemplate); propertyTemplate = new AutoPropertyTemplate("parent"); propertyTemplate.setPrimitive(true); properties.put(propertyTemplate.getName(), propertyTemplate); }// ww w . j a v a2 s . com if (xmlNodeInfo.isScopable()) { AutoPropertyTemplate propertyTemplate = new AutoPropertyTemplate("scope"); propertyTemplate.setPrimitive(true); Object[] ecs = Scope.class.getEnumConstants(); String[] enumValues = new String[ecs.length]; for (int i = 0; i < ecs.length; i++) { enumValues[i] = ecs[i].toString(); } propertyTemplate.setEnumValues(enumValues); properties.put(propertyTemplate.getName(), propertyTemplate); } if (StringUtils.isNotEmpty(xmlNodeInfo.getDefinitionType())) { Class<?> definitionType = ClassUtils.forName(xmlNodeInfo.getDefinitionType()); if (ListenableObjectDefinition.class.isAssignableFrom(definitionType)) { AutoPropertyTemplate propertyTemplate = new AutoPropertyTemplate("listener"); propertyTemplate.setPrimitive(true); properties.put(propertyTemplate.getName(), propertyTemplate); } if (InterceptableDefinition.class.isAssignableFrom(definitionType)) { AutoPropertyTemplate propertyTemplate = new AutoPropertyTemplate("interceptor"); propertyTemplate.setPrimitive(true); properties.put(propertyTemplate.getName(), propertyTemplate); } } for (Map.Entry<String, String> entry : xmlNodeInfo.getFixedProperties().entrySet()) { String propertyName = entry.getKey(); String value = entry.getValue(); AutoPropertyTemplate propertyTemplate = new AutoPropertyTemplate(propertyName); propertyTemplate.setDefaultValue(value); propertyTemplate.setPrimitive(true); propertyTemplate.setFixed(true); propertyTemplate.setVisible(false); properties.put(propertyName, propertyTemplate); } for (Map.Entry<String, XmlProperty> entry : xmlNodeInfo.getProperties().entrySet()) { String propertyName = entry.getKey(); XmlProperty xmlProperty = entry.getValue(); TypeInfo propertyTypeInfo = TypeInfo.parse(xmlProperty.propertyType()); Class<?> propertyType = null; if (propertyTypeInfo != null) { propertyType = propertyTypeInfo.getType(); } AutoPropertyTemplate propertyTemplate = new AutoPropertyTemplate(propertyName, xmlProperty); propertyTemplate.setPrimitive(xmlProperty.attributeOnly()); if (propertyType != null && !propertyType.equals(String.class)) { propertyTemplate.setType(propertyType.getName()); } if (xmlProperty.composite()) { initCompositeProperty(propertyTemplate, propertyType, initializerContext); } propertyTemplate.setDeprecated(xmlProperty.deprecated()); properties.put(propertyName, propertyTemplate); } } PropertyDescriptor[] propertyDescriptors = PropertyUtils.getPropertyDescriptors(type); for (PropertyDescriptor propertyDescriptor : propertyDescriptors) { Method readMethod = propertyDescriptor.getReadMethod(); if (readMethod != null && propertyDescriptor.getWriteMethod() != null) { if (readMethod.getDeclaringClass() != type) { try { readMethod = type.getDeclaredMethod(readMethod.getName(), readMethod.getParameterTypes()); } catch 
(NoSuchMethodException e) { continue; } } String propertyName = propertyDescriptor.getName(); XmlSubNode xmlSubNode = readMethod.getAnnotation(XmlSubNode.class); if (xmlSubNode != null) { continue; } TypeInfo propertyTypeInfo; Class<?> propertyType = propertyDescriptor.getPropertyType(); if (Collection.class.isAssignableFrom(propertyType)) { propertyTypeInfo = TypeInfo.parse((ParameterizedType) readMethod.getGenericReturnType(), true); propertyType = propertyTypeInfo.getType(); } else { propertyTypeInfo = new TypeInfo(propertyType, false); } AutoPropertyTemplate propertyTemplate = null; XmlProperty xmlProperty = readMethod.getAnnotation(XmlProperty.class); if (xmlProperty != null) { if (xmlProperty.unsupported()) { continue; } propertyTemplate = properties.get(propertyName); if (propertyTemplate == null) { propertyTemplate = new AutoPropertyTemplate(propertyName, readMethod, xmlProperty); propertyTemplate.setPrimitive(xmlProperty.attributeOnly()); } if (("dataSet".equals(propertyName) || "dataPath".equals(propertyName) || "property".equals(propertyName)) && DataControl.class.isAssignableFrom(type)) { propertyTemplate.setHighlight(1); } if (xmlProperty.composite()) { initCompositeProperty(propertyTemplate, propertyType, initializerContext); } int clientTypes = ClientType.parseClientTypes(xmlProperty.clientTypes()); if (clientTypes > 0) { propertyTemplate.setClientTypes(clientTypes); } propertyTemplate.setDeprecated(xmlProperty.deprecated()); } else if (EntityUtils.isSimpleType(propertyType) || propertyType.equals(Class.class) || propertyType.isArray() && propertyType.getComponentType().equals(String.class)) { propertyTemplate = new AutoPropertyTemplate(propertyName, readMethod, xmlProperty); } if (propertyTemplate != null) { propertyTemplate.setType(propertyDescriptor.getPropertyType().getName()); if (propertyType.isEnum()) { Object[] ecs = propertyType.getEnumConstants(); String[] enumValues = new String[ecs.length]; for (int i = 0; i < ecs.length; i++) { enumValues[i] = ecs[i].toString(); } propertyTemplate.setEnumValues(enumValues); } ComponentReference componentReference = readMethod.getAnnotation(ComponentReference.class); if (componentReference != null) { ReferenceTemplate referenceTemplate = new LazyReferenceTemplate(ruleTemplateManager, componentReference.value(), "id"); propertyTemplate.setReference(referenceTemplate); } IdeProperty ideProperty = readMethod.getAnnotation(IdeProperty.class); if (ideProperty != null) { propertyTemplate.setVisible(ideProperty.visible()); propertyTemplate.setEditor(ideProperty.editor()); propertyTemplate.setHighlight(ideProperty.highlight()); if (StringUtils.isNotEmpty(ideProperty.enumValues())) { propertyTemplate.setEnumValues(StringUtils.split(ideProperty.enumValues(), ",;")); } } ClientProperty clientProperty = readMethod.getAnnotation(ClientProperty.class); if (clientProperty != null) { propertyTemplate.setDefaultValue(clientProperty.escapeValue()); } properties.put(propertyName, propertyTemplate); } } } return properties.values(); }
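This initializer accumulates templates keyed by property name (a LinkedHashMap assigned to a HashMap variable, so insertion order is preserved) and returns properties.values() directly, giving the caller an ordered view without a copy; later puts with the same key simply replace the earlier template. A minimal sketch of building and returning the values view:

import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;

public class ReturnValuesView {
    static Collection<String> collectProperties() {
        // LinkedHashMap keeps the order in which properties were registered.
        HashMap<String, String> properties = new LinkedHashMap<>();
        properties.put("impl", "primitive");
        properties.put("parent", "primitive");
        properties.put("scope", "enum");

        // A later put with the same key replaces the earlier entry, so there is one value per name.
        properties.put("scope", "enum(page,view,request)");

        // Returning the view is cheap; callers should copy it if it must outlive the map.
        return properties.values();
    }

    public static void main(String[] args) {
        System.out.println(collectProperties());
    }
}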
From source file:com.intellectualcrafters.plot.database.SQLManager.java
/** * @return// ww w . ja v a 2 s.c o m */ @Override public LinkedHashMap<String, HashMap<PlotId, Plot>> getPlots() { final LinkedHashMap<String, HashMap<PlotId, Plot>> newplots = new LinkedHashMap<String, HashMap<PlotId, Plot>>(); try { final DatabaseMetaData data = connection.getMetaData(); ResultSet rs = data.getColumns(null, null, prefix + "plot", "plot_id"); final boolean execute = rs.next(); if (execute) { final Statement statement = connection.createStatement(); statement.addBatch("ALTER IGNORE TABLE `" + prefix + "plot` ADD `plot_id_x` int(11) DEFAULT 0"); statement.addBatch("ALTER IGNORE TABLE `" + prefix + "plot` ADD `plot_id_z` int(11) DEFAULT 0"); statement.addBatch("UPDATE `" + prefix + "plot` SET\n" + " `plot_id_x` = IF(" + " LOCATE(';', `plot_id`) > 0," + " SUBSTRING(`plot_id`, 1, LOCATE(';', `plot_id`) - 1)," + " `plot_id`" + " )," + " `plot_id_z` = IF(" + " LOCATE(';', `plot_id`) > 0," + " SUBSTRING(`plot_id`, LOCATE(';', `plot_id`) + 1)," + " NULL" + " )"); statement.addBatch("ALTER TABLE `" + prefix + "plot` DROP `plot_id`"); statement.addBatch( "ALTER IGNORE TABLE `" + prefix + "plot_settings` ADD `flags` VARCHAR(512) DEFAULT NULL"); statement.executeBatch(); statement.close(); } rs = data.getColumns(null, null, prefix + "plot_settings", "merged"); if (!rs.next()) { final Statement statement = connection.createStatement(); statement.addBatch("ALTER TABLE `" + prefix + "plot_settings` ADD `merged` int(11) DEFAULT NULL"); statement.executeBatch(); statement.close(); } } catch (final Exception e) { e.printStackTrace(); } final HashMap<Integer, Plot> plots = new HashMap<Integer, Plot>(); Statement stmt = null; try { Set<String> worlds = new HashSet<String>(); if (PlotMain.config.contains("worlds")) { worlds = PlotMain.config.getConfigurationSection("worlds").getKeys(false); } final HashMap<String, UUID> uuids = new HashMap<String, UUID>(); final HashMap<String, Integer> noExist = new HashMap<String, Integer>(); /* * Getting plots */ stmt = connection.createStatement(); ResultSet r = stmt.executeQuery( "SELECT `id`, `plot_id_x`, `plot_id_z`, `owner`, `world` FROM `" + prefix + "plot`"); PlotId plot_id; int id; Plot p; String o; UUID user; while (r.next()) { plot_id = new PlotId(r.getInt("plot_id_x"), r.getInt("plot_id_z")); id = r.getInt("id"); final String worldname = r.getString("world"); if (!worlds.contains(worldname)) { if (noExist.containsKey(worldname)) { noExist.put(worldname, noExist.get(worldname) + 1); } else { noExist.put(worldname, 1); } } o = r.getString("owner"); user = uuids.get(o); if (user == null) { user = UUID.fromString(o); uuids.put(o, user); } p = new Plot(plot_id, user, Biome.FOREST, new ArrayList<UUID>(), new ArrayList<UUID>(), new ArrayList<UUID>(), "", PlotHomePosition.DEFAULT, null, worldname, new boolean[] { false, false, false, false }); plots.put(id, p); } // stmt.close(); /* * Getting helpers */ // stmt = connection.createStatement(); r = stmt.executeQuery("SELECT `user_uuid`, `plot_plot_id` FROM `" + prefix + "plot_helpers`"); while (r.next()) { id = r.getInt("plot_plot_id"); o = r.getString("user_uuid"); user = uuids.get(o); if (user == null) { user = UUID.fromString(o); uuids.put(o, user); } final Plot plot = plots.get(id); if (plot != null) { plot.addHelper(user); } else { PlotMain.sendConsoleSenderMessage("&cPLOT " + id + " in plot_helpers does not exist. 
Please create the plot or remove this entry."); } } // stmt.close(); /* * Getting trusted */ // stmt = connection.createStatement(); r = stmt.executeQuery("SELECT `user_uuid`, `plot_plot_id` FROM `" + prefix + "plot_trusted`"); while (r.next()) { id = r.getInt("plot_plot_id"); o = r.getString("user_uuid"); user = uuids.get(o); if (user == null) { user = UUID.fromString(o); uuids.put(o, user); } final Plot plot = plots.get(id); if (plot != null) { plot.addTrusted(user); } else { PlotMain.sendConsoleSenderMessage("&cPLOT " + id + " in plot_trusted does not exist. Please create the plot or remove this entry."); } } // stmt.close(); /* * Getting denied */ // stmt = connection.createStatement(); r = stmt.executeQuery("SELECT `user_uuid`, `plot_plot_id` FROM `" + prefix + "plot_denied`"); while (r.next()) { id = r.getInt("plot_plot_id"); o = r.getString("user_uuid"); user = uuids.get(o); if (user == null) { user = UUID.fromString(o); uuids.put(o, user); } final Plot plot = plots.get(id); if (plot != null) { plot.addDenied(user); } else { PlotMain.sendConsoleSenderMessage("&cPLOT " + id + " in plot_denied does not exist. Please create the plot or remove this entry."); } } // stmt.close(); // stmt = connection.createStatement(); r = stmt.executeQuery("SELECT * FROM `" + prefix + "plot_settings`"); while (r.next()) { id = r.getInt("plot_plot_id"); final Plot plot = plots.get(id); if (plot != null) { final String b = r.getString("biome"); Biome biome = null; if (b != null) { for (final Biome mybiome : Biome.values()) { if (mybiome.toString().equalsIgnoreCase(b)) { biome = mybiome; break; } } } final String alias = r.getString("alias"); if (alias != null) { plot.settings.setAlias(alias); } final String pos = r.getString("position"); if (pos != null) { for (final PlotHomePosition plotHomePosition : PlotHomePosition.values()) { if (plotHomePosition.isMatching(pos)) { if (plotHomePosition != PlotHomePosition.DEFAULT) { plot.settings.setPosition(plotHomePosition); } break; } } } final Integer m = r.getInt("merged"); if (m != null) { final boolean[] merged = new boolean[4]; for (int i = 0; i < 4; i++) { merged[3 - i] = ((m) & (1 << i)) != 0; } plot.settings.setMerged(merged); } else { plot.settings.setMerged(new boolean[] { false, false, false, false }); } String[] flags_string; final String myflags = r.getString("flags"); if (myflags == null) { flags_string = new String[] {}; } else { flags_string = myflags.split(","); } final ArrayList<Flag> flags = new ArrayList<Flag>(); boolean exception = false; for (final String element : flags_string) { if (element.contains(":")) { final String[] split = element.split(":"); try { flags.add(new Flag(FlagManager.getFlag(split[0], true), split[1].replaceAll("\u00AF", ":").replaceAll("", ","))); } catch (final Exception e) { exception = true; } } else { flags.add(new Flag(FlagManager.getFlag(element, true), "")); } } if (exception) { PlotMain.sendConsoleSenderMessage( "&cPlot " + id + " had an invalid flag. A fix has been attempted."); setFlags(id, flags.toArray(new Flag[0])); } plot.settings.setFlags(flags.toArray(new Flag[0])); } else { PlotMain.sendConsoleSenderMessage("&cPLOT " + id + " in plot_settings does not exist. 
Please create the plot or remove this entry."); } } stmt.close(); for (final Plot plot : plots.values()) { final String world = plot.world; if (!newplots.containsKey(world)) { newplots.put(world, new HashMap<PlotId, Plot>()); } newplots.get(world).put(plot.id, plot); } boolean invalidPlot = false; for (final String worldname : noExist.keySet()) { invalidPlot = true; PlotMain.sendConsoleSenderMessage("&c[WARNING] Found " + noExist.get(worldname) + " plots in DB for non existant world; '" + worldname + "'."); } if (invalidPlot) { PlotMain.sendConsoleSenderMessage( "&c[WARNING] - Please create the world/s or remove the plots using the purge command"); } } catch (final SQLException e) { Logger.add(LogLevel.WARNING, "Failed to load plots."); e.printStackTrace(); } return newplots; }
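After loading and decorating the plots, the manager walks plots.values() once and regroups them by world into a nested map for the return value. A minimal sketch of that regrouping idiom, with a hypothetical Plot holding only the fields needed here:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class RegroupValues {
    // Hypothetical stand-in for the plugin's Plot.
    static class Plot {
        final String world;
        final String id;
        Plot(String world, String id) { this.world = world; this.id = id; }
    }

    public static void main(String[] args) {
        HashMap<Integer, Plot> plotsByRowId = new HashMap<>();
        plotsByRowId.put(1, new Plot("world", "0;0"));
        plotsByRowId.put(2, new Plot("world", "0;1"));
        plotsByRowId.put(3, new Plot("nether", "1;1"));

        // Regroup by world: outer map keyed by world, inner map keyed by plot id.
        Map<String, Map<String, Plot>> byWorld = new LinkedHashMap<>();
        for (Plot plot : plotsByRowId.values()) {
            byWorld.computeIfAbsent(plot.world, w -> new HashMap<>()).put(plot.id, plot);
        }
        // Outer keys appear in the order worlds were first encountered during iteration.
        System.out.println(byWorld.keySet());
    }
}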