List of usage examples for java.util.HashSet#size()
Method signature: public int size()
From source file:com.peterbochs.PeterBochsDebugger.java
/**
 * Handles key presses in the Bochs command text field, stepping through the
 * saved command history with the Up/Down arrow keys (Up moves toward older
 * entries, Down toward newer ones).
 *
 * @param evt the key event delivered by the text field's key listener
 */
private void jBochsCommandTextFieldKeyPressed(KeyEvent evt) {
    // An empty field resets the history cursor to the newest entry.
    if (bochsCommandTextField.getText().isEmpty()) {
        commandHistoryIndex = 0;
    }
    // NOTE(review): HashSet has no defined iteration order, so the position
    // visited via toArray() is not guaranteed to be chronological; a
    // LinkedHashSet or List in Setting would make this ordering reliable.
    HashSet<String> history = Setting.getInstance().getBochsCommandHistory();
    if (evt.getKeyCode() == KeyEvent.VK_UP) { // was magic number 38
        if (commandHistoryIndex < history.size()) {
            commandHistoryIndex++;
            this.bochsCommandTextField
                    .setText(history.toArray()[history.size() - commandHistoryIndex].toString());
        }
    } else if (evt.getKeyCode() == KeyEvent.VK_DOWN) { // was magic number 40
        if (commandHistoryIndex > 1) {
            commandHistoryIndex--;
            this.bochsCommandTextField
                    .setText(history.toArray()[history.size() - commandHistoryIndex].toString());
        }
    }
}
From source file:com.krawler.formbuilder.servlet.ReportBuilderDaoImpl.java
public String saveReportGridConfig(String jsonstr, String reportid, boolean createTable, String tbar, String bbar) throws ServiceException { String result = "{\"success\":true}"; String tableName = ""; // String jsonstr = request.getParameter("jsondata"); try {/*from ww w. j a v a2 s .co m*/ JSONObject jobj = new JSONObject(); // String reportid = request.getParameter("reportid"); // boolean createTable = Boolean.parseBoolean(request.getParameter("createtable")); mb_reportlist report = (mb_reportlist) get(mb_reportlist.class, reportid); if (createTable) { tableName = "rb_" + toLZ(report.getReportkey(), 3) + "_" + report.getReportname().replace(" ", "").toLowerCase(); } else { tableName = report.getTablename(); } HashSet<String> hashSet = new HashSet<String>(); HashSet<String> finalHashSet = new HashSet<String>(); String hql = "delete from com.krawler.esp.hibernate.impl.mb_gridconfig as mb_gridconfig where mb_gridconfig.reportid = ? "; int numDelRec = executeUpdate(hql, new Object[] { report }); JSONArray jsonArray = new JSONArray(jsonstr); int confCnt = 0; for (int k = 0; k < jsonArray.length(); k++) { jobj = jsonArray.getJSONObject(k); if (!jobj.getString("name").equals("id")) { com.krawler.esp.hibernate.impl.mb_gridconfig gridConf = new com.krawler.esp.hibernate.impl.mb_gridconfig(); // java.text.SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy-MM-d HH:mm:ss"); // java.sql.Timestamp timestamp1 = Timestamp.valueOf(sdf.format(new java.util.Date())); if (jobj.getString("name").indexOf(".") > -1) { String[] tablecolumn = jobj.getString("name").split("\\."); gridConf.setName( tablecolumn[0] + PropsValues.REPORT_HARDCODE_STR + tablecolumn[1].toLowerCase()); } else { if (jobj.getString("name").indexOf(PropsValues.REPORT_HARDCODE_STR) == -1) { // String Columnname = moduleBuilderMethods.getColumnName(moduleBuilderMethods.getcolumnNameStr(jobj.getString("name").toLowerCase())); String Columnname = jobj.getString("name").toLowerCase(); gridConf.setName(tableName 
+ PropsValues.REPORT_HARDCODE_STR + Columnname); } } if (StringUtil.isNullOrEmpty(jobj.getString("displayfield"))) gridConf.setDisplayfield(jobj.getString("name")); else gridConf.setDisplayfield(jobj.getString("displayfield")); if (!StringUtil.isNullOrEmpty(jobj.getString("reftable"))) { gridConf.setReftable(jobj.getString("reftable")); } else if (StringUtil.isNullOrEmpty(jobj.getString("reftable")) && !jobj.getString("combogridconfig").equals("-1")) { gridConf.setReftable(""); } else { if (createTable) gridConf.setReftable(tableName); } gridConf.setXtype(jobj.getString("xtype")); renderer render = null; if (jobj.getString("renderer").length() > 0) { render = (renderer) get(renderer.class, jobj.getString("renderer")); } else { render = (renderer) get(renderer.class, "0"); } // gridConf.setRenderer(render); // gridConf.setFilter(jobj.getString("filter")); gridConf.setSummaryType(jobj.getString("summaryType")); gridConf.setDefaultValue(jobj.getString("defaultValue")); gridConf.setHidden(Boolean.parseBoolean(jobj.getString("hidden"))); gridConf.setCountflag(Boolean.parseBoolean(jobj.getString("countflag"))); String combogridconfig = "-1"; String refTable = jobj.getString("reftable"); String xtype = jobj.getString("xtype"); if (xtype.equals("Combobox") && !StringUtil.isNullOrEmpty(refTable) && !refTable.equals(tableName)) { String SELECT_QUERY = "Select mb_reportlist.reportid from com.krawler.esp.hibernate.impl.mb_reportlist as mb_reportlist " + " where mb_reportlist.tablename = ?"; List list = find(SELECT_QUERY, new Object[] { refTable }); Iterator ite = list.iterator(); String reportid1 = null; if (ite.hasNext()) { reportid1 = (String) ite.next(); } if (reportid1 != null) { String name = null; if (jobj.getString("name").indexOf(".") > -1) { String[] tablecolumn = jobj.getString("name").split("\\."); name = tablecolumn[0] + PropsValues.REPORT_HARDCODE_STR + tablecolumn[1].toLowerCase(); } else { if (jobj.getString("name").indexOf(PropsValues.REPORT_HARDCODE_STR) == 
-1) name = tableName + PropsValues.REPORT_HARDCODE_STR + jobj.getString("name").toLowerCase(); } mb_reportlist report1 = (mb_reportlist) get(mb_reportlist.class, reportid1); SELECT_QUERY = "select mb_gridconfig.combogridconfig from com.krawler.esp.hibernate.impl.mb_gridconfig as mb_gridconfig " + "where mb_gridconfig.reportid = ? and mb_gridconfig.name = ?"; list = find(SELECT_QUERY, new Object[] { report1, name }); ite = list.iterator(); if (ite.hasNext()) { combogridconfig = (String) ite.next(); } } } else if (!jobj.getString("combogridconfig").equals("-1")) { combogridconfig = jobj.getString("combogridconfig"); } gridConf.setCombogridconfig(combogridconfig); gridConf.setColumnindex(k); gridConf.setReportid(report); save(gridConf); String strid = gridConf.getId(); confCnt++; if (!StringUtil.isNullOrEmpty(jobj.getString("reftable")) && !jobj.getString("reftable").equals(tableName)) { String fkKeyName = jobj.getString("reftable") + "." + (getPrimaryColName(jobj.getString("reftable"))); if (fkKeyName.equals(jobj.getString("name"))) { hashSet.add(fkKeyName); finalHashSet.remove(fkKeyName); } else if (!hashSet.contains(fkKeyName)) { finalHashSet.add(fkKeyName); } } } } if (finalHashSet.size() > 0) { Iterator itr = finalHashSet.iterator(); while (itr.hasNext()) { //Insert id fields of reference tables com.krawler.esp.hibernate.impl.mb_gridconfig gridConf = new com.krawler.esp.hibernate.impl.mb_gridconfig(); String tablecolumn = itr.next().toString(); tablecolumn = tablecolumn.replace(".", PropsValues.REPORT_HARDCODE_STR); gridConf.setName(tablecolumn); gridConf.setDisplayfield(tablecolumn); gridConf.setReftable(tablecolumn.split(PropsValues.REPORT_HARDCODE_STR)[0]); gridConf.setXtype("None"); gridConf.setHidden(true); renderer render = (renderer) get(renderer.class, "0"); gridConf.setRenderer(render); gridConf.setColumnindex(confCnt++); gridConf.setReportid(report); gridConf.setCombogridconfig("-1"); //gridConf.setFilter(""); gridConf.setCountflag(false); 
save(gridConf); } // String actionType = "Add Report Grid Config"; // String details = "Grid Config added for Report "+report.getReportname(); // long actionId = AuditTrialHandler.getActionId(session, actionType); // AuditTrialHandler.insertAuditLog(session, actionId, details, request); } if (createTable) { int cnt = 0; //Insert id field of new table com.krawler.esp.hibernate.impl.mb_gridconfig gridConf = new com.krawler.esp.hibernate.impl.mb_gridconfig(); gridConf.setName(tableName + PropsValues.REPORT_HARDCODE_STR + "id"); gridConf.setDisplayfield("id"); gridConf.setReftable(tableName); gridConf.setXtype("None"); gridConf.setHidden(true); renderer render = (renderer) get(renderer.class, "0"); gridConf.setRenderer(render); gridConf.setColumnindex(confCnt++); gridConf.setReportid(report); gridConf.setCombogridconfig("-1"); // gridConf.setFilter(""); gridConf.setCountflag(false); save(gridConf); // save report table name report.setTablename(tableName); save(report); ArrayList<Hashtable<String, Object>> aList = new ArrayList<Hashtable<String, Object>>(); Object[] objArrField = new Object[] { "name", "type", "primaryid", "default" }; Object[] objArr = new Object[] { "id", "String", "true", "" }; moduleBuilderGenerateTable.makeEntryToArrayList(cnt, aList, objArr, objArrField); objArrField = new Object[] { "name", "type", "default" }; objArr = new Object[] { "createdby", "String", "" }; moduleBuilderGenerateTable.makeEntryToArrayList(cnt, aList, objArr, objArrField); objArr = new Object[] { "createddate", "Date", "" }; moduleBuilderGenerateTable.makeEntryToArrayList(cnt, aList, objArr, objArrField); objArr = new Object[] { "modifieddate", "Date", "" }; moduleBuilderGenerateTable.makeEntryToArrayList(cnt, aList, objArr, objArrField); objArr = new Object[] { "deleteflag", "double", "" }; moduleBuilderGenerateTable.makeEntryToArrayList(cnt, aList, objArr, objArrField); HashSet<String> hs = new HashSet<String>(); for (int k = 0; k < jsonArray.length(); k++) { JSONObject obj 
= jsonArray.getJSONObject(k); if (!StringUtil.isNullOrEmpty(obj.getString("reftable")) && !obj.getString("reftable").equals(tableName)) { if (!Boolean.parseBoolean(obj.getString("countflag"))) { if (hs.add(obj.getString("reftable"))) { Object[] objArrField1 = new Object[] { "name", "reftable", "type", "foreignid", "default" }; String fkKeyName = obj.getString("reftable") .concat(getPrimaryColName(obj.getString("reftable"))); objArr = new Object[] { fkKeyName, obj.getString("reftable"), "String", true, obj.getString("defaultValue") }; moduleBuilderGenerateTable.makeEntryToArrayList(cnt, aList, objArr, objArrField1); } } } else { if (!obj.getString("name").equals("id")) { String type = ""; if (obj.getString("xtype").equals("Checkbox") || obj.getString("xtype").equals("Radio")) { type = "boolean"; } else if (obj.getString("xtype").equals("Date")) { type = "Date"; } else if (obj.getString("xtype").equals("Number(Integer)")) { type = "int"; } else if (obj.getString("xtype").equals("Number(Float)")) { type = "double"; } else if (obj.getString("xtype").equals("Combobox")) { type = "String"; } else { type = "String"; } objArr = new Object[] { obj.getString("name").toLowerCase(), type, obj.getString("defaultValue") }; moduleBuilderGenerateTable.makeEntryToArrayList(cnt, aList, objArr, objArrField); } } } hs.clear(); ServiceBuilder sb = new ServiceBuilder(); // sb.createServiceXMLFile(aList, tableName); sb.createJavaFile(tableName, true); // String actionType = "Add Report Grid Config Table"; // String details = "Grid Cofig Table added for Report "+report.getReportname(); // long actionId = AuditTrialHandler.getActionId(session, actionType); // AuditTrialHandler.insertAuditLog(session, actionId, details, request); } else { String className = "rb_" + toLZ(report.getReportkey(), 3) + "_" + report.getReportname().replace(" ", "").toLowerCase(); // save report table name //report.setTablename(className); //session.save(report); //Create only implementation java class for report 
for which no new table is created. ServiceBuilder sb = new ServiceBuilder(); sb.createImplJavaFile(className, true); } // if(numDelRec==0) { // if first time store then add permission entry for add/edit/delete action // mb_permgrmaster permgrmaster = new mb_permgrmaster(); // accessRight.addPermGrp(session,permgrmaster,report); // com.krawler.esp.hibernate.impl.mb_permmaster permmaster = null; // for(int i=2;i<9;i++) { // permmaster = new com.krawler.esp.hibernate.impl.mb_permmaster(); // mb_permactions permaction = (mb_permactions) session.load(mb_permactions.class,i); // permmaster.setPermaction(permaction); // permmaster.setPermname(permaction.getName()); // permmaster.setDescription(permaction.getName()); // permmaster.setPermgrid(permgrmaster); // permmaster.setPermid(accessRight.getMaxPermid(session, permgrmaster.getPermgrid())); // session.save(permmaster); // } // } storeToolbarConf(reportid, tbar, bbar); hql = "SELECT mb_gridconfig.columnindex,mb_gridconfig.hidden,mb_gridconfig.reftable,mb_gridconfig.renderer,mb_gridconfig.xtype,mb_gridconfig.displayfield,mb_gridconfig.name " + "FROM com.krawler.esp.hibernate.impl.mb_gridconfig AS mb_gridconfig " + "WHERE mb_gridconfig.reportid = ?"; List list = find(hql, new Object[] { report }); Iterator ite = list.iterator(); JSONObject r = new JSONObject(); while (ite.hasNext()) { Object[] row = (Object[]) ite.next(); JSONObject temp = new JSONObject(); temp.put("index", row[0]); temp.put("hidden", row[1]); temp.put("reftable", row[2]); temp.put("renderer", row[3]); temp.put("xtype", row[4]); temp.put("displayfield", row[5]); temp.put("name", row[6]); r.append("data", temp); } r.put("success", true); r.put("reportId", reportid); r.put("tablename", tableName); result = r.toString(); } catch (JSONException e) { logger.warn(e.getMessage(), e); result = "{\"success\":false}"; throw ServiceException.FAILURE("reportbuilder.saveReportGridConfig", e); } catch (Exception e) { logger.warn(e.getMessage(), e); result = 
"{\"success\":false}"; throw ServiceException.FAILURE("reportbuilder.saveReportGridConfig", e); } return result; }
From source file:dao.CollabrumDaoDb.java
private void deleteRBCollabrum(String directoryId, String collabrumId, String userId, String userLogin) throws BaseDaoException { if (RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(collabrumId) || RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin)) { throw new BaseDaoException("params are null"); }/*ww w . ja v a 2 s. co m*/ List tidList = getTidList(collabrumId); List blobEntryList = getBlobEntryList(collabrumId); Vector ridVector = new Vector(); for (int i = 0; i < tidList.size(); i++) { /* get list of rids from collmessages */ List ridList = getRidList((String) ((ColTopic) tidList.get(i)).getValue(DbConstants.TID)); ridVector.add(ridList); } /** * get the members list from collmembers, then access each record in this table * collblock (deleteAllColBlockQuery) partitioned on loginid * deleteColBlockQuery.run(conn, collabrumId); */ deleteBlockedMembers(collabrumId); /** * Get scalability datasource with no partitions for colladmin, collmembers, dircoll, collabrum */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { StringBuffer sb = new StringBuffer("ds is null, deleteCollabrum() "); sb.append(sourceName); sb.append(" collabrumId = "); sb.append(collabrumId); throw new BaseDaoException(sb.toString()); } HashSet result = null; Connection conn = null; /** * non partitioned tables */ try { conn = ds.getConnection(); conn.setAutoCommit(false); result = listModeratorQuery.run(conn, collabrumId); /** * Not partitioned * collabrum, (deleteQuery) * colladmin (deleteAdminQuery) * dircoll (deleteDirColQuery) * collmembers (deleteColMembersQuery), * * collblobtags (deleteColBlobTagsQuery) * collblogtags (deleteColBlogTagsQuery) * collabrum_ind, (deleteCollabrumIndexQuery) * collblob_ind, (deleteColBlobIndexQuery) * collmessages_ind, (deleteColMessagesIndexQuery) * colltopics_ind, (deleteColTopicsIndexQuery) */ deleteQuery.run(conn, collabrumId); deleteAdminQuery.run(conn, 
collabrumId); deleteDircollQuery.run(conn, collabrumId); deleteAllMembersQuery.run(conn, collabrumId); /* new ones */ deleteColBlobTagsQuery.run(conn, collabrumId); deleteColBlogTagsQuery.run(conn, collabrumId); deleteCollabrumIndexQuery.run(conn, collabrumId); for (int i = 0; i < blobEntryList.size(); i++) { deleteColBlobIndexQuery.run(conn, (String) ((Photo) blobEntryList.get(i)).getValue(DbConstants.ENTRYID)); } for (int i = 0; i < tidList.size(); i++) { deleteColTopicsIndexQuery.run(conn, (String) ((ColTopic) tidList.get(i)).getValue(DbConstants.TID)); } for (int i = 0; i < ridVector.size(); i++) { List ridList = (List) ridVector.elementAt(i); for (int j = 0; i < ridList.size(); j++) { deleteColMessagesIndexQuery.run(conn, (String) ((ColMessage) ridList.get(j)).getValue(DbConstants.RID)); } } } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { StringBuffer sb = new StringBuffer( "conn.close exception for rollback(), for deleteCollabrum() "); sb.append("collabrumId = "); sb.append(collabrumId); sb.append(" userId = "); sb.append(userId); throw new BaseDaoException(sb.toString(), e2); } StringBuffer sb = new StringBuffer(" rollback() exception, for deleteCollabrum() "); sb.append("collabrumId = "); sb.append(collabrumId); sb.append(" userId = "); sb.append(userId); throw new BaseDaoException(sb.toString(), e1); } } // connection commit try { conn.commit(); } catch (Exception e3) { StringBuffer sb = new StringBuffer(" commit() exception, for deleteCollabrum() collabrumId = "); sb.append(collabrumId); sb.append(" userId = "); sb.append(userId); throw new BaseDaoException(sb.toString(), e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { StringBuffer sb = new StringBuffer( " conn.close() exception, for commit(), deleteCollabrum() collabrumId = "); sb.append(collabrumId); sb.append(" userId = "); 
sb.append(userId); throw new BaseDaoException(sb.toString(), e4); } deleteCollMessages(collabrumId, tidList); deleteCollTopics(collabrumId, tidList); /** * Jboss methods * fqn - full qualified name * check if the collabrum already exists in the cache * If it exists, remove the collabrum from the cache */ Fqn fqn = cacheUtil.fqn(DbConstants.COLLABRUM); if (treeCache.exists(fqn, collabrumId)) { treeCache.remove(fqn, collabrumId); } fqn = cacheUtil.fqn(DbConstants.ORGANIZERS); if (treeCache.exists(fqn, collabrumId)) { treeCache.remove(fqn, collabrumId); } fqn = cacheUtil.fqn(DbConstants.COLLABRUM_EDIT); if (treeCache.exists(fqn, collabrumId)) { treeCache.remove(fqn, collabrumId); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } fqn = cacheUtil.fqn(DbConstants.COLTOPICS); if (treeCache.exists(fqn, collabrumId)) { treeCache.remove(fqn, collabrumId); } fqn = cacheUtil.fqn(DbConstants.COLTRAFFIC); if (treeCache.exists(fqn, collabrumId)) { treeCache.remove(fqn, collabrumId); } /** * delete collabrum messages */ for (int i = 0; i < tidList.size(); i++) { StringBuffer sb = new StringBuffer(collabrumId); sb.append("-"); sb.append(tidList.get(i)); String key = sb.toString(); fqn = cacheUtil.fqn(DbConstants.COLMSGS); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } fqn = cacheUtil.fqn(DbConstants.COLTOPIC); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } } fqn = cacheUtil.fqn(DbConstants.COLLABRUM_STREAM_BLOBS); if (treeCache.exists(fqn, collabrumId)) { treeCache.remove(fqn, collabrumId); } // deleting user pages for each admin as we want them to be updated if ((result != null) && (result.size() > 0)) { Iterator it = result.iterator(); StringBuffer sb = new StringBuffer(); while (it.hasNext()) { Collabrum collabrum = (Collabrum) it.next(); String adminUser = collabrum.getValue(DbConstants.LOGIN); if (!RegexStrUtil.isNull(adminUser)) { fqn = 
cacheUtil.fqn(DbConstants.USER_PAGE); if (treeCache.exists(fqn, adminUser)) { treeCache.remove(fqn, adminUser); } fqn = cacheUtil.fqn(DbConstants.MEM_AS_ORGANIZER_LIST); if (treeCache.exists(fqn, adminUser)) { treeCache.remove(fqn, adminUser); } fqn = cacheUtil.fqn(DbConstants.MEM_AS_MODERATOR_LIST); if (treeCache.exists(fqn, adminUser)) { treeCache.remove(fqn, adminUser); } String adminId = collabrum.getValue(DbConstants.LOGIN_ID); fqn = cacheUtil.fqn(DbConstants.BLOCKED_COLLABRUM_LIST); if (treeCache.exists(fqn, adminId)) { treeCache.remove(fqn, adminId); } // delete organizer key = collabrumid-memberid sb.delete(0, sb.length()); sb.append(collabrumId); sb.append("-"); sb.append(adminId); fqn = cacheUtil.fqn(DbConstants.ORGANIZER); if (treeCache.exists(fqn, sb.toString())) { treeCache.remove(fqn, sb.toString()); } } } } fqn = cacheUtil.fqn(DbConstants.COLLABRUM_LIST); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } /** * Jboss methods - * fqn - full qualified name * check if the streamblob already set in the cache * If it exists, remove the bean from the cache. 
*/ for (int i = 0; i < blobEntryList.size(); i++) { String entryId = (String) ((Photo) blobEntryList.get(i)).getValue(DbConstants.ENTRYID); fqn = cacheUtil.fqn(DbConstants.PHOTO); if (treeCache.exists(fqn, entryId)) { treeCache.remove(fqn, entryId); } StringBuffer buf = new StringBuffer(collabrumId); buf.append("-"); buf.append(entryId); String key = buf.toString(); fqn = cacheUtil.fqn(DbConstants.COL_STREAM_BLOB); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } fqn = cacheUtil.fqn(DbConstants.DEFAULT_PHOTO); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } } fqn = cacheUtil.fqn(DbConstants.COLL_CAT); StringBuffer sb = new StringBuffer(collabrumId); sb.append("-"); sb.append(DbConstants.PHOTO_CATEGORY); if (treeCache.exists(fqn, sb.toString())) { treeCache.remove(fqn, sb.toString()); } sb.delete(0, sb.length()); sb.append(collabrumId); sb.append("-"); sb.append(DbConstants.FILE_CATEGORY); if (treeCache.exists(fqn, sb.toString())) { treeCache.remove(fqn, sb.toString()); } }
From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java
/** * @return/*from w ww.j a v a 2s . c o m*/ */ public CollectionResultType initialize() { collectionInfoList = CollectionInfo.getCollectionInfoList(oldDBConn, false); //fixIdaho(); if (collectionInfoList == null) { if (CollectionInfo.isAskForFix()) { if (ConvertTaxonHelper.fixTaxonomicUnitType(oldDBConn)) { collectionInfoList = CollectionInfo.getCollectionInfoList(oldDBConn, true); } else { try { oldDBConn.close(); } catch (SQLException e) { } System.exit(0); } } else { try { oldDBConn.close(); } catch (SQLException e) { } System.exit(0); } } collectionInfoShortList = CollectionInfo.getFilteredCollectionInfoList(); if (collectionInfoList != null && collectionInfoList.size() > 0) { int paleoCnt = 0; // This is a Hash of TaxonObjectType to see how many collections use the same TaxonObjectType HashMap<Integer, HashSet<CollectionInfo>> taxonomyTypeHash = new HashMap<Integer, HashSet<CollectionInfo>>(); // Get a List for each type of Paleo Collection, hashed by the Root Id HashMap<Integer, Vector<CollectionInfo>> paleoColInfoHash = new HashMap<Integer, Vector<CollectionInfo>>(); HashMap<Integer, HashSet<DisciplineType.STD_DISCIPLINES>> paleoDispTypeHash = new HashMap<Integer, HashSet<DisciplineType.STD_DISCIPLINES>>(); for (CollectionInfo colInfo : collectionInfoShortList) { // Tracks a 'set' of CollectionInfo objects for each TaxonomyTypeId HashSet<CollectionInfo> taxonomyTypeSet = taxonomyTypeHash.get(colInfo.getTaxonomyTypeId()); if (taxonomyTypeSet == null) { System.out.println("Creating TxTypeID: " + colInfo.getTaxonomyTypeId() + " From " + colInfo.getCatSeriesName()); taxonomyTypeSet = new HashSet<CollectionInfo>(); taxonomyTypeHash.put(colInfo.getTaxonomyTypeId(), taxonomyTypeSet); } else { System.out.println("Adding TxTypeID: " + colInfo.getTaxonomyTypeId() + " From " + colInfo.getCatSeriesName() + " " + taxonomyTypeSet.size()); } taxonomyTypeSet.add(colInfo); //--- DisciplineType dType = getStandardDisciplineName(colInfo.getTaxonomyTypeName(), 
colInfo.getColObjTypeName(), colInfo.getCatSeriesName()); colInfo.setDisciplineTypeObj(dType); if (dType != null && dType.isPaleo()) { Vector<CollectionInfo> ciList = paleoColInfoHash.get(colInfo.getTaxonNameId()); if (ciList == null) { ciList = new Vector<CollectionInfo>(); paleoColInfoHash.put(colInfo.getTaxonNameId(), ciList); } ciList.add(colInfo); HashSet<DisciplineType.STD_DISCIPLINES> typeDispSet = paleoDispTypeHash .get(colInfo.getTaxonNameId()); if (typeDispSet == null) { typeDispSet = new HashSet<DisciplineType.STD_DISCIPLINES>(); paleoDispTypeHash.put(colInfo.getTaxonNameId(), typeDispSet); } typeDispSet.add(colInfo.getDisciplineTypeObj().getDisciplineType()); paleoCnt++; } System.out.println("--------------------------------------"); //System.out.println(colInfo.toString()+"\n"); } // for loop int cnt = 0; StringBuilder msg = new StringBuilder(); for (Integer taxonomyTypId : taxonomyTypeHash.keySet()) { HashSet<CollectionInfo> taxonomyTypeSet = taxonomyTypeHash.get(taxonomyTypId); if (taxonomyTypeSet.size() > 1) { msg.append( String.format("<html>TaxonomyTypeId %d has more than one Discpline/Collection:<br><OL>", taxonomyTypId)); for (CollectionInfo ci : taxonomyTypeSet) { msg.append(String.format("<LI>%s - %s - %s</LI>", ci.getCatSeriesName(), ci.getColObjTypeName(), ci.getTaxonomyTypeName())); } msg.append("</OL>"); cnt++; } } if (cnt > 0) { JOptionPane.showConfirmDialog(null, msg.toString(), "Taxomony Type Issues", JOptionPane.CLOSED_OPTION, JOptionPane.QUESTION_MESSAGE); } // Will be zero for no Paleo collections if (paleoCnt > 1) { // Check to see if they all use the same tree if (paleoColInfoHash.size() > 1) { msg.setLength(0); // We get here when there is more than one Taxon Tree for the Paleo Collections for (Integer treeId : paleoColInfoHash.keySet()) { Vector<CollectionInfo> ciList = paleoColInfoHash.get(treeId); CollectionInfo colInfo = ciList.get(0); msg.append(String.format("The following collections use Taxon Tree '%s':\n", 
colInfo.getTaxonomyTypeName())); for (CollectionInfo ci : paleoColInfoHash.get(treeId)) { DisciplineType dType = getStandardDisciplineName(ci.getTaxonomyTypeName(), ci.getColObjTypeName(), ci.getCatSeriesName()); String name = String.format("%s / %s / %s / %s / %s", ci.getCatSeriesPrefix(), ci.getCatSeriesName(), ci.getColObjTypeName(), ci.getTaxonomyTypeName(), dType.toString()); msg.append(name); msg.append("\n"); } msg.append("\n"); } JOptionPane.showConfirmDialog(null, msg.toString(), "Paleo Taxon Tree Issues", JOptionPane.OK_OPTION, JOptionPane.QUESTION_MESSAGE); } else { StringBuilder colNames = new StringBuilder(); for (Integer treeId : paleoColInfoHash.keySet()) { for (CollectionInfo ci : paleoColInfoHash.get(treeId)) { colNames.append("<LI>"); colNames.append(ci.getCatSeriesName()); colNames.append("</LI>"); } } // You get here when all the Paleo Disciplines use the same tree String msgStr = "<html>All the Paleo Collections need to use the same Taxon Tree and<br>therefore needs to be in the same discipline:<br><ol>"; JOptionPane.showConfirmDialog(null, msgStr + colNames.toString(), "Paleo Taxon Tree Issues", JOptionPane.CLOSED_OPTION, JOptionPane.QUESTION_MESSAGE); for (Integer treeId : paleoColInfoHash.keySet()) { Vector<CollectionInfo> ciList = paleoColInfoHash.get(treeId); CollectionInfo colInfo = ciList.get(0); for (CollectionInfo ci : paleoColInfoHash.get(treeId)) { ci.setDisciplineTypeObj(colInfo.getDisciplineTypeObj()); } } } // } DefaultTableModel model = CollectionInfo.getCollectionInfoTableModel(false); if (model.getRowCount() > 1) { TableWriter colInfoTblWriter = convLogger.getWriter("colinfo.html", "Collection Info"); colInfoTblWriter.startTable(); colInfoTblWriter.logHdr(CollectionInfoModel.getHeaders()); Object[] row = new Object[model.getColumnCount()]; for (int r = 0; r < model.getRowCount(); r++) { for (int i = 0; i < model.getColumnCount(); i++) { row[i] = model.getValueAt(r, i); } colInfoTblWriter.logObjRow(row); } 
colInfoTblWriter.endTable(); colInfoTblWriter.println("<BR><h3>Collections to be Created.</h3>"); colInfoTblWriter.startTable(); colInfoTblWriter.logHdr(CollectionInfoModel.getHeaders()); model = CollectionInfo.getCollectionInfoTableModel(true); row = new Object[model.getColumnCount()]; for (int r = 0; r < model.getRowCount(); r++) { for (int i = 0; i < model.getColumnCount(); i++) { row[i] = model.getValueAt(r, i); } colInfoTblWriter.logObjRow(row); } colInfoTblWriter.endTable(); colInfoTblWriter.close(); File file = new File(colInfoTblWriter.getFileName()); if (file != null && file.exists()) { try { AttachmentUtils.openURI(file.toURI()); } catch (Exception ex) { ex.printStackTrace(); } } } for (CollectionInfo ci : CollectionInfo.getFilteredCollectionInfoList()) { String sql = "select preparationmethod, ct.* from usyscollobjprepmeth pt inner join usysmetafieldsetsubtype st on st.fieldsetsubtypeid = pt.fieldsetsubtypeid " + "inner join collectionobjecttype ct1 on ct1.collectionobjecttypeid = st.fieldvalue " + "inner join collectionobjecttype ct on ct.collectionobjecttypename = replace(ct1.collectionobjecttypename, ' Preparation', '') " + "inner join catalogseriesdefinition csd on csd.objecttypeid = ct.collectionobjecttypeid " + "inner join catalogseries cs on cs.catalogseriesid = csd.catalogseriesid " + "WHERE csd.catalogseriesid = " + ci.getCatSeriesId(); System.out.println("\n------------------"); System.out.println(ci.getCatSeriesName()); System.out.println(sql); System.out.println("------------------"); int i = 0; Vector<Object[]> list = BasicSQLUtils.query(oldDBConn, sql); if (list.size() > 0) { for (Object[] row : list) { System.out.print(i + " - "); for (Object col : row) { System.out.print(col != null ? 
col.toString() : "null"); System.out.print(", "); } System.out.println(); i++; } } else { System.out.println("No Results"); } sql = "select ct.*, (select relatedsubtypevalues from usysmetacontrol c " + "left join usysmetafieldsetsubtype fst on fst.fieldsetsubtypeid = c.fieldsetsubtypeid " + "where objectid = 10290 and ct.taxonomytypeid = c.relatedsubtypevalues) as DeterminationTaxonType " + "from collectiontaxonomytypes ct where ct.biologicalobjecttypeid = " + ci.getColObjTypeId(); sql = String.format( "SELECT CollectionTaxonomyTypesID, BiologicalObjectTypeID, CollectionObjectTypeName FROM (select ct.*, " + "(SELECT distinct relatedsubtypevalues FROM usysmetacontrol c " + "LEFT JOIN usysmetafieldsetsubtype fst ON fst.fieldsetsubtypeid = c.fieldsetsubtypeid " + "WHERE objectid = 10290 AND ct.taxonomytypeid = c.relatedsubtypevalues) AS DeterminationTaxonType " + "FROM collectiontaxonomytypes ct WHERE ct.biologicalobjecttypeid = %d) T1 " + "INNER JOIN collectionobjecttype cot ON T1.biologicalobjecttypeid = cot.CollectionObjectTypeID", ci.getColObjTypeId()); System.out.println("\n------------------"); System.out.println(ci.getColObjTypeName()); System.out.println(sql); System.out.println("------------------"); i = 0; list = BasicSQLUtils.query(oldDBConn, sql); if (list.size() > 0) { for (Object[] row : list) { System.out.print(i + " - "); for (Object col : row) { System.out.print(col != null ? 
col.toString() : "null"); System.out.print(", "); } System.out.println(); i++; } } else { System.out.println("No Results"); } } /* String sql = " select ct.*, (select relatedsubtypevalues from usysmetacontrol c " + "left join usysmetafieldsetsubtype fst on fst.fieldsetsubtypeid = c.fieldsetsubtypeid " + "where objectid = 10290 and ct.taxonomytypeid = c.relatedsubtypevalues) as DeterminationTaxonType " + "from collectiontaxonomytypes ct where ct.biologicalobjecttypeid = 13"; System.out.println("\n------------------"); System.out.println("List of the taxonomytypes associated with a CollectionObjectTypeID"); System.out.println(sql); System.out.println("------------------"); int i = 0; Vector<Object[]> list = BasicSQLUtils.query(oldDBConn, sql); if (list.size() > 0) { for (Object[] row : list) { System.out.print(i+" - "); for (Object col: row) { System.out.print(col != null ? col.toString() : "null"); System.out.print(", "); } System.out.println(); } } else { System.out.println("No Results"); }*/ CellConstraints cc = new CellConstraints(); PanelBuilder pb = new PanelBuilder(new FormLayout("f:p:g", "p,2px,f:p:g,10px,p,2px,p:g,8px")); JTable tableTop = new JTable(CollectionInfo.getCollectionInfoTableModel(false)); JTable tableBot = new JTable( CollectionInfo.getCollectionInfoTableModel(!CollectionInfo.DOING_ACCESSSION)); int rows = 10; tableTop.setPreferredScrollableViewportSize(new Dimension( tableTop.getPreferredScrollableViewportSize().width, rows * tableTop.getRowHeight())); tableBot.setPreferredScrollableViewportSize(new Dimension( tableBot.getPreferredScrollableViewportSize().width, rows * tableBot.getRowHeight())); pb.add(UIHelper.createLabel("Available Specify 5 Taxononmic Types", SwingConstants.CENTER), cc.xy(1, 1)); pb.add(UIHelper.createScrollPane(tableTop), cc.xy(1, 3)); pb.add(UIHelper.createLabel("Specify 5 Collections to be Created", SwingConstants.CENTER), cc.xy(1, 5)); pb.add(UIHelper.createScrollPane(tableBot), cc.xy(1, 7)); pb.setDefaultDialogBorder(); 
CustomDialog dlg = new CustomDialog(null, "Taxononic Types", true, pb.getPanel()); dlg.createUI(); dlg.setSize(1024, 500); UIHelper.centerWindow(dlg); dlg.setAlwaysOnTop(true); dlg.setVisible(true); if (dlg.isCancelled()) { return CollectionResultType.eCancel; } Pair<CollectionInfo, DisciplineType> pair = CollectionInfo.getDisciplineType(oldDBConn); if (pair == null || pair.second == null) { CollectionInfo colInfo = pair.first; disciplineType = getStandardDisciplineName(colInfo.getTaxonomyTypeName(), colInfo.getColObjTypeName(), colInfo.getCatSeriesName()); } else { disciplineType = pair.second; } return disciplineType != null ? CollectionResultType.eOK : CollectionResultType.eError; } return CollectionResultType.eError; }
From source file:com.sonicle.webtop.calendar.CalendarManager.java
public void syncRemoteCalendar(int calendarId, boolean full) throws WTException { final UserProfile.Data udata = WT.getUserData(getTargetProfileId()); final ICalendarInput icalInput = new ICalendarInput(udata.getTimeZone()); final String PENDING_KEY = String.valueOf(calendarId); CalendarDAO calDao = CalendarDAO.getInstance(); Connection con = null;/*from w w w . jav a2s. c o m*/ if (pendingRemoteCalendarSyncs.putIfAbsent(PENDING_KEY, RunContext.getRunProfileId()) != null) { throw new ConcurrentSyncException("Sync activity is already running [{}, {}]", calendarId, RunContext.getRunProfileId()); } try { //checkRightsOnCalendarFolder(calendarId, "READ"); con = WT.getConnection(SERVICE_ID, false); Calendar cal = ManagerUtils.createCalendar(calDao.selectById(con, calendarId)); if (cal == null) throw new WTException("Calendar not found [{0}]", calendarId); if (!Calendar.Provider.WEBCAL.equals(cal.getProvider()) && !Calendar.Provider.CALDAV.equals(cal.getProvider())) { throw new WTException("Specified calendar is not remote (webcal or CalDAV) [{0}]", calendarId); } // Force a full update if last-sync date is null if (cal.getRemoteSyncTimestamp() == null) full = true; CalendarRemoteParameters params = LangUtils.deserialize(cal.getParameters(), CalendarRemoteParameters.class); if (params == null) throw new WTException("Unable to deserialize remote parameters"); if (params.url == null) throw new WTException("Remote URL is undefined"); if (Calendar.Provider.WEBCAL.equals(cal.getProvider())) { final String PREFIX = "webcal-"; File tempFile = null; URIBuilder builder = new URIBuilder(params.url); if (StringUtils.equalsIgnoreCase(builder.getScheme(), "webcal")) { builder.setScheme("http"); // Force http scheme } if (!StringUtils.isBlank(params.username) && !StringUtils.isBlank(params.username)) { builder.setUserInfo(params.username, params.password); } URI newUrl = URIUtils.buildQuietly(builder); try { final DateTime newLastSync = DateTimeUtils.now(); tempFile = 
WT.createTempFile(PREFIX, null); // Retrieve webcal content (iCalendar) from the specified URL // and save it locally logger.debug("Downloading iCalendar file from URL [{}]", newUrl); HttpClient httpCli = null; FileOutputStream os = null; try { httpCli = HttpClientUtils.createBasicHttpClient(HttpClientUtils.configureSSLAcceptAll(), newUrl); os = new FileOutputStream(tempFile); HttpClientUtils.writeContent(httpCli, newUrl, os); } catch (IOException ex) { throw new WTException(ex, "Unable to retrieve webcal [{0}]", newUrl); } finally { IOUtils.closeQuietly(os); HttpClientUtils.closeQuietly(httpCli); } logger.debug("Saved to temp file [{}]", tempFile.getName()); // Parse downloaded iCalendar logger.debug("Parsing downloaded iCalendar file"); net.fortuna.ical4j.model.Calendar ical = null; FileInputStream is = null; try { is = new FileInputStream(tempFile); ICalendarUtils.relaxParsingAndCompatibility(); ical = ICalendarUtils.parse(is); //TODO: add support to FILENAME property (Google https://github.com/ical4j/ical4j/issues/69) } catch (IOException | ParserException ex) { throw new WTException(ex, "Unable to read webcal"); } finally { IOUtils.closeQuietly(os); } icalInput.withIncludeVEventSourceInOutput(true); ArrayList<EventInput> input = icalInput.fromICalendarFile(ical, null); logger.debug("Found {} events", input.size()); Map<String, VEventHrefSync> syncByHref = null; if (full) { logger.debug("Cleaning up calendar [{}]", calendarId); doEventsDeleteByCalendar(con, calendarId, false); } else { EventDAO evtDao = EventDAO.getInstance(); syncByHref = evtDao.viewHrefSyncDataByCalendar(con, calendarId); } // Inserts/Updates data... 
logger.debug("Inserting/Updating events..."); try { String autoUidPrefix = DigestUtils.md5Hex(newUrl.toString()); // auto-gen base prefix in case of missing UID HashSet<String> hrefs = new HashSet<>(); HashMap<String, OEvent> cache = new HashMap<>(); int i = 0; for (EventInput ei : input) { if (StringUtils.isBlank(ei.event.getPublicUid())) { String autoUid = autoUidPrefix + "-" + i; ei.event.setPublicUid(autoUid); logger.trace("Missing UID: using auto-gen value. [{}]", autoUid); } String href = ManagerUtils.buildHref(ei.event.getPublicUid()); //if (logger.isTraceEnabled()) logger.trace("{}", ICalendarUtils.print(ICalendarUtils.getVEvent(devt.getCalendar()))); if (hrefs.contains(href)) { logger.trace("Event duplicated. Skipped! [{}]", href); continue; } boolean skip = false; Integer matchingEventId = null; String eiHash = DigestUtils.md5Hex(ei.sourceEvent.toString()); if (syncByHref != null) { // Only if... (!full) see above! VEventHrefSync hrefSync = syncByHref.remove(href); if (hrefSync != null) { // Href found -> maybe updated item if (!StringUtils.equals(hrefSync.getEtag(), eiHash)) { matchingEventId = hrefSync.getEventId(); logger.trace("Event updated [{}, {}]", href, eiHash); } else { skip = true; logger.trace("Event not modified [{}, {}]", href, eiHash); } } else { // Href not found -> added item logger.trace("Event newly added [{}, {}]", href, eiHash); } } if (!skip) { ei.event.setCalendarId(calendarId); ei.event.setHref(href); ei.event.setEtag(eiHash); if (matchingEventId != null) { ei.event.setEventId(matchingEventId); boolean updated = doEventInputUpdate(con, cache, ei); if (!updated) throw new WTException("Event not found [{}]", ei.event.getEventId()); } else { doEventInputInsert(con, cache, ei); } } hrefs.add(href); // Marks as processed! } if (syncByHref != null) { // Only if... (!full) see above! 
// Remaining hrefs -> deleted items for (VEventHrefSync hrefSync : syncByHref.values()) { logger.trace("Event deleted [{}]", hrefSync.getHref()); doEventDelete(con, hrefSync.getEventId(), false); } } cache.clear(); calDao.updateRemoteSyncById(con, calendarId, newLastSync, null); DbUtils.commitQuietly(con); } catch (Exception ex) { DbUtils.rollbackQuietly(con); throw new WTException(ex, "Error importing iCalendar"); } } finally { if (tempFile != null) { logger.debug("Removing temp file [{}]", tempFile.getName()); WT.deleteTempFile(tempFile); } } } else if (Calendar.Provider.CALDAV.equals(cal.getProvider())) { CalDav dav = getCalDav(params.username, params.password); try { DavCalendar dcal = dav.getCalendarSyncToken(params.url.toString()); if (dcal == null) throw new WTException("DAV calendar not found"); final boolean syncIsSupported = !StringUtils.isBlank(dcal.getSyncToken()); final DateTime newLastSync = DateTimeUtils.now(); if (!full && (syncIsSupported && !StringUtils.isBlank(cal.getRemoteSyncTag()))) { // Partial update using SYNC mode String newSyncToken = dcal.getSyncToken(); logger.debug("Querying CalDAV endpoint for changes [{}, {}]", params.url.toString(), cal.getRemoteSyncTag()); List<DavSyncStatus> changes = dav.getCalendarChanges(params.url.toString(), cal.getRemoteSyncTag()); logger.debug("Returned {} items", changes.size()); try { if (!changes.isEmpty()) { EventDAO evtDao = EventDAO.getInstance(); Map<String, List<Integer>> eventIdsByHref = evtDao.selectHrefsByByCalendar(con, calendarId); // Process changes... logger.debug("Processing changes..."); HashSet<String> hrefs = new HashSet<>(); for (DavSyncStatus change : changes) { String href = FilenameUtils.getName(change.getPath()); //String href = change.getPath(); if (DavUtil.HTTP_SC_TEXT_OK.equals(change.getResponseStatus())) { hrefs.add(href); } else { // Event deleted List<Integer> eventIds = eventIdsByHref.get(href); Integer eventId = (eventIds != null) ? 
eventIds.get(eventIds.size() - 1) : null; if (eventId == null) { logger.warn("Deletion not possible. Event path not found [{}]", PathUtils.concatPaths(dcal.getPath(), FilenameUtils.getName(href))); continue; } doEventDelete(con, eventId, false); } } // Retrieves events list from DAV endpoint (using multiget) logger.debug("Retrieving inserted/updated events [{}]", hrefs.size()); Collection<String> paths = hrefs.stream().map( href -> PathUtils.concatPaths(dcal.getPath(), FilenameUtils.getName(href))) .collect(Collectors.toList()); List<DavCalendarEvent> devts = dav.listCalendarEvents(params.url.toString(), paths); //List<DavCalendarEvent> devts = dav.listCalendarEvents(params.url.toString(), hrefs); // Inserts/Updates data... logger.debug("Inserting/Updating events..."); HashMap<String, OEvent> cache = new HashMap<>(); for (DavCalendarEvent devt : devts) { String href = FilenameUtils.getName(devt.getPath()); //String href = devt.getPath(); if (logger.isTraceEnabled()) logger.trace("{}", ICalendarUtils.print(ICalendarUtils.getVEvent(devt.getCalendar()))); List<Integer> eventIds = eventIdsByHref.get(href); Integer eventId = (eventIds != null) ? 
eventIds.get(eventIds.size() - 1) : null; final ArrayList<EventInput> input = icalInput .fromICalendarFile(devt.getCalendar(), null); if (input.size() != 1) throw new WTException("iCal must contain one event"); final EventInput ei = input.get(0); if (eventId != null) { doEventDelete(con, eventId, false); } ei.event.setCalendarId(calendarId); ei.event.setHref(href); ei.event.setEtag(devt.geteTag()); doEventInputInsert(con, cache, ei); } } calDao.updateRemoteSyncById(con, calendarId, newLastSync, newSyncToken); DbUtils.commitQuietly(con); } catch (Exception ex) { DbUtils.rollbackQuietly(con); throw new WTException(ex, "Error importing iCalendar"); } } else { // Full update or partial computing hashes String newSyncToken = null; if (syncIsSupported) { // If supported, saves last sync-token issued by the server newSyncToken = dcal.getSyncToken(); } // Retrieves cards from DAV endpoint logger.debug("Querying CalDAV endpoint [{}]", params.url.toString()); List<DavCalendarEvent> devts = dav.listCalendarEvents(params.url.toString()); logger.debug("Returned {} items", devts.size()); // Handles data... try { Map<String, VEventHrefSync> syncByHref = null; if (full) { logger.debug("Cleaning up calendar [{}]", calendarId); doEventsDeleteByCalendar(con, calendarId, false); } else if (!full && !syncIsSupported) { // This hash-map is only needed when syncing using hashes EventDAO evtDao = EventDAO.getInstance(); syncByHref = evtDao.viewHrefSyncDataByCalendar(con, calendarId); } logger.debug("Processing results..."); // Define a simple map in order to check duplicates. // eg. 
SOGo passes same card twice :( HashSet<String> hrefs = new HashSet<>(); HashMap<String, OEvent> cache = new HashMap<>(); for (DavCalendarEvent devt : devts) { String href = PathUtils.getFileName(devt.getPath()); //String href = devt.getPath(); String etag = devt.geteTag(); if (logger.isTraceEnabled()) logger.trace("{}", ICalendarUtils.print(ICalendarUtils.getVEvent(devt.getCalendar()))); if (hrefs.contains(href)) { logger.trace("Card duplicated. Skipped! [{}]", href); continue; } boolean skip = false; Integer matchingEventId = null; if (syncByHref != null) { // Only if... (!full && !syncIsSupported) see above! //String prodId = ICalendarUtils.buildProdId(ManagerUtils.getProductName()); //String hash = DigestUtils.md5Hex(new ICalendarOutput(prodId, true).write(devt.getCalendar())); String hash = DigestUtils .md5Hex(ICalendarUtils.getVEvent(devt.getCalendar()).toString()); VEventHrefSync hrefSync = syncByHref.remove(href); if (hrefSync != null) { // Href found -> maybe updated item if (!StringUtils.equals(hrefSync.getEtag(), hash)) { matchingEventId = hrefSync.getEventId(); etag = hash; logger.trace("Event updated [{}, {}]", href, hash); } else { skip = true; logger.trace("Event not modified [{}, {}]", href, hash); } } else { // Href not found -> added item logger.trace("Event newly added [{}]", href); etag = hash; } } if (!skip) { final ArrayList<EventInput> input = icalInput .fromICalendarFile(devt.getCalendar(), null); if (input.size() != 1) throw new WTException("iCal must contain one event"); final EventInput ei = input.get(0); ei.event.setCalendarId(calendarId); ei.event.setHref(href); ei.event.setEtag(etag); if (matchingEventId == null) { doEventInputInsert(con, cache, ei); } else { ei.event.setEventId(matchingEventId); boolean updated = doEventInputUpdate(con, cache, ei); if (!updated) throw new WTException("Event not found [{}]", ei.event.getEventId()); } } hrefs.add(href); // Marks as processed! } if (syncByHref != null) { // Only if... 
(!full && !syncIsSupported) see above! // Remaining hrefs -> deleted items for (VEventHrefSync hrefSync : syncByHref.values()) { logger.trace("Event deleted [{}]", hrefSync.getHref()); doEventDelete(con, hrefSync.getEventId(), false); } } calDao.updateRemoteSyncById(con, calendarId, newLastSync, newSyncToken); DbUtils.commitQuietly(con); } catch (Exception ex) { DbUtils.rollbackQuietly(con); throw new WTException(ex, "Error importing iCalendar"); } } } catch (DavException ex) { throw new WTException(ex, "CalDAV error"); } } } catch (SQLException | DAOException ex) { throw wrapException(ex); } finally { DbUtils.closeQuietly(con); pendingRemoteCalendarSyncs.remove(PENDING_KEY); } }
From source file:it.cnr.icar.eric.client.ui.thin.RegistryObjectCollectionBean.java
/**
 * Deletes the composed objects currently selected in the related-results list from the
 * current registry object, removing each via reflection ({@code remove<Type>(...)} on the
 * parent's JAXR interface). Three strategies are tried in order for every selected item:
 * the standard {@code javax.xml.registry.infomodel} interface, the provider implementation
 * class ({@code ...Impl}), and finally {@code ExtensibleObject.removeSlot(String)}-style
 * removal by name. Failures fall through to the next strategy; only the last failure is
 * logged.
 *
 * @return outcome token used for JSF navigation/display: {@code "publishSuccessful"},
 *         {@code "showSearchPanel"}, {@code "targetAssociaton"} (sic), {@code "failure"},
 *         or the authentication-status string when the user may not publish
 */
public String doDeleteOnCurrentComposedROB() {
    String status = "failure";
    int totalCount = 0;   // how many items were selected for deletion
    int successCount = 0; // how many were actually removed
    if (!isUserAllowedToPublish()) {
        status = RegistryBrowser.getInstance().getAuthenticationStatus();
    } else {
        getCurrentRegistryObjectBean().setFormUpdateIgnored(false);
        HashSet<RegistryObjectBean> robsToDelete = new HashSet<RegistryObjectBean>();
        if (relatedSearchResults != null && relatedSearchResults.size() > 0) {
            Iterator<RegistryObjectBean> itr = relatedSearchResults.iterator();
            RegistryObject ro = currentRegistryObject.getRegistryObject();
            while (itr.hasNext()) {
                RegistryObjectBean rob = itr.next();
                if (rob.isRelatedSelected()) {
                    totalCount++;
                    Method m = null;
                    String method = null;
                    Class<?>[] args = new Class[1];
                    Class<?> clazz = null;
                    Object[] composedObject = null;
                    String parentClassName = null;
                    String parentType = null;
                    String composedClassName = null;
                    String composedType = null;
                    try {
                        // Strategy 1: use reflection against the standard JAXR infomodel
                        // interface of the parent to remove the composed object.
                        //Class clazz = javax.xml.registry.infomodel.RegistryObject.class;
                        parentType = currentRegistryObject.getObjectType();
                        // ClassificationNode has no infomodel interface of its own; it is
                        // exposed as Concept in JAXR.
                        if (parentType.equals("ClassificationNode")) {
                            parentType = "Concept";
                        }
                        parentClassName = "javax.xml.registry.infomodel." + parentType;
                        clazz = this.getClass().getClassLoader().loadClass(parentClassName);

                        // Get composed object to remove: prefer the non-registry payload
                        // (e.g. a Slot) over the registry object itself.
                        Object nonRO = rob.getNonRegistryObject();
                        composedObject = new Object[1];
                        if (nonRO != null) {
                            composedObject[0] = nonRO;
                        } else {
                            composedObject[0] = rob.getRegistryObject();
                        }

                        // Get class of composed object (same ClassificationNode->Concept mapping).
                        composedType = rob.getObjectType();
                        if (composedType.equals("ClassificationNode")) {
                            composedType = "Concept";
                        }
                        composedClassName = "javax.xml.registry.infomodel." + composedType;
                        args[0] = this.getClass().getClassLoader().loadClass(composedClassName);
                        method = "remove" + composedType;
                        // The JAXR API names child-removal methods differently for
                        // scheme/concept and organization hierarchies.
                        if ((parentType.equalsIgnoreCase("ClassificationScheme")
                                || parentType.equalsIgnoreCase("Concept"))
                                && method.equalsIgnoreCase("removeConcept")) {
                            method = "removeChildConcept";
                        }
                        if (parentType.equalsIgnoreCase("Organization")
                                && method.equalsIgnoreCase("removeOrganization")) {
                            method = "removeChildOrganization";
                        }
                        boolean canDelete = true;
                        // An organization's primary contact must not be removed.
                        if (parentType.equalsIgnoreCase("Organization")
                                && method.equalsIgnoreCase("removeUser")) {
                            User primaryContact = ((OrganizationImpl) currentRegistryObject
                                    .getRegistryObject()).getPrimaryContact();
                            if ((primaryContact.getKey().getId())
                                    .equals(rob.getRegistryObject().getKey().getId())) {
                                canDelete = false;
                                append(WebUIResourceBundle.getInstance()
                                        .getString("message.cannotDeletePrimaryContact"));
                            }
                        }
                        // Associations may only be removed from their source object.
                        if (composedType.equalsIgnoreCase("Association")
                                && method.equalsIgnoreCase("removeAssociation")) {
                            if (nonRO instanceof Association) {
                                if (!rob.getRegistryObject().getKey().getId()
                                        .equals(((Association) nonRO).getSourceObject().getKey().getId())) {
                                    canDelete = false;
                                    append(WebUIResourceBundle.getInstance()
                                            .getString("message.cannotDeleteTargetAssociation"));
                                    status = "targetAssociaton";
                                }
                            }
                        }
                        if (canDelete) {
                            m = clazz.getMethod(method, args);
                            m.invoke(ro, composedObject);
                            if (composedType.equalsIgnoreCase("User")) {
                                currentRegistryObject.removeRelatedObject(composedObject[0]);
                            } else {
                                prepareToDelete(rob, robsToDelete, composedObject);
                            }
                            handleSavesToDrilldownObject(composedObject[0]);
                            itr.remove();
                            successCount++;
                            status = "publishSuccessful";
                        }
                    } catch (Throwable t) {
                        // Strategy 2: try the JAXR provider implementation classes.
                        try {
                            parentClassName = "it.cnr.icar.eric.client.xml.registry.infomodel."
                                    + parentType + "Impl";
                            clazz = this.getClass().getClassLoader().loadClass(parentClassName);

                            Object nonRO = rob.getNonRegistryObject();
                            composedObject = new Object[1];
                            if (nonRO != null) {
                                composedObject[0] = nonRO;
                            } else {
                                composedObject[0] = rob.getRegistryObject();
                            }

                            // Get class of composed object
                            composedType = rob.getObjectType();
                            composedClassName = "javax.xml.registry.infomodel." + composedType;
                            args[0] = this.getClass().getClassLoader().loadClass(composedClassName);
                            method = "remove" + composedType;
                            boolean canDelete = true;
                            // Same source-object restriction for associations as above.
                            if (composedType.equalsIgnoreCase("Association")
                                    && method.equalsIgnoreCase("removeAssociation")) {
                                if (nonRO instanceof Association) {
                                    if (!rob.getRegistryObject().getKey().getId().equals(
                                            ((Association) nonRO).getSourceObject().getKey().getId())) {
                                        canDelete = false;
                                        append(WebUIResourceBundle.getInstance()
                                                .getString("message.cannotDeleteTargetAssociation"));
                                        status = "targetAssociaton";
                                    }
                                }
                            }
                            if (canDelete) {
                                m = clazz.getMethod(method, args);
                                m.invoke(ro, composedObject);
                                prepareToDelete(rob, robsToDelete, composedObject);
                                handleSavesToDrilldownObject(composedObject[0]);
                                itr.remove();
                                successCount++;
                                status = "publishSuccessful";
                            }
                        } catch (Throwable t3) {
                            // Strategy 3: fall back to ExtensibleObject removal by name
                            // (e.g. removeSlot(String)) using the Slot's name as argument.
                            try {
                                parentClassName = "javax.xml.registry.infomodel.ExtensibleObject";
                                clazz = this.getClass().getClassLoader().loadClass(parentClassName);
                                args[0] = String.class;
                                m = clazz.getMethod(method, args);
                                Object objArgs[] = new Object[1];
                                objArgs[0] = ((Slot) composedObject[0]).getName();
                                m.invoke(ro, objArgs);
                                prepareToDelete(rob, robsToDelete, composedObject);
                                handleSavesToDrilldownObject(composedObject[0]);
                                itr.remove();
                                successCount++;
                                status = "publishSuccessful";
                            } catch (Throwable t2) {
                                // All three strategies failed: log and continue with the
                                // next selected item.
                                log.error(WebUIResourceBundle.getInstance()
                                        .getString("message.CouldNotDeleteComposedObject"), t2);
                            }
                        }
                    }
                }
            }
            storePseduoComposedObjects(robsToDelete);
            if (robsToDelete.size() > 0) {
                relatedSearchResults.removeAll(robsToDelete);
            }
        }
        //No Objects were selected for deletion
        if (totalCount == 0) {
            status = "showSearchPanel";
            append(WebUIResourceBundle.getInstance().getString("message.noObjectsDeleted"));
        } else if (totalCount != 0 && status.equals("targetAssociaton")) {
            status = "showSearchPanel";
        } else {
            //All objects successfully deleted, or partial failure
            if (status.equals("publishSuccessful")) {
                append(WebUIResourceBundle.getInstance().getString("message.objectsSuccessfulyDeleted",
                        new Object[] { new Integer(successCount) }));
                if (successCount < totalCount) {
                    append(WebUIResourceBundle.getInstance().getString("message.someObjectsFailedDeletion",
                            new Object[] { new Integer(totalCount - successCount) }));
                }
            }
            //All objects failed deletion
            else {
                append(WebUIResourceBundle.getInstance().getString("message.someObjectsFailedDeletion",
                        new Object[] { new Integer(totalCount - successCount) }));
            }
        }
    }
    return status;
}
From source file:com.mindcognition.mindraider.tools.Checker.java
public static void checkAndFixRepository() { logger.debug("Repository integrity check..."); // {{debug}} // fields//w ww.j av a 2s. c o m HashSet<ResourceDescriptor> allOutlines = new HashSet<ResourceDescriptor>(); OutlineCustodian outlineCustodian = MindRaider.outlineCustodian; int fixes = 0; int totalConcepts = 0; // labels (folders) RDF Model labelsRdfModel = MindRaider.labelCustodian.getRdfModel(); // folders.rdf.xml ResourceDescriptor[] labelDescriptors = MindRaider.labelCustodian.getLabelDescriptors(); if (!ArrayUtils.isEmpty(labelDescriptors)) { for (ResourceDescriptor labelDescriptor : labelDescriptors) { String labelUri = labelDescriptor.getUri(); // check whether [label]/folder.xml exists (eventually re-create it) StatusBar.show("Checking label XML resource: " + labelUri); Resource labelXmlResource = MindRaider.labelCustodian.get(labelUri); if (labelXmlResource == null) { try { StatusBar.show("Fixing label XML resource: " + labelUri); // create directory String labelDirectory = MindRaider.labelCustodian.createLabelDirectory(labelUri); // resource Resource resource = MindRaider.labelCustodian .createXmlResourceForLabel(labelDescriptor.getLabel(), labelUri); MindRaider.labelCustodian.addOutlinesGroupToLabelXmlResource(resource); resource.toXmlFile(MindRaider.labelCustodian.getLabelXmlResourceFileName(labelDirectory)); // label resource doesn't exist => must be re-created from RDF ResourceDescriptor[] outlineDescriptors = MindRaider.labelCustodian .getOutlineDescriptors(labelUri); if (outlineDescriptors != null && outlineDescriptors.length > 0) { for (int i = 0; i < outlineDescriptors.length; i++) { MindRaider.labelCustodian.addOutlineToLabelXmlResourceAndSave(labelUri, outlineDescriptors[i].getUri()); System.out.println("Fixing label XML resource: " + labelUri + " -> " + outlineDescriptors[i].getUri()); ++fixes; } } } catch (Exception ee) { logger.debug("Unable to fix label: " + labelDescriptor.getUri(), ee); // {{debug}} } } // folder.rdf.xml 
ResourceDescriptor[] outlineDescriptors = MindRaider.labelCustodian.getOutlineDescriptors(labelUri); if (outlineDescriptors != null) { for (ResourceDescriptor outlineDescriptor : outlineDescriptors) { if (!allOutlines.contains(outlineDescriptor)) { allOutlines.add(outlineDescriptor); StatusBar.show("Checking outline: " + outlineDescriptor.getLabel() + " (" + outlineDescriptor.getUri() + ")"); logger.debug(" Outline: '" + outlineDescriptor.getLabel() + "', " + outlineDescriptor.getUri()); // {{debug}} Model outlineRdfModel; OutlineResource outlineResource; Resource outlineXmlResource; String outlineModelFilename; String outlineResourceFilename; try { // outline's RDF (notebook.rdf.xml) outlineModelFilename = outlineCustodian.getModelFilenameByDirectory( outlineCustodian.getOutlineDirectory(outlineDescriptor.getUri())); logger.debug(" RDF: " + outlineModelFilename); outlineRdfModel = RdfModel.loadModel(outlineModelFilename, false); // detect whether it is active outline if (MindRaider.outlineCustodian.getActiveOutlineResource() != null && MindRaider.outlineCustodian.getActiveOutlineResource().getUri() .equals(outlineDescriptor.getUri())) { //JOptionPane.showConfirmDialog(MindRaider.mainJFrame, "Fixing active outline: "+outlineDescriptor.getUri()); outlineRdfModel = MindRaider.spidersGraph.getRdfModel().getModel(); } if (outlineRdfModel == null) { // RDF model doesn't exist - such outline can not be restored, just delete it final String fixMessage = "Fix: removing broken outline '" + outlineDescriptor.getLabel() + "'"; StatusBar.show(fixMessage); System.out.println(fixMessage); // check that outline is NOT BROKEN - otherwise standard functions will not // be able to discard and delete it com.hp.hpl.jena.rdf.model.Resource orphan = labelsRdfModel .getResource(outlineDescriptor.getUri()); if (orphan != null) { logger.debug(" Orphan outline found: " + outlineDescriptor.getUri()); // {{debug}} if (RdfModel.getLabel(labelsRdfModel, orphan) == null) { logger.debug(" ... 
having no label"); // {{debug}} if (RdfModel.getHref(labelsRdfModel, orphan) == null) { logger.debug(" ... having no href"); // {{debug}} // if it has no HREF, then fix it -> standard functions will delete that String relativePath = MindRaider.profile .getRelativePath(outlineCustodian .getOutlineDirectory(outlineDescriptor.getUri())); RdfModel.setHref(orphan, relativePath + OutlineCustodian.FILENAME_XML_RESOURCE); } } } MindRaider.labelCustodian.discardOutline(outlineDescriptor.getUri()); MindRaider.labelCustodian.deleteOutline(outlineDescriptor.getUri()); ++fixes; continue; } // outline's XML (notebook.xml) outlineResourceFilename = outlineCustodian.getResourceFilenameByDirectory( outlineCustodian.getOutlineDirectory(outlineDescriptor.getUri())); logger.debug(" XML: " + outlineResourceFilename); outlineXmlResource = new Resource(outlineResourceFilename); outlineResource = new OutlineResource(outlineXmlResource); } catch (Exception e) { logger.debug("Unable to load outline" + outlineDescriptor.getUri(), e); // TODO fix it continue; } //logger.debug(" Loaded: "+outlineRdfModel+" / "+outlineXmlResource); // {{debug}} // FIX outline label: on rename changed only in folder's RDF, not xml name (and notebook's XML) // FIX rename: notebook name is changed on rename ONLY in the labels (folders) RDF model, // in here it is propagated to notebook's XML and (notebook.xml) and RDF (notebook.rdf.xml) String outlineLabel = MindRaider.labelCustodian .getOutlineDescriptor(outlineDescriptor.getUri()).getLabel(); String outlineComment = "'" + outlineLabel + "' outline."; if (outlineLabel != null && outlineLabel.length() > 0) { if (!outlineLabel.equals(outlineResource.getLabel())) { fixes++; StatusBar.show("Fixing title and description: " + outlineDescriptor.getUri()); System.out.println(" Fix: inconsistent outline's title & description (" + outlineDescriptor.getUri() + ")"); // {{debug}} logger.debug(" Label's RDF : " + outlineLabel); // {{debug}} logger.debug(" Outline's XML: " + 
outlineResource.getLabel()); // {{debug}} if (outlineResource.getLabelProperty() != null) { outlineResource.getLabelProperty().setLabelContent(outlineLabel); } if (outlineResource.getAnnotationProperty() != null) { outlineResource.getAnnotationProperty().setAnnotation(outlineComment); } try { outlineResource.save(); } catch (Exception e) { logger.debug("Unable to save outline XML resource", e); // {{debug}} } } } com.hp.hpl.jena.rdf.model.Resource rdfResource = outlineRdfModel .getResource(outlineDescriptor.getUri()); if (rdfResource != null) { rdfResource.removeAll(RDFS.label); rdfResource.addProperty(RDFS.label, outlineLabel); rdfResource.removeAll(RDFS.comment); rdfResource.addProperty(RDFS.comment, outlineComment); RdfModel.saveModel(outlineRdfModel, outlineModelFilename); } // iterate outline's concepts final SimpleSelector simpleSelector = new SimpleSelector(null, RDF.type, outlineRdfModel.createResource(MindRaiderConstants.MR_OWL_CLASS_CONCEPT)); StmtIterator conceptsIterator = outlineRdfModel.listStatements(simpleSelector); while (conceptsIterator.hasNext()) { ++totalConcepts; Statement statement = (Statement) conceptsIterator.next(); final com.hp.hpl.jena.rdf.model.Resource conceptRdfResource = statement .getSubject(); //logger.debug(" Concept: " +totalConcepts+" "+conceptRdfResource.getURI()); // TODO check whether the concept is in notebook.xml // load note resource [concept name].xml try { ConceptResource noteResource = MindRaider.noteCustodian .get(outlineDescriptor.getUri(), conceptRdfResource.getURI()); // TODO check and fix note's attachments: if attachment is in the resource and not in RDF, add it to RDF logger.debug("Attachments:"); AttachmentProperty[] attachments = noteResource.getAttachments(); if (attachments != null && attachments.length > 0) { for (AttachmentProperty attachmentProperty : attachments) { logger.debug(" " + attachmentProperty.getUrl()); StmtIterator listStatements = outlineRdfModel .listStatements(conceptRdfResource, 
outlineRdfModel.getProperty( MindRaiderConstants.MR_RDF_NS, "attachment"), attachmentProperty.getUrl()); if (!listStatements.hasNext()) { //JOptionPane.showConfirmDialog(MindRaider.mainJFrame, "Missing attach in RDF: "+attachmentProperty.getUrl()); conceptRdfResource.addProperty( outlineRdfModel.getProperty( MindRaiderConstants.MR_RDF_NS + "attachment"), attachmentProperty.getUrl()); RdfModel.saveModel(outlineRdfModel, outlineModelFilename); ++fixes; } } } } catch (Exception e) { // there is a problem (file doesn't exist, it is empty file, ...) // fix: build *.xml resource from RDF and write it back // rdf contains: label/timestamp/comment/?attachments ignored for now String label = RdfModel.getLabel(outlineRdfModel, conceptRdfResource); String comment = RdfModel.getComment(outlineRdfModel, conceptRdfResource); long timestamp = RdfModel.getTimestamp(outlineRdfModel, conceptRdfResource); try { ConceptResource conceptResource = new ConceptResource( new Resource(MindRaider.profile.getProfileName(), timestamp, 1, System.currentTimeMillis(), conceptRdfResource.getURI())); conceptResource.resource.getMetadata() .setMindRaiderVersion(MindRaider.getVersion()); conceptResource.resource.getMetadata() .setType(MindRaiderConstants.MR_OWL_CLASS_CONCEPT); conceptResource.resource.getData().addProperty(new LabelProperty(label)); conceptResource.resource.getData() .addProperty(new AnnotationProperty(comment)); conceptResource.resource.getData() .addProperty(new AnnotationContentTypeProperty( MindRaiderConstants.MR_OWL_CONTENT_TYPE_PLAIN_TEXT)); conceptResource.resource.getData().addProperty( new NotebookProperty(new URI(outlineDescriptor.getUri()))); conceptResource.resource .toXmlFile(MindRaider.noteCustodian.getConceptResourceFilename( outlineDescriptor.getUri(), conceptRdfResource.getURI())); } catch (Exception exception) { logger.error("Unable to ressurect concept from RDF - deleting " + conceptRdfResource.getURI(), e); // TODO purge concept from the filesystem (a robust 
implementation that expects // that [concept].xml is not there/is locked // TODO do purge } } } // TODO FIX: remove concepts from notebook.xml.rdf, that do not exist in notebook.xml OR rather create notebook.xml from what's in RDF // TODO FIX: concepts in RDF vs. notebook.xml vs. on the filesystem // TODO run discarded :-) MindRaider.outlineCustodian.getDiscardedConceptDescriptors(outlineDescriptor.getUri()); } } } } } // TODO rebuild search index (after low level changes, FTS index must be updated) // clean memory Runtime.getRuntime().runFinalization(); Runtime.getRuntime().gc(); logger.debug("Total outlines: " + allOutlines.size()); // {{debug}} logger.debug("Fixed problems: " + fixes); // {{debug}} StatusBar.show("Check & fix results: total outlines " + allOutlines.size() + ", total concepts " + totalConcepts + ", fixed problems " + fixes); }
From source file:com.industrieit.ohr.OHRJavassister.java
/**
 * Generates, compiles (via Javassist) and loads a concrete {@code ohr.<name>} subclass of the
 * given abstract bean class, backing every bean property with storage at a fixed byte offset
 * (read/written through the generated {@code ohread*}/{@code ohwrite*} calls inherited from
 * {@code OHRBase}). Results are memoized in {@code processed2}, so each class is processed once.
 *
 * Layout rules visible below: offset starts at 8 (header word); inline strings/char-sequences
 * and inline arrays consume their annotated allocation; primitives consume their natural size;
 * every other property is stored as an 8-byte reference handle whose offset is recorded in
 * {@code handleOffsets}. Properties are laid out in the order given by {@code propertyOrdering}.
 *
 * @param cll abstract bean class to reify (or an already-prefixed {@code ohr.} class)
 * @return the generated and loaded implementation class
 * @throws RuntimeException wrapping any reflective/Javassist failure, or if {@code cll} is not abstract
 */
public static Class ohr(Class cll) {
    try {
        //System.out.println("++++++++++ "+cll.getName());
        /*if (cll.getName().startsWith("ohr.")) {
            throw new RuntimeException(cll.getName());
        }*/
        // Memoized: return the previously generated implementation if we have one.
        if (processed2.containsKey(cll)) {
            return processed2.get(cll);
        }
        // Byte offsets of all reference-handle slots in the generated layout.
        HashSet<Long> handleOffsets = new HashSet<Long>();
        String cnam = cll.getName();
        if (!cnam.startsWith("ohr.")) {
            cnam = "ohr." + cll.getName();
            //cnam=cnam.substring(4);
        }
        // The source (abstract) class is always the un-prefixed name.
        Class cl = Class.forName(cnam.substring(4));
        int clnumber = incrementClsCounter();
        // Indices (into pds) of properties annotated @Owned.
        List<Integer> owned = new ArrayList<Integer>();
        //remove the old implementation if its around from another process
        String fname = "target/classes/" + cnam.replace(".", "/") + ".class";
        System.out.println("deleted" + fname + " " + (new File(fname).delete()));
        if (!Modifier.isAbstract(cl.getModifiers())) {
            throw new RuntimeException("not an abstract class " + cl.getName());
        }
        System.out.println("processing ohr " + cnam);
        CtClass bc = getDefault().getCtClass(cl.getName());
        CtClass cc = getDefault().makeClass(cnam, bc);
        // Bodies of the generated internalInit() method and instance initializer, built up
        // as source text while properties are laid out.
        StringBuilder initBuilder = new StringBuilder();
        initBuilder.append("public void internalInit() {\n");
        StringBuilder constructBuilder = new StringBuilder();
        constructBuilder.append("{");
        String intname = OHRBase.class.getName();
        System.out.println("intername is " + intname);
        CtClass ci = getDefault().getCtClass(intname);
        CtClass extern = getDefault().getCtClass(Externalizable.class.getName());
        //cc.addInterface(ci);
        cc.setInterfaces(new CtClass[] { ci, extern });
        cc.setSuperclass(bc);
        //add base implmenetation methods and properties
        setBaseMixinsPre(cc, false);
        //first long for id and other stuff
        long offset = 8;
        BeanInfo bi = Introspector.getBeanInfo(cl);
        PropertyDescriptor[] pds = bi.getPropertyDescriptors();
        // Outer loop fixes the layout order: one pass per entry in propertyOrdering, picking
        // only the properties whose type matches that pass (Object.class pass = references).
        for (int co = 0; co < propertyOrdering.length; co++) {
            Class cprop = propertyOrdering[co];
            for (int i = 0; i < pds.length; i++) {
                // Get property name
                String propName = pds[i].getName();
                if (propName.equals("class")) {
                    continue;
                }
                String typ = pds[i].getPropertyType().getName();
                Class type = pds[i].getPropertyType();
                //if (propName.startsWith("fath"))
                //PL.pl("[[[[["+type+" "+propName+" "+cprop);
                if (cprop == Object.class) {
                    //handle refs only
                    // Skip everything that an earlier, more specific pass already handled.
                    if (type.isPrimitive()) {
                        continue;
                    }
                    if (type == String.class) {
                        continue;
                    }
                    if (type == CharSequence.class) {
                        continue;
                    }
                    if (type == OHRLongArray.class) {
                        continue;
                    }
                    if (type == OHRIntArray.class) {
                        continue;
                    }
                    if (type == OHRShortArray.class) {
                        continue;
                    }
                    if (type == OHRByteArray.class) {
                        continue;
                    }
                    if (type == OHRBooleanArray.class) {
                        continue;
                    }
                    if (type == OHRDoubleArray.class) {
                        continue;
                    }
                    if (type == OHRFloatArray.class) {
                        continue;
                    }
                } else if (cprop != type) {
                    //PL.pl("skipping "+type+" "+cprop);
                    continue;
                }
                //PL.pl("[[[[[ " + type + " - " + propName + " - " + cprop);
                //System.out.println("--prop--" + propName);
                String rname = pds[i].getReadMethod().getName();
                String wname = null;
                if (pds[i].getWriteMethod() != null) {
                    wname = pds[i].getWriteMethod().getName();
                }
                // reif* flags decide whether the generated accessor is actually added;
                // *cons is a consistency-mode suffix appended to the ohread/ohwrite call name.
                boolean reifread = isMethodReifAnnotated(pds[i].getReadMethod());
                boolean reifwrite = isMethodReifAnnotated(pds[i].getWriteMethod());
                String wcons = getConsistencyAsString(pds[i].getWriteMethod());
                String rcons = getConsistencyAsString(pds[i].getReadMethod());
                System.out.println("TYPE " + pds[i].getPropertyType().getName() + " "
                        + pds[i].getPropertyType().getInterfaces());
                if (pds[i].getPropertyType() == String.class && isInlineString(pds[i])) {
                    //NOTE - only for inline strings - normal strings are handled as extrefs like any other object
                    System.out.println("ITS An inline string!!!!");
                    int length = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).length();
                    boolean trim = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class)
                            .trimOverflow();
                    boolean ascii = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).asciiOnly();
                    String wmeth = "public void " + wname + "(" + typ + " o) { ohwritestr" + wcons + "("
                            + offset + "l,o," + length + "," + trim + "," + ascii + "); }";
                    //add setter
                    CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                    cc.addMethod(wmethod);
                    System.out.println(wmeth);
                    String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadstr" + rcons
                            + "(" + offset + "l," + ascii + "); }";
                    //add setter
                    CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                    //rmethod.getMethodInfo().addAttribute(attr);
                    cc.addMethod(rmethod);
                    System.out.println(rmeth);
                    int bytesperchar = ascii ? 1 : 2;
                    //pad to 16 bits
                    int ll = 4 + length * bytesperchar;
                    if (ll % 2 != 0) {
                        ll++;
                    }
                    offset += ll;
                    //lebgth marker as well as unicode 16 encoded characters
                } else if (pds[i].getPropertyType() == CharSequence.class && isInlineString(pds[i])) {
                    //NOTE - only for inline strings - normal strings are handled as extrefs like any other object
                    System.out.println("ITS An inline charsequence!!!!");
                    int length = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).length();
                    boolean trim = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class)
                            .trimOverflow();
                    boolean ascii = pds[i].getWriteMethod().getAnnotation(InlineStringReify.class).asciiOnly();
                    String wmeth = "public void " + wname + "(" + typ + " o) { ohwritestr" + wcons + "("
                            + offset + "l,o," + length + "," + trim + "," + ascii + "); }";
                    //add setter
                    CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                    cc.addMethod(wmethod);
                    System.out.println(wmeth);
                    // NOTE: reader differs from the String case — ohreadcs returns a CharSequence view.
                    String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadcs" + rcons
                            + "(" + offset + "l," + ascii + "); }";
                    //add setter
                    CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                    //rmethod.getMethodInfo().addAttribute(attr);
                    cc.addMethod(rmethod);
                    System.out.println(rmeth);
                    int bytesperchar = ascii ? 1 : 2;
                    //pad to 8 byte boundary!
                    int ll = (int) Math.ceil((4.0 + length * bytesperchar) / 8) * 8;
                    offset += ll;
                    //lebgth marker as well as unicode 16 encoded characters
                } else if ((pds[i].getPropertyType() == OHRLongArray.class
                        || pds[i].getPropertyType() == OHRIntArray.class
                        || pds[i].getPropertyType() == OHRShortArray.class
                        || pds[i].getPropertyType() == OHRByteArray.class
                        || pds[i].getPropertyType() == OHRFloatArray.class
                        || pds[i].getPropertyType() == OHRDoubleArray.class
                        || pds[i].getPropertyType() == OHRBooleanArray.class)
                        && pds[i].getReadMethod().isAnnotationPresent(InlineArrayReify.class)) {
                    // Inline (fixed-length) array property: pick the element width and the
                    // concrete wrapper class for the array type.
                    int bitsperitem = 0;
                    String cldef = null;
                    Class at = pds[i].getPropertyType();
                    boolean unchecked = pds[i].getReadMethod().isAnnotationPresent(UncheckedBoundsXXX.class);
                    if (at == OHRLongArray.class) {
                        bitsperitem = 8 * 8;
                        cldef = LongInlineOHRArray.class.getName();
                    } else if (at == OHRIntArray.class) {
                        bitsperitem = 4 * 8;
                        //cldef=IntInlineOHRArrayCop.class.getName();
                        if (unchecked) {
                            cldef = IntInlineOHRArrayUnchecked.class.getName();
                        } else {
                            cldef = IntInlineOHRArray.class.getName();
                        }
                    }
                    if (at == OHRDoubleArray.class) {
                        bitsperitem = 8 * 8;
                        cldef = DoubleInlineOHRArray.class.getName();
                    }
                    if (at == OHRFloatArray.class) {
                        bitsperitem = 4 * 8;
                        cldef = FloatInlineOHRArray.class.getName();
                    }
                    if (at == OHRShortArray.class) {
                        bitsperitem = 2 * 8;
                        cldef = ShortInlineOHRArray.class.getName();
                    }
                    if (at == OHRByteArray.class) {
                        bitsperitem = 1 * 8;
                        cldef = ByteInlineOHRArray.class.getName();
                    }
                    if (at == OHRBooleanArray.class) {
                        bitsperitem = 1;
                        cldef = BooleanInlineOHRArray.class.getName();
                    }
                    //NOTE - only for inline strings - normal strings are handled as extrefs like any other object
                    System.out.println("ITS An inline array!!!!");
                    int length = pds[i].getReadMethod().getAnnotation(InlineArrayReify.class).length();
                    long bytealloc = OHRInlineArrayHandler.getGenericArrayAllocationSize(bitsperitem, length);
                    //PL.pl("byte allocation for logn array length "+length+" "+bytealloc);
                    CtClass ctc = getDefault().getCtClass(cldef);
                    String varname = "var" + i;
                    CtField cf = new CtField(ctc, varname, cc);
                    cf.setModifiers(Modifier.PRIVATE);
                    cc.addField(cf);
                    //add data to constructor
                    initBuilder.append(
                            "com.industrieit.ohr.OHRInlineArrayHandler.initialiseInlineGenericArray(this.basePtr+"
                                    + offset + "l," + length + "l," + bitsperitem + ");\n");
                    constructBuilder.append(varname + "=new " + cldef + "(this," + offset + "l);\n");
                    //+ "//this.basePtr"+offset+"l);");
                    //String wmeth = "public void " + wname + "(" + typ + " o) { throw new java.lang.RuntimeException(\"not supported\"); }";
                    //add setter
                    //CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                    //cc.addMethod(wmethod);
                    //System.out.println(wmeth);
                    String rmeth = "public " + typ + " " + rname + "() { return " + varname + "; }";
                    //add setter
                    CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                    //rmethod.getMethodInfo().addAttribute(attr);
                    cc.addMethod(rmethod);
                    System.out.println("||||||||" + rmeth + "|||||||||");
                    offset += bytealloc;
                } else if (pds[i].getPropertyType().isPrimitive()) {
                    //PL.pl("ITS A PRIMITIVE!");
                    // vv = byte width of the primitive at this layout pass (cprop == type here).
                    int vv = 0;
                    if (cprop == long.class) {
                        vv = 8;
                    }
                    if (cprop == double.class) {
                        vv = 8;
                    }
                    if (cprop == int.class) {
                        vv = 4;
                    }
                    if (cprop == float.class) {
                        vv = 4;
                    }
                    if (cprop == short.class) {
                        vv = 2;
                    }
                    if (cprop == byte.class) {
                        vv = 1;
                    }
                    // NOTE(review): boolean/char get vv == 0 here, i.e. zero bytes are reserved —
                    // confirm whether those types are excluded by propertyOrdering upstream.
                    System.out.println(
                            "for " + pds[i].getName() + " typ is " + pds[i].getPropertyType().getName());
                    String wmeth = "public void " + wname + "(" + typ + " o) { ohwrite" + wcons + "(" + offset
                            + "l,o); }";
                    //add setter
                    //ConstPool constpool = cc.getClassFile().getConstPool();
                    if (reifwrite) {
                        CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                        cc.addMethod(wmethod);
                        System.out.println("&&&&&&&" + wmeth);
                    }
                    String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohread" + typ
                            + rcons + "(" + offset + "l); }";
                    //add setter
                    //rmethod.getMethodInfo().addAttribute(attr);
                    if (reifread) {
                        CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                        cc.addMethod(rmethod);
                        System.out.println("&&&&&&&&&&&&&&&&&&&&&&&&&&&" + rmeth + vv);
                    }
                    offset += vv;
                } else {
                    // Fallback: any other object type is stored as an 8-byte reference handle.
                    System.out.println("ITS AN ASSUMED REIFY!!!");
                    if (pds[i].getWriteMethod().isAnnotationPresent(Owned.class)) {
                        owned.add(i);
                    }
                    //CtClass tc = getDefault().getCtClass(pds[i].getPropertyType().getName());
                    CtClass tc = getDefault().getCtClass(OHRBase.class.getName());
                    //String fnam="ohrt"+i;
                    //CtField f = new CtField(tc, fnam, cc);
                    //f.setModifiers(Modifier.PROTECTED);
                    //cc.addField(f);
                    //store by reify
                    //handleOffsets.add(offset);
                    String wmeth = "public void " + wname + "(" + typ + " o) { ohwritere" + wcons + "(" + offset
                            + "l,o); }";
                    //add setter
                    CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                    if (reifwrite) {
                        cc.addMethod(wmethod);
                    }
                    System.out.println(wmeth);
                    //String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadre(" + offset + "l," + typ + ".class); }";
                    String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadre" + rcons
                            + "(" + offset + "l); };";
                    //add setter
                    CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                    //rmethod.getMethodInfo().addAttribute(attr);
                    if (reifread) {
                        cc.addMethod(rmethod);
                    }
                    System.out.println(rmeth);
                    handleOffsets.add(offset);
                    offset += 8;
                }
                /* if (!isReif(type)) {
                    PL.pl(""+pds[i].getName()+" is a non reified handle!!!!");
                    //store by handle
                    handleOffsets.add(offset);
                    String wmeth = "public void " + wname + "(" + typ + " o) { ohwritehand(" + offset + "l,o); }";
                    //add setter
                    CtMethod wmethod = CtNewMethod.make(wmeth, cc);
                    if (reifwrite) {
                        cc.addMethod(wmethod);
                    }
                    System.out.println(wmeth);
                    String rmeth = "public " + typ + " " + rname + "() { return (" + typ + ") ohreadhand(" + offset + "l); }";
                    //add setter
                    CtMethod rmethod = CtNewMethod.make(rmeth, cc);
                    //rmethod.getMethodInfo().addAttribute(attr);
                    if (reifread) {
                        cc.addMethod(rmethod);
                    }
                    System.out.println(rmeth);
                }*/
            }
            //PL.pl("offset is "+offset);
        }
        //offset+=8;
        //ok create the get handleoffsets method
        //print out total byts allocated
        //PL.pl("%%%%%%%%%% TOTAL BYTES = " + offset);
        // Generate handleOffsets(): returns the recorded reference-slot offsets as a long[].
        StringBuilder sb = new StringBuilder();
        sb.append("public long[] handleOffsets() { ");
        sb.append("long a[] = new long[").append(handleOffsets.size()).append("];");
        int c = 0;
        for (long l : handleOffsets) {
            sb.append("a[").append(c).append("]=").append(l).append("l;");
            c++;
        }
        sb.append("return a; }");
        System.out.println(sb.toString());
        CtMethod om = CtNewMethod.make(sb.toString(), cc);
        cc.addMethod(om);
        // gsize(): final value of offset == total bytes allocated per instance.
        String sizem = "public long gsize() { return " + (offset) + "l; }";
        //PL.pl(sizem);
        CtMethod sm = CtNewMethod.make(sizem, cc);
        cc.addMethod(sm);
        //add clsid
        CtMethod cmid = CtNewMethod.make("public int ohclassId() { return " + clnumber + "; }", cc);
        cc.addMethod(cmid);
        setBaseMixinsPost(cc, false, owned, pds, constructBuilder, initBuilder);
        cc.writeFile("target/classes");
        /*for (Method me : cc.toClass().getDeclaredMethods()) {
            //test print, ok
            //System.out.println(me.getName());
        }*/
        // Load the freshly written class and inject the Unsafe instance into its static 'u' field.
        Class ppp = Class.forName(cnam);
        Field f = ppp.getDeclaredField("u");
        f.setAccessible(true);
        f.set(ppp.newInstance(), USafe.getUnsafe());
        //synchronized (mutex)
        //{
        // Memoize under both the source class and the generated class.
        processed2.put(cl, ppp);
        processed2.put(ppp, ppp);
        cls[clnumber] = ppp;
        return ppp;
        //}
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
From source file: imitationNLG.SFX.java
/**
 * Runs the trained attribute and word classifiers over {@code testingData}, generating a
 * realization for each instance, then scores the output (BLEU, smoothed BLEU, NIST, attribute
 * coverage, attribute-sequence distance) against the instances' reference realizations.
 * Cross-validation accumulators ({@code crossBLEU}, {@code crossNIST}, ...) and
 * {@code previousResults}/{@code previousBLEU} are updated as a side effect; metrics are
 * printed to stdout, and if {@code printResults} is set the predicted texts are written to a
 * per-epoch results file.
 *
 * @param classifierAttrs           per-predicate attribute classifiers
 * @param classifierWords           per-predicate, per-attribute word classifiers
 * @param trainingData              training instances (used by chooseNextValue)
 * @param testingData               instances to generate and evaluate on
 * @param availableAttributeActions candidate attribute actions per predicate
 * @param availableWordActions      candidate word actions per predicate and attribute
 * @param nGrams                    n-gram feature inventory for word instances
 * @param printResults              when true, also write predicted texts to a file
 * @param epoch                     epoch number, used only in the output file name
 * @return the (corpus-level) BLEU score of the generated texts
 */
public Double evaluateGeneration(HashMap<String, JAROW> classifierAttrs,
        HashMap<String, HashMap<String, JAROW>> classifierWords, ArrayList<DatasetInstance> trainingData,
        ArrayList<DatasetInstance> testingData, HashMap<String, HashSet<String>> availableAttributeActions,
        HashMap<String, HashMap<String, HashSet<Action>>> availableWordActions,
        HashMap<Integer, HashSet<String>> nGrams, boolean printResults, int epoch) {
    System.out.println("Evaluate argument generation ");
    int totalArgDistance = 0;
    ArrayList<ScoredFeaturizedTranslation<IString, String>> generations = new ArrayList<>();
    ArrayList<ArrayList<Action>> generationActions = new ArrayList<>();
    HashMap<ArrayList<Action>, DatasetInstance> generationActionsMap = new HashMap<>();
    ArrayList<ArrayList<Sequence<IString>>> finalReferences = new ArrayList<>();
    ArrayList<String> predictedStrings = new ArrayList<>();
    ArrayList<String> predictedStringMRs = new ArrayList<>();
    ArrayList<Double> attrCoverage = new ArrayList<>();
    ArrayList<ArrayList<String>> predictedAttrLists = new ArrayList<>();
    // Meaning representations already emitted once — used to de-duplicate the printed output.
    HashSet<HashMap<String, HashSet<String>>> mentionedAttrs = new HashSet<HashMap<String, HashSet<String>>>();
    for (DatasetInstance di : testingData) {
        String predicate = di.getMeaningRepresentation().getPredicate();
        ArrayList<Action> predictedActionList = new ArrayList<>();
        ArrayList<Action> predictedWordList = new ArrayList<>();
        //PHRASE GENERATION EVALUATION
        String predictedAttr = "";
        ArrayList<String> predictedAttrValues = new ArrayList<>();
        ArrayList<String> predictedAttributes = new ArrayList<>();
        // "attribute=value" pairs still to realize / already realized for this instance.
        HashSet<String> attrValuesToBeMentioned = new HashSet<>();
        HashSet<String> attrValuesAlreadyMentioned = new HashSet<>();
        HashMap<String, ArrayList<String>> valuesToBeMentioned = new HashMap<>();
        for (String attribute : di.getMeaningRepresentation().getAttributes().keySet()) {
            for (String value : di.getMeaningRepresentation().getAttributes().get(attribute)) {
                attrValuesToBeMentioned.add(attribute.toLowerCase() + "=" + value.toLowerCase());
            }
            valuesToBeMentioned.put(attribute,
                    new ArrayList<>(di.getMeaningRepresentation().getAttributes().get(attribute)));
        }
        if (attrValuesToBeMentioned.isEmpty()) {
            attrValuesToBeMentioned.add("empty=empty");
        }
        // Snapshot kept for the coverage computation after generation mutates the working set.
        HashSet<String> attrValuesToBeMentionedCopy = new HashSet<>(attrValuesToBeMentioned);
        // --- Attribute-sequence generation loop: predict attributes until END or cap reached.
        while (!predictedAttr.equals(SFX.TOKEN_END) && predictedAttrValues.size() < maxAttrRealizationSize) {
            if (!predictedAttr.isEmpty()) {
                attrValuesToBeMentioned.remove(predictedAttr);
            }
            Instance attrTrainingVector = SFX.this.createAttrInstance(predicate, "@TOK@", predictedAttrValues,
                    predictedActionList, attrValuesAlreadyMentioned, attrValuesToBeMentioned,
                    di.getMeaningRepresentation(), availableAttributeActions);
            if (attrTrainingVector != null) {
                Prediction predictAttr = classifierAttrs.get(predicate).predict(attrTrainingVector);
                if (predictAttr.getLabel() != null) {
                    predictedAttr = predictAttr.getLabel().trim();
                    String predictedValue = "";
                    if (!predictedAttr.equals(SFX.TOKEN_END)) {
                        predictedValue = chooseNextValue(predictedAttr, attrValuesToBeMentioned, trainingData);
                        // If the top-scoring attribute has no realizable value, fall back to the
                        // next-best scored attribute until one works or we give up with END.
                        HashSet<String> rejectedAttrs = new HashSet<String>();
                        while (predictedValue.isEmpty() && !predictedAttr.equals(SFX.TOKEN_END)) {
                            rejectedAttrs.add(predictedAttr);
                            predictedAttr = SFX.TOKEN_END;
                            double maxScore = -Double.MAX_VALUE;
                            for (String attr : predictAttr.getLabel2Score().keySet()) {
                                if (!rejectedAttrs.contains(attr) && (Double
                                        .compare(predictAttr.getLabel2Score().get(attr), maxScore) > 0)) {
                                    maxScore = predictAttr.getLabel2Score().get(attr);
                                    predictedAttr = attr;
                                }
                            }
                            if (!predictedAttr.equals(SFX.TOKEN_END)) {
                                predictedValue = chooseNextValue(predictedAttr, attrValuesToBeMentioned,
                                        trainingData);
                            }
                        }
                    }
                    if (!predictedAttr.equals(SFX.TOKEN_END)) {
                        predictedAttr += "=" + predictedValue;
                    }
                    predictedAttrValues.add(predictedAttr);
                    String attribute = predictedAttrValues.get(predictedAttrValues.size() - 1).split("=")[0];
                    String attrValue = predictedAttrValues.get(predictedAttrValues.size() - 1);
                    predictedAttributes.add(attrValue);
                    //GENERATE PHRASES
                    // --- Word-generation loop for the chosen attribute=value pair.
                    if (!attribute.equals(SFX.TOKEN_END)) {
                        if (classifierWords.get(predicate).containsKey(attribute)) {
                            String predictedWord = "";
                            boolean isValueMentioned = false;
                            String valueTBM = "";
                            if (attrValue.contains("=")) {
                                valueTBM = attrValue.substring(attrValue.indexOf('=') + 1);
                            }
                            if (valueTBM.isEmpty()) {
                                isValueMentioned = true;
                            }
                            ArrayList<String> subPhrase = new ArrayList<>();
                            // NOTE(review): loop guard uses RoboCup.TOKEN_END while everything
                            // else uses SFX.TOKEN_END — presumably the same constant; confirm.
                            while (!predictedWord.equals(RoboCup.TOKEN_END)
                                    && predictedWordList.size() < maxWordRealizationSize) {
                                ArrayList<String> predictedAttributesForInstance = new ArrayList<>();
                                for (int i = 0; i < predictedAttributes.size() - 1; i++) {
                                    predictedAttributesForInstance.add(predictedAttributes.get(i));
                                }
                                if (!predictedAttributes.get(predictedAttributes.size() - 1)
                                        .equals(attrValue)) {
                                    predictedAttributesForInstance
                                            .add(predictedAttributes.get(predictedAttributes.size() - 1));
                                }
                                Instance wordTrainingVector = createWordInstance(predicate,
                                        new Action("@TOK@", attrValue), predictedAttributesForInstance,
                                        predictedActionList, isValueMentioned, attrValuesAlreadyMentioned,
                                        attrValuesToBeMentioned, di.getMeaningRepresentation(),
                                        availableWordActions.get(predicate), nGrams, false);
                                if (wordTrainingVector != null) {
                                    if (classifierWords.get(predicate) != null) {
                                        if (classifierWords.get(predicate).get(attribute) != null) {
                                            Prediction predictWord = classifierWords.get(predicate)
                                                    .get(attribute).predict(wordTrainingVector);
                                            if (predictWord.getLabel() != null) {
                                                predictedWord = predictWord.getLabel().trim();
                                                predictedActionList.add(new Action(predictedWord, attrValue));
                                                if (!predictedWord.equals(SFX.TOKEN_END)) {
                                                    subPhrase.add(predictedWord);
                                                    predictedWordList
                                                            .add(new Action(predictedWord, attrValue));
                                                }
                                            } else {
                                                predictedWord = SFX.TOKEN_END;
                                                predictedActionList.add(new Action(predictedWord, attrValue));
                                            }
                                        } else {
                                            predictedWord = SFX.TOKEN_END;
                                            predictedActionList.add(new Action(predictedWord, attrValue));
                                        }
                                    }
                                }
                                // Check whether the value of the current pair has now been
                                // realized (directly via a delex X-token, or via an aligned
                                // phrase suffix from valueAlignments).
                                if (!isValueMentioned) {
                                    if (!predictedWord.equals(SFX.TOKEN_END)) {
                                        if (predictedWord.startsWith(SFX.TOKEN_X)
                                                && (valueTBM.matches("\"[xX][0-9]+\"")
                                                        || valueTBM.matches("[xX][0-9]+")
                                                        || valueTBM.startsWith(SFX.TOKEN_X))) {
                                            isValueMentioned = true;
                                        } else if (!predictedWord.startsWith(SFX.TOKEN_X)
                                                && !(valueTBM.matches("\"[xX][0-9]+\"")
                                                        || valueTBM.matches("[xX][0-9]+")
                                                        || valueTBM.startsWith(SFX.TOKEN_X))) {
                                            String valueToCheck = valueTBM;
                                            // Generic yes/no-style values are disambiguated by
                                            // prefixing the attribute name before the alignment lookup.
                                            if (valueToCheck.equals("no") || valueToCheck.equals("yes")
                                                    || valueToCheck.equals("yes or no")
                                                    || valueToCheck.equals("none")
                                                    || valueToCheck.equals("dont_care")
                                                    || valueToCheck.equals("empty")) {
                                                if (attribute.contains("=")) {
                                                    valueToCheck = attribute.replace("=", ":");
                                                } else {
                                                    valueToCheck = attribute + ":" + valueTBM;
                                                }
                                            }
                                            if (!valueToCheck.equals("empty:empty")
                                                    && valueAlignments.containsKey(valueToCheck)) {
                                                for (ArrayList<String> alignedStr : valueAlignments
                                                        .get(valueToCheck).keySet()) {
                                                    if (endsWith(subPhrase, alignedStr)) {
                                                        isValueMentioned = true;
                                                        break;
                                                    }
                                                }
                                            }
                                        }
                                    }
                                    if (isValueMentioned) {
                                        attrValuesAlreadyMentioned.add(attrValue);
                                        attrValuesToBeMentioned.remove(attrValue);
                                    }
                                }
                                // Also detect if some OTHER pending attribute=value pair got
                                // mentioned by the generated words, and mark it done too.
                                String mentionedAttrValue = "";
                                if (!predictedWord.startsWith(SFX.TOKEN_X)) {
                                    for (String attrValueTBM : attrValuesToBeMentioned) {
                                        if (attrValueTBM.contains("=")) {
                                            String value = attrValueTBM
                                                    .substring(attrValueTBM.indexOf('=') + 1);
                                            if (!(value.matches("\"[xX][0-9]+\"") || value.matches("[xX][0-9]+")
                                                    || value.startsWith(SFX.TOKEN_X))) {
                                                String valueToCheck = value;
                                                if (valueToCheck.equals("no") || valueToCheck.equals("yes")
                                                        || valueToCheck.equals("yes or no")
                                                        || valueToCheck.equals("none")
                                                        || valueToCheck.equals("dont_care")
                                                        || valueToCheck.equals("empty")) {
                                                    valueToCheck = attrValueTBM.replace("=", ":");
                                                }
                                                if (!valueToCheck.equals("empty:empty")
                                                        && valueAlignments.containsKey(valueToCheck)) {
                                                    for (ArrayList<String> alignedStr : valueAlignments
                                                            .get(valueToCheck).keySet()) {
                                                        if (endsWith(subPhrase, alignedStr)) {
                                                            mentionedAttrValue = attrValueTBM;
                                                            break;
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                                if (!mentionedAttrValue.isEmpty()) {
                                    // NOTE(review): adds attrValue (the current pair) to the
                                    // already-mentioned set but removes mentionedAttrValue (the
                                    // other pair) — verify the asymmetry is intentional.
                                    attrValuesAlreadyMentioned.add(attrValue);
                                    attrValuesToBeMentioned.remove(mentionedAttrValue);
                                }
                            }
                            // Force a terminating END action when the word cap was hit.
                            if (predictedWordList.size() >= maxWordRealizationSize && !predictedActionList
                                    .get(predictedActionList.size() - 1).getWord().equals(SFX.TOKEN_END)) {
                                predictedWord = SFX.TOKEN_END;
                                predictedActionList.add(new Action(predictedWord,
                                        predictedAttrValues.get(predictedAttrValues.size() - 1)));
                            }
                        } else {
                            // No word classifier for this attribute: emit an END action only.
                            String predictedWord = SFX.TOKEN_END;
                            predictedActionList.add(new Action(predictedWord, attrValue));
                        }
                    }
                } else {
                    predictedAttr = SFX.TOKEN_END;
                }
            }
        }
        ArrayList<String> predictedAttrs = new ArrayList<>();
        for (String attributeValuePair : predictedAttrValues) {
            predictedAttrs.add(attributeValuePair.split("=")[0]);
        }
        // Strip START/END markers before post-processing.
        ArrayList<Action> cleanActionList = new ArrayList<Action>();
        for (Action action : predictedActionList) {
            if (!action.getWord().equals(SFX.TOKEN_END) && !action.getWord().equals(SFX.TOKEN_START)) {
                cleanActionList.add(action);
            }
        }
        // Insert punctuation actions wherever a learned surrounding-word pattern matches.
        for (int i = 0; i < cleanActionList.size(); i++) {
            for (ArrayList<Action> surrounds : punctPatterns.keySet()) {
                boolean matches = true;
                int m = 0;
                for (int s = 0; s < surrounds.size(); s++) {
                    if (surrounds.get(s) != null) {
                        if (i + s < cleanActionList.size()) {
                            if (!cleanActionList.get(i + s).getWord().equals(surrounds.get(s)
                                    .getWord()) /*|| !cleanActionList.get(i).getAttribute().equals(surrounds.get(s).getAttribute())*/) {
                                matches = false;
                                s = surrounds.size();
                            } else {
                                m++;
                            }
                        } else {
                            matches = false;
                            s = surrounds.size();
                        }
                    }
                }
                if (matches && m > 0) {
                    // NOTE(review): insertion at i + 2 is a hard-coded pattern position —
                    // confirm all punctPatterns keys place the punctuation two tokens in.
                    cleanActionList.add(i + 2, punctPatterns.get(surrounds));
                }
            }
        }
        // Re-lexicalize: map X-tokens back through the instance's delex map, and track
        // the realized attribute sequence (collapsing consecutive duplicates).
        String predictedString = "";
        ArrayList<String> predictedAttrList = new ArrayList<String>();
        HashSet<String> redundants = new HashSet<String>();
        for (Action action : cleanActionList) {
            if (action.getWord().startsWith(SFX.TOKEN_X)) {
                predictedString += di.getMeaningRepresentation().getDelexMap().get(action.getWord()) + " ";
                //predictedString += "x ";
                if (di.getMeaningRepresentation().getDelexMap().get(action.getWord()) == null
                        || di.getMeaningRepresentation().getDelexMap().get(action.getWord()).equals("null")) {
                    redundants.add(action.getWord());
                }
            } else {
                predictedString += action.getWord() + " ";
            }
            if (predictedAttrList.isEmpty()) {
                predictedAttrList.add(action.getAttribute());
            } else if (!predictedAttrList.get(predictedAttrList.size() - 1).equals(action.getAttribute())) {
                predictedAttrList.add(action.getAttribute());
            }
        }
        predictedAttrLists.add(predictedAttrList);
        // Coverage error: (redundant + missing attribute mentions) / |gold attribute set|.
        // NOTE(review): int size() compared against the double literal 0.0 — works, but odd.
        if (attrValuesToBeMentionedCopy.size() != 0.0) {
            double redundAttrs = 0.0;
            double missingAttrs = 0.0;
            for (String attr : predictedAttrList) {
                if (!attrValuesToBeMentionedCopy.contains(attr)) {
                    redundAttrs += 1.0;
                }
            }
            for (String attr : attrValuesToBeMentionedCopy) {
                if (!predictedAttrList.contains(attr)) {
                    missingAttrs += 1.0;
                }
            }
            double attrSize = (double) attrValuesToBeMentionedCopy.size();
            attrCoverage.add((redundAttrs + missingAttrs) / attrSize);
        }
        // Terminal punctuation: question mark for query predicates, period otherwise.
        if (predicate.startsWith("?")) {
            predictedString = predictedString.trim() + "?";
        } else {
            predictedString = predictedString.trim() + ".";
        }
        predictedString = predictedString.replaceAll("\\?", " \\? ").replaceAll(":", " : ")
                .replaceAll("\\.", " \\. ").replaceAll(",", " , ").replaceAll(" ", " ").trim();
        if (!mentionedAttrs.contains(di.getMeaningRepresentation().getAttributes())) {
            predictedStrings.add(predictedString);
            predictedStringMRs.add(di.getMeaningRepresentation().getMRstr());
            mentionedAttrs.add(di.getMeaningRepresentation().getAttributes());
        }
        Sequence<IString> translation = IStrings
                .tokenize(NISTTokenizer.tokenize(predictedString.toLowerCase()));
        ScoredFeaturizedTranslation<IString, String> tran = new ScoredFeaturizedTranslation<>(translation, null,
                0);
        generations.add(tran);
        generationActions.add(predictedActionList);
        generationActionsMap.put(predictedActionList, di);
        // Build the reference set for BLEU/NIST from this instance's gold realizations.
        ArrayList<Sequence<IString>> references = new ArrayList<>();
        for (ArrayList<Action> realization : di.getEvalRealizations()) {
            String cleanedWords = "";
            for (Action nlWord : realization) {
                if (!nlWord.equals(new Action(SFX.TOKEN_START, ""))
                        && !nlWord.equals(new Action(SFX.TOKEN_END, ""))) {
                    if (nlWord.getWord().startsWith(SFX.TOKEN_X)) {
                        cleanedWords += di.getMeaningRepresentation().getDelexMap().get(nlWord.getWord())
                                + " ";
                    } else {
                        cleanedWords += nlWord.getWord() + " ";
                    }
                }
            }
            cleanedWords = cleanedWords.trim();
            if (!cleanedWords.endsWith(".")) {
                cleanedWords += ".";
            }
            cleanedWords = cleanedWords.replaceAll("\\?", " \\? ").replaceAll(":", " : ")
                    .replaceAll("\\.", " \\. ").replaceAll(",", " , ").replaceAll(" ", " ").trim();
            references.add(IStrings.tokenize(NISTTokenizer.tokenize(cleanedWords)));
        }
        finalReferences.add(references);
        //EVALUATE ATTRIBUTE SEQUENCE
        // Gold sequences come from every test instance with the same meaning representation.
        HashSet<ArrayList<String>> goldAttributeSequences = new HashSet<>();
        for (DatasetInstance di2 : testingData) {
            if (di2.getMeaningRepresentation().getAttributes()
                    .equals(di.getMeaningRepresentation().getAttributes())) {
                goldAttributeSequences.addAll(di2.getEvalMentionedAttributeSequences().values());
            }
        }
        // Distance = sum of positional offsets of greedily matched attributes, plus a
        // 100 penalty per unmatched prediction and 1000 per gold attribute never predicted;
        // keep the minimum over all gold sequences.
        int minTotArgDistance = Integer.MAX_VALUE;
        for (ArrayList<String> goldArgs : goldAttributeSequences) {
            int totArgDistance = 0;
            HashSet<Integer> matchedPositions = new HashSet<>();
            for (int i = 0; i < predictedAttrs.size(); i++) {
                if (!predictedAttrs.get(i).equals(SFX.TOKEN_START)
                        && !predictedAttrs.get(i).equals(SFX.TOKEN_END)) {
                    int minArgDistance = Integer.MAX_VALUE;
                    int minArgPos = -1;
                    for (int j = 0; j < goldArgs.size(); j++) {
                        if (!matchedPositions.contains(j)) {
                            if (goldArgs.get(j).equals(predictedAttrs.get(i))) {
                                int argDistance = Math.abs(j - i);
                                if (argDistance < minArgDistance) {
                                    minArgDistance = argDistance;
                                    minArgPos = j;
                                }
                            }
                        }
                    }
                    if (minArgPos == -1) {
                        totArgDistance += 100;
                    } else {
                        matchedPositions.add(minArgPos);
                        totArgDistance += minArgDistance;
                    }
                }
            }
            ArrayList<String> predictedCopy = (ArrayList<String>) predictedAttrs.clone();
            for (String goldArg : goldArgs) {
                if (!goldArg.equals(SFX.TOKEN_END)) {
                    boolean contained = predictedCopy.remove(goldArg);
                    if (!contained) {
                        totArgDistance += 1000;
                    }
                }
            }
            if (totArgDistance < minTotArgDistance) {
                minTotArgDistance = totArgDistance;
            }
        }
        totalArgDistance += minTotArgDistance;
    }
    previousResults = generationActions;
    crossAvgArgDistances.add(totalArgDistance / (double) testingData.size());
    // Corpus-level metrics over all generated texts vs. their reference sets.
    NISTMetric NIST = new NISTMetric(finalReferences);
    BLEUMetric BLEU = new BLEUMetric(finalReferences, 4, false);
    BLEUMetric BLEUsmooth = new BLEUMetric(finalReferences, 4, true);
    Double nistScore = NIST.score(generations);
    Double bleuScore = BLEU.score(generations);
    Double bleuSmoothScore = BLEUsmooth.score(generations);
    double finalCoverage = 0.0;
    for (double c : attrCoverage) {
        finalCoverage += c;
    }
    finalCoverage /= (double) attrCoverage.size();
    crossNIST.add(nistScore);
    crossBLEU.add(bleuScore);
    crossBLEUSmooth.add(bleuSmoothScore);
    System.out.println("Avg arg distance: \t" + totalArgDistance / (double) testingData.size());
    System.out.println("NIST: \t" + nistScore);
    System.out.println("BLEU: \t" + bleuScore);
    System.out.println("COVERAGE: \t" + finalCoverage);
    System.out.println("g: " + generations);
    System.out.println("attr: " + predictedAttrLists);
    System.out.println("BLEU smooth: \t" + bleuSmoothScore);
    previousBLEU = bleuScore;
    if (printResults) {
        BufferedWriter bw = null;
        File f = null;
        try {
            f = new File("random_SFX" + dataset + "TextsAfter" + (epoch) + "_"
                    + JDAggerForSFX.earlyStopMaxFurtherSteps + "_" + JDAggerForSFX.p
                    + "epochsTESTINGDATA.txt");
        } catch (NullPointerException e) {
            System.err.println("File not found." + e);
        }
        try {
            bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f)));
        } catch (FileNotFoundException e) {
            System.err.println("Error opening file for writing! " + e);
        }
        try {
            bw.write("BLEU:" + bleuScore);
            bw.write("\n");
        } catch (IOException e) {
            System.err.println("Write error!");
        }
        for (int i = 0; i < predictedStrings.size(); i++) {
            try {
                // Write the String to the file. (translated from Greek: "Grafoume to String sto arxeio")
                //SFX HOTEL TEXTS WITH LOLS -> 3
                //SFX RESTAURANT TEXTS WITH LOLS -> 5
                bw.write("MR;" + predictedStringMRs.get(i).replaceAll(";", ",") + ";");
                if (dataset.equals("hotel")) {
                    bw.write("LOLS_SFHOT;");
                } else {
                    bw.write("LOLS_SFRES;");
                }
                //bw.write("@@srcdoc@@" + (i + 1));
                /*String out = predictedStrings.get(i).replaceAll(" i ", " I ").replaceAll(" -ly ",
                "ly ").replaceAll(" s ", "s ").replaceAll("\\?", " \\? ").replaceAll(":", " : ").replaceAll("\\.", " \\. ").replaceAll(",", " , ").replaceAll(" ", " ");
                out = out.substring(0, 1).toUpperCase() + out.substring(1);
                bw.write(out + ";");
                if (dataset.equals("hotel")) {
                    bw.write("WEN_SFHOT;");
                } else {
                    bw.write("WEN_SFRES;");
                }
                if (!wenDaToGen.containsKey(predictedStringMRs.get(i).trim().toLowerCase())) {
                    System.out.println(wenDaToGen.keySet());
                    System.out.println(predictedStringMRs.get(i).trim().toLowerCase());
                    System.exit(0);
                }
                out = wenDaToGen.get(predictedStringMRs.get(i).trim().toLowerCase()).replaceAll(" i ", " I ").replaceAll(" -ly ", "ly ").replaceAll(" s ", "s ").replaceAll("\\?", " \\? ").replaceAll(":", " : ").replaceAll("\\.", " \\. ").replaceAll(",", " , ").replaceAll(" ", " ");
                out = out.substring(0, 1).toUpperCase() + out.substring(1);
                bw.write(out + ";");*/
                //bw.write("@@judgeFluency@@-1");
                //bw.write("@@judgeInform@@-1");
                //bw.write("@@judgeQuality@@-1");
                bw.write("\n");
            } catch (IOException e) {
                System.err.println("Write error!");
            }
        }
        try {
            bw.close();
        } catch (IOException e) {
            System.err.println("Error closing file.");
        } catch (Exception e) {
        }
    }
    return bleuScore;
}