List of usage examples for the com.google.gson.Gson constructor
public Gson()
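All of the entries below share the same pattern: construct a Gson instance with the no-argument constructor and call toJson/fromJson on it. Before the individual source files, here is a minimal, self-contained sketch of that pattern (the Person class and its field values are hypothetical, chosen only for illustration):

import com.google.gson.Gson;

public class GsonDefaultExample {

    // Hypothetical data class, used only for this illustration.
    static class Person {
        String name = "Ada";
        int age = 36;
    }

    public static void main(String[] args) {
        // new Gson() creates an instance with default configuration;
        // use GsonBuilder instead when custom settings are needed.
        Gson gson = new Gson();

        String json = gson.toJson(new Person());          // {"name":"Ada","age":36}
        Person copy = gson.fromJson(json, Person.class);  // back to an object

        System.out.println(json + " -> " + copy.name);
    }
}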
From source file:at.stefanproell.PersistentIdentifierRestfulService.OrganizationsService.java
License:Apache License
/**
 * List all PIDs per organization.
 *
 * @return
 */
@GET
@Path("/list/{prefix}")
@Produces("application/json")
public String getAllPIDsFromOrganization(@PathParam("prefix") int organizationPrefix) {
    Organization org = null;
    List<PersistentIdentifier> listOfIdentifiers;
    Map<Integer, String> listOfNumberedIdentifiers = new HashMap<Integer, String>();
    if (organizationPrefix > 0) {
        if (this.pidAPI.checkOrganizationPrefix(organizationPrefix)) {
            this.logger.info("The prefix exists.");
            org = this.pidAPI.getOrganizationObjectByPrefix(organizationPrefix);
            listOfIdentifiers = this.pidAPI.listAllPIDsOfOrganization(org);
            this.logger.info("listofidentifiers size " + listOfIdentifiers.size());
            int pidCounter = 0;
            for (PersistentIdentifier pid : listOfIdentifiers) {
                pidCounter++;
                listOfNumberedIdentifiers.put(pidCounter, pid.getIdentifier());
            }
            Gson gson = new Gson();
            String json = gson.toJson(listOfNumberedIdentifiers);
            return json;
        }
        return "Prefix does not exist";
    }
    return "Invalid organizational prefix";
}
From source file:at.supp.JsonTokenUtil2.java
License:Apache License
public static String toJson(JsonObject json) {
    return new Gson().toJson(json);
}
From source file:at.theduke.spector.client.Configuration.java
License:Open Source License
private static ConfigData readConfiguration(String configPath) {
    String content = "";
    try {
        String line = null;
        File configFile = new File(configPath);
        if (!configFile.exists()) {
            throw new FileNotFoundException();
        }
        BufferedReader reader = new BufferedReader(new FileReader(configFile));
        while ((line = reader.readLine()) != null) {
            content += line;
        }
        reader.close();
    } catch (FileNotFoundException e) {
        return new ConfigData();
    } catch (IOException e) {
        return new ConfigData();
    }

    Gson gson = new Gson();
    ConfigData data = gson.fromJson(content, ConfigData.class);

    // Fill in default values.
    return data;
}
From source file:at.theduke.spector.client.Configuration.java
License:Open Source License
private static boolean writeConfiguration(String configPath, ConfigData data) {
    File file = new File(configPath);
    try {
        BufferedWriter writer = new BufferedWriter(new FileWriter(file));
        Gson gson = new Gson();
        writer.write(gson.toJson(data));
        writer.close();
    } catch (IOException e) {
        return false;
    }
    return true;
}
From source file:at.theduke.spector.notify.Configuration.java
License:Open Source License
public static ConfigData readConfiguration() {
    String content = "";
    try {
        String line = null;
        BufferedReader reader = new BufferedReader(new FileReader(getConfigFilePath()));
        while ((line = reader.readLine()) != null) {
            content += line;
        }
    } catch (FileNotFoundException e) {
        return null;
    } catch (IOException e) {
        return null;
    }

    Gson gson = new Gson();
    ConfigData data = gson.fromJson(content, ConfigData.class);
    return data;
}
From source file:at.theduke.spector.notify.Configuration.java
License:Open Source License
public static boolean writeConfiguration(ConfigData data) {
    String path = getConfigFilePath();
    try {
        BufferedWriter writer = new BufferedWriter(new FileWriter(path));
        Gson gson = new Gson();
        writer.write(gson.toJson(data));
        writer.close();
    } catch (IOException e) {
        return false;
    }
    return true;
}
From source file:at.tomtasche.reader.background.UpLoader.java
License:Open Source License
@Override
public Document loadInBackground() {
    if (uri == DocumentLoader.URI_INTRO) {
        cancelLoad();
        return null;
    }

    String type = getContext().getContentResolver().getType(uri);
    if (type == null)
        type = URLConnection.guessContentTypeFromName(uri.toString());

    if (type == null) {
        try {
            InputStream stream = getContext().getContentResolver().openInputStream(uri);
            try {
                type = URLConnection.guessContentTypeFromStream(stream);
            } finally {
                stream.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    if (type != null && (type.equals("text/html") || type.equals("text/plain") || type.equals("image/png")
            || type.equals("image/jpeg"))) {
        try {
            document = new Document(null);
            document.addPage(new Page("Document", new URI(uri.toString()), 0));

            return document;
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }

    String name = uri.getLastPathSegment();
    try {
        name = URLEncoder.encode(name, "UTF-8");
        type = URLEncoder.encode(type, "UTF-8");
    } catch (Exception e) {
    }

    HttpClient httpclient = new DefaultHttpClient();
    HttpPost httppost = new HttpPost(SERVER_URL + "file?name=" + name + "&type=" + type);

    InputStream stream = null;
    try {
        stream = getContext().getContentResolver().openInputStream(uri);

        InputStreamEntity reqEntity = new InputStreamEntity(stream, -1);
        httppost.setEntity(reqEntity);

        HttpResponse response = httpclient.execute(httppost);
        if (response.getStatusLine().getStatusCode() == 200) {
            Map<String, Object> container = new Gson().fromJson(EntityUtils.toString(response.getEntity()),
                    Map.class);
            String key = container.get("key").toString();

            URI viewerUri = URI.create("https://docs.google.com/viewer?embedded=true&url="
                    + URLEncoder.encode(SERVER_URL + "file?key=" + key, "UTF-8"));

            document = new Document(null);
            document.addPage(new Page("Document", viewerUri, 0));
        } else {
            throw new RuntimeException("server couldn't handle request");
        }
    } catch (Throwable e) {
        e.printStackTrace();

        lastError = e;
    } finally {
        try {
            if (stream != null) {
                stream.close();
            }
        } catch (IOException e) {
        }

        httpclient.getConnectionManager().shutdown();
    }

    return document;
}
From source file:at.treedb.backup.Export.java
License:Open Source License
/**
 * Dumps all entities of the database, or just all entities of a domain, to
 * the archive.
 *
 * @param clazz
 *            entity to be dumped
 * @param domain
 *            optional domain, or {@code null} for dumping all entities of
 *            the database to the archive
 * @param backupType
 *            type of the backup
 * @throws Exception
 */
private void dumpClass(DBexportInfo dbInfo, Class<?> clazz, Domain domain, DBexportInfo.BACKUP_TYPE backupType)
        throws Exception {
    String archivePath = ROOT_DIR + clazz.getSimpleName() + "/";
    createDirEntry(archivePath, date);
    Iterator iter = new Iterator(dao, clazz, domain, status, entityFetchThreshold);
    int blockCounter = 0;
    switch (serialization) {
    case JSON:
        gson = new Gson();
        break;
    case XML:
        xstream = new XStream();
        break;
    default:
        break;
    }
    dbInfo.addEntityCount(clazz.getSimpleName() + ":" + iter.getEntitiesNum());
    ArrayList<Field> fieldList = new ArrayList<Field>();
    boolean userFields = false;
    if (backupType == DBexportInfo.BACKUP_TYPE.DOMAIN) {
        userFields = collectUserFields(clazz, fieldList);
        if (clazz.getSuperclass() != null) {
            collectUserFields(clazz.getSuperclass(), fieldList);
        }
    }
    if (fieldList.size() > 0) {
        userFields = true;
    }
    boolean singleStep = false;
    if (clazz.equals(CIblob.class)) {
        singleStep = true;
    }
    while (iter.hasNext()) {
        List<Object> l = null;
        if (!singleStep) {
            l = iter.next();
            l = detachBinaryData(dao, clazz, l);
        } else {
            ArrayList<Object> list = new ArrayList<Object>();
            for (int i = 0; i < entityFetchThreshold; ++i) {
                l = iter.nextObject();
                if (l == null) {
                    break;
                }
                l = detachBinaryData(dao, clazz, l);
                list.add(l.get(0));
            }
            if (list.size() == 0) {
                continue;
            }
            l = list;
        }
        SevenZArchiveEntry entry = new SevenZArchiveEntry();
        entry = new SevenZArchiveEntry();
        entry.setName(archivePath + blockCounter++);
        entry.setAccessDate(date);
        entry.setCreationDate(date);
        entry.setLastModifiedDate(date);
        entry.setDirectory(false);
        sevenZOutput.putArchiveEntry(entry);
        byte[] byteStream = null;
        switch (serialization) {
        case JSON:
            byteStream = gson.toJson(l).getBytes();
            break;
        case XML:
            byteStream = xstream.toXML(l).getBytes();
            break;
        case BINARY:
            ByteArrayOutputStream bs = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream(bs);
            out.writeObject(l);
            out.close();
            byteStream = bs.toByteArray();
            break;
        }
        // collect all user references
        if (userFields) {
            for (Object o : l) {
                for (Field f : fieldList) {
                    f.setAccessible(true);
                    userIDs.add(f.getInt(o));
                }
            }
        }
        sevenZOutput.write(byteStream);
        sevenZOutput.closeArchiveEntry();
        if (l instanceof CIblob) {
            if (dao.isJPA() && dao.getJPAimpl() == DAO.JPA_IMPL.ECLIPSELINK) {
                dao.clear();
            } else {
                dao.detach(l);
            }
            ((CIblob) l).resetBlob();
        }
        l = null;
    }
    iter.close();
}
From source file:at.treedb.backup.Export.java
License:Open Source License
/**
 * Dumps all users to the archive.
 *
 * @param dbInfo
 *            database information
 * @param privacy
 *            privacy export filter
 * @throws Exception
 */
private void dumpUser(DBexportInfo dbInfo, EnumSet<User.PRIVACY> privacy) throws Exception {
    String archivePath = ROOT_DIR + User.class.getSimpleName() + "/";
    createDirEntry(archivePath, date);
    Iterator iter = new Iterator(dao, User.class, null, null, entityFetchThreshold);
    int blockCounter = 0;
    switch (serialization) {
    case JSON:
        gson = new Gson();
        break;
    case XML:
        xstream = new XStream();
        break;
    default:
        break;
    }
    int counter = 0;
    while (iter.hasNext()) {
        List<Object> l = iter.next();
        ArrayList<Object> newList = new ArrayList<Object>();
        for (Object o : l) {
            User u = (User) o;
            // user must be member of the domain and ACTIVE or deleted
            if (userIDs.contains(u.getDBid())
                    && (u.getHistStatus() == STATUS.ACTIVE || u.getHistStatus() == STATUS.DELETED)) {
                User clone = (User) u.clone();
                UpdateMap map = new UpdateMap(User.Fields.class);
                map.addString(User.Fields.password, null);
                // apply privacy filter
                if (privacy == null || !privacy.contains(User.PRIVACY.NAME)) {
                    Pseudonym fName = Pseudonym.generatePseudonym(Gender.RANDOM);
                    map.addString(User.Fields.firstName, fName.getFirstName());
                    map.addString(User.Fields.lastName, fName.getLastName());
                    map.addString(User.Fields.displayName, fName.getFirstName() + " " + fName.getLastName());
                    map.addString(User.Fields.nickName, null);
                    map.addString(User.Fields.lastName, null);
                    map.addString(User.Fields.email, fName.getEmail());
                }
                if (privacy == null || !privacy.contains(User.PRIVACY.PHONE)) {
                    map.addString(User.Fields.phone, null);
                }
                if (privacy == null || !privacy.contains(User.PRIVACY.MOBILE)) {
                    map.addString(User.Fields.mobile, null);
                }
                if (privacy == null || !privacy.contains(User.PRIVACY.USERID)) {
                    map.addString(User.Fields.userId, null);
                }
                clone.simpleUpdate(map, true);
                // exported user is VIRTUAL
                clone.setHistStatus(STATUS.VIRTUAL);
                clone.setCreatedBy(0);
                clone.setModifiedBy(0);
                newList.add(clone);
                ++counter;
            }
        }
        l = newList;
        SevenZArchiveEntry entry = new SevenZArchiveEntry();
        entry = new SevenZArchiveEntry();
        entry.setName(archivePath + blockCounter++);
        entry.setAccessDate(date);
        entry.setCreationDate(date);
        entry.setLastModifiedDate(date);
        entry.setDirectory(false);
        sevenZOutput.putArchiveEntry(entry);
        byte[] byteStream = null;
        switch (serialization) {
        case JSON:
            byteStream = gson.toJson(l).getBytes();
            break;
        case XML:
            byteStream = xstream.toXML(l).getBytes();
            break;
        case BINARY:
            ByteArrayOutputStream bs = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream(bs);
            out.writeObject(l);
            out.close();
            byteStream = bs.toByteArray();
            break;
        }
        sevenZOutput.write(byteStream);
        sevenZOutput.closeArchiveEntry();
    }
    dbInfo.addEntityCount(User.class.getSimpleName() + ":" + counter);
}
From source file:at.treedb.backup.Import.java
License:Open Source License
/**
 * Full restore of a database.
 *
 * @throws Exception
 */
@SuppressWarnings("unchecked")
public void fullRestore() throws Exception {
    archiveMap = readEntries();
    archiveInfo = new String(readData("dbinfo.xml"), "UTF8");
    xstream = new XStream();
    DBexportInfo dbInfo = (DBexportInfo) xstream.fromXML(archiveInfo);
    serialization = dbInfo.getSerialization();
    switch (serialization) {
    case JSON:
        gson = new Gson();
        break;
    case XML:
        if (xstream == null) {
            xstream = new XStream();
        }
        break;
    default:
        break;
    }
    DAOiface dao = DAO.getDAO();
    // dummy array for binary data
    byte[] dummyArray = new byte[1];
    // first loop - restore data without detached data
    for (Class<?> c : DBentities.getClasses()) {
        if (ignoreClass(c)) {
            continue;
        }
        HashMap<Integer, Integer> idMap = new HashMap<Integer, Integer>();
        classIdMap.put(c, idMap);
        HashMap<Integer, Integer> detachMap = new HashMap<Integer, Integer>();
        detachIdMap.put(c, detachMap);
        HashMap<Integer, Integer> historicMap = new HashMap<Integer, Integer>();
        historicIdMap.put(c, historicMap);
        int dataBlockCounter = 0;
        while (true) {
            String path = "treeDB/" + c.getSimpleName() + "/" + dataBlockCounter;
            if (!archiveMap.containsKey(path)) {
                break; // no entities available for this class
            }
            byte[] data = readData(path);
            // detached binary data available?
            ArrayList<Field> fieldList = new ArrayList<Field>();
            for (Field f : c.getDeclaredFields()) {
                if (f.getAnnotation(Detach.class) != null) {
                    f.setAccessible(true);
                    fieldList.add(f);
                }
            }
            // data de-serialization
            List<Base> list = null;
            switch (serialization) {
            case JSON:
                ParameterizedTypeImpl pti = new ParameterizedTypeImpl(List.class, new Type[] { c }, null);
                list = gson.fromJson(new String(data), pti);
                break;
            case XML:
                list = (List<Base>) xstream.fromXML(new String(data));
                break;
            case BINARY:
                ByteArrayInputStream is = new ByteArrayInputStream(data);
                ObjectInputStream input = new ObjectInputStream(is);
                list = (List<Base>) input.readObject();
                input.close();
                break;
            default:
                break;
            }
            // traverse entities
            for (Base b : list) {
                if (!fieldList.isEmpty()) {
                    for (Field f : fieldList) {
                        // set a dummy binary array to avoid
                        // @Column(nullable = false) conflicts
                        f.set(b, dummyArray);
                    }
                }
                int oldDBid = b.getDBid();
                dao.beginTransaction();
                Base.restore(dao, b);
                dao.endTransaction();
                // store the pair old DB ID / new DB ID
                idMap.put(oldDBid, b.getDBid());
                if (historicMap.get(b.getHistId()) == null) {
                    historicMap.put(b.getHistId(), b.getDBid());
                }
                if (!fieldList.isEmpty()) {
                    // store the pair new DB ID / old DB ID
                    detachMap.put(b.getDBid(), oldDBid);
                }
                if (c.equals(DBfile.class)) {
                    fileIdMap.put(b.getDBid(), oldDBid);
                }
            }
            ++dataBlockCounter;
        }
    }
    // second loop - adjust DB IDs
    for (Class<?> c : DBentities.getClasses()) {
        if (ignoreClass(c)) {
            continue;
        }
        dao.beginTransaction();
        Iterator iter = new Iterator(dao, c, null, null, entityFetchThreshold);
        adjustFields(dao, c, iter);
        dao.endTransaction();
    }
}