List of usage examples for com.mongodb.gridfs.GridFSInputFile.save()
@Override public void save()
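Before the per-project examples below, here is a minimal sketch of the pattern they all share: obtain a GridFS bucket, create a GridFSInputFile from a File or an InputStream, set any filename, content type, or extra metadata, and call save() to write both the chunks and the files document. The host, port, database name, bucket name, and file path below are placeholders, not taken from any of the projects that follow; MongoClient is used here instead of the deprecated Mongo class seen in some of the examples, and both work with the legacy GridFS API.

import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSInputFile;

import java.io.File;
import java.io.IOException;

public class GridFSSaveExample {
    public static void main(String[] args) throws IOException {
        // Placeholder connection settings; adjust to your environment.
        MongoClient mongo = new MongoClient("localhost", 27017);
        try {
            DB db = mongo.getDB("exampleDb");        // legacy DB handle required by GridFS
            GridFS gridFs = new GridFS(db, "files"); // "files" is the bucket name

            // Create the GridFS entry from a local file (path is a placeholder).
            GridFSInputFile gfsFile = gridFs.createFile(new File("/tmp/report.pdf"));
            gfsFile.setFilename("report.pdf");
            gfsFile.setContentType("application/pdf");
            gfsFile.put("uploadedBy", "example-user"); // arbitrary extra metadata field

            gfsFile.save(); // writes the chunks and the files document to MongoDB

            System.out.println("Stored file with id " + gfsFile.getId());
        } finally {
            mongo.close();
        }
    }
}

save() with no arguments uses the chunk size already set on the file; the overload save(long chunkSize) lets callers override it per call, and setChunkSize(long) can be used before saving, as the IntegrationTestHelper example below does.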
From source file:com.card.loop.xyz.dao.LearningElementDAO.java
public boolean addFile(LearningElement le) throws UnknownHostException, IOException {
    File file = new File(AppConfig.USER_VARIABLE + le.getFilePath() + le.getFilename());
    Mongo mongo = new Mongo(AppConfig.mongodb_host, AppConfig.mongodb_port);
    DB db = mongo.getDB(AppConfig.DATABASE_LOOP);
    GridFS gf = new GridFS(db, "le.meta");
    GridFSInputFile gfsFile = gf.createFile(file);
    gfsFile.setFilename(le.getFilename());
    gfsFile.setContentType(le.getContentType());
    gfsFile.put("_class", "com.card.loop.xyz.model.LearningElement");
    gfsFile.put("title", le.getTitle());
    gfsFile.put("filePath", le.getFilePath());
    gfsFile.put("subject", le.getSubject());
    gfsFile.put("description", le.getDescription());
    gfsFile.put("downloads", le.getDownloads());
    gfsFile.put("rating", le.getRating());
    gfsFile.put("comments", le.getComments());
    gfsFile.put("uploadedBy", le.getUploadedBy());
    gfsFile.put("status", le.getStatus());
    gfsFile.put("rev", le.getRev());
    gfsFile.put("type", le.getType());
    gfsFile.save(); // store the document and its chunks in MongoDB
    return true;
}
From source file:com.card.loop.xyz.dao.LearningElementDAO.java
public boolean addFile(LearningElement le, File file) throws UnknownHostException, IOException {
    Mongo mongo = new Mongo(AppConfig.mongodb_host, AppConfig.mongodb_port);
    DB db = mongo.getDB(AppConfig.DATABASE_LOOP);
    GridFS gf = new GridFS(db, "le.meta");
    GridFSInputFile gfsFile = gf.createFile(file);
    gfsFile.setFilename(le.getFilename());
    gfsFile.setContentType(le.getContentType());
    gfsFile.put("_class", "com.card.loop.xyz.model.LearningElement");
    gfsFile.put("title", le.getTitle());
    gfsFile.put("filePath", le.getFilePath());
    gfsFile.put("subject", le.getSubject());
    gfsFile.put("description", le.getDescription());
    gfsFile.put("downloads", le.getDownloads());
    gfsFile.put("rating", le.getRating());
    gfsFile.put("comments", le.getComments());
    gfsFile.put("uploadedBy", le.getUploadedBy());
    gfsFile.put("status", le.getStatus());
    gfsFile.put("rev", le.getRev());
    gfsFile.put("type", le.getType());
    gfsFile.save(); // store the document and its chunks in MongoDB
    return true;
}
From source file:com.cloudbees.demo.beesshop.service.MongoDbFileStorageService.java
License:Apache License
@Nonnull
public String storeFile(InputStream in, String contentType) {
    String extension = defaultFileExtensionByContentType.get(contentType);
    String fileName = Joiner.on(".").skipNulls().join(Math.abs(random.nextLong()), extension);
    GridFSInputFile file = gridFS.createFile(in, fileName);
    file.setContentType(contentType);
    file.save();
    return "gridfs://" + gridFS.getBucketName() + "/" + file.getId().toString();
}
From source file:com.cognifide.aet.vs.artifacts.ArtifactsDAOMongoDBImpl.java
License:Apache License
@Override
public String saveArtifact(DBKey dbKey, InputStream data, String contentType) {
    String resultObjectId = null;
    GridFS gfs = getGridFS(dbKey);
    if (gfs != null) {
        GridFSInputFile file = gfs.createFile(data);
        if (file != null) {
            file.setContentType(contentType);
            file.save();
            resultObjectId = file.getId().toString();
        }
    }
    return resultObjectId;
}
From source file:com.edgytech.umongo.DbPanel.java
License:Apache License
public void uploadFile(final ButtonBase button) {
    final DbNode dbNode = getDbNode();
    final DB db = dbNode.getDb();
    final String path = getStringFieldValue(Item.uploadFilePath);
    if (path.isEmpty()) {
        return;
    }
    final File src = new File(path);
    final String fileName = getStringFieldValue(Item.uploadFileName);
    final String contentType = getStringFieldValue(Item.uploadContentType);
    final DBObject metadata = ((DocBuilderField) getBoundUnit(Item.uploadMetadata)).getDBObject();

    new DbJob() {
        @Override
        public Object doRun() throws IOException {
            final GridFSInputFile file = getGridFS().createFile(src);
            if (!fileName.isEmpty()) {
                file.setFilename(fileName);
            }
            if (!contentType.isEmpty()) {
                file.setContentType(contentType);
            }
            if (metadata != null) {
                file.setMetaData(metadata);
            }
            file.save();
            return file;
        }

        @Override
        public String getNS() {
            return db.getName();
        }

        @Override
        public String getShortName() {
            return "Upload File";
        }

        @Override
        public DBObject getRoot(Object result) {
            return new BasicDBObject("path", path);
        }

        @Override
        public void wrapUp(Object res) {
            super.wrapUp(res);
            // may have new collections
            dbNode.structureComponent();
        }

        @Override
        public ButtonBase getButton() {
            return button;
        }
    }.addJob();
}
From source file:com.englishtown.integration.java.IntegrationTestHelper.java
License:Open Source License
public static String createFile(JsonObject config, String bucket) {
    GridFS gridFS = IntegrationTestHelper.getGridFS(config, bucket);
    GridFSInputFile inputFile = gridFS
            .createFile(IntegrationTestHelper.class.getResourceAsStream("/EF_Labs_ENG_logo.JPG"));
    inputFile.setContentType(DEFAULT_CONTENT_TYPE);
    inputFile.setFilename(DEFAULT_FILENAME);
    inputFile.setChunkSize(DEFAULT_CHUNK_SIZE);
    inputFile.setMetaData(DEFAULT_METADATA);
    inputFile.save();
    return inputFile.getId().toString();
}
From source file:com.fileoperations.CopyClass.java
public Boolean forSingleFile() {
    try {
        // Only plain files (names containing an extension) are handled here.
        if (name.contains(".")) {
            BasicDBObject query = new BasicDBObject();
            query.put("_id", parentPath + pathMerger + name);
            DBCursor cursor = collection.find(query);
            if (cursor.hasNext()) {
                // Abort if a file with the same name already exists at the destination.
                BasicDBObject checknewquery = new BasicDBObject();
                checknewquery.put("_id", newPath + pathMerger + name);
                DBCursor tempCursor = collection.find(checknewquery);
                if (tempCursor.hasNext()) {
                    return false;
                }
                DBObject copyFile = cursor.next();
                GridFS fileDB = new GridFS(mymongo.getDB(), userCollectionName);
                InputStream data = fileDB.findOne(query).getInputStream();

                // Copy the metadata document under the new path.
                BasicDBObject document = new BasicDBObject();
                document.append("_id", newPath + pathMerger + name);
                document.append("folder", "0");
                document.append("parent", newPath);
                document.append("name", name);
                document.append("type", copyFile.get("type").toString());
                collection.insert(document);

                // Copy the stored binary content into a new GridFS file.
                GridFSInputFile inputFile = fileDB.createFile(data);
                inputFile.setId(newPath + pathMerger + name);
                inputFile.put("path", newPath);
                inputFile.setFilename(name);
                inputFile.save();
                return true;
            } else {
                return false;
            }
        } else {
            return false;
        }
    } finally {
        mymongo.closeConnection();
    }
}
From source file:com.fileoperations.FolderDownload.java
public Boolean copyFolder(String newPath) throws IOException {
    try {
        String mongoFolder = parentPath + pathMerger + folderName;
        BasicDBObject query = new BasicDBObject();
        query.put("_id", mongoFolder);
        DBCursor cursor = collection.find(query);
        if (cursor.hasNext()) {
            // Abort if the destination already contains a folder with this name.
            BasicDBObject newquery = new BasicDBObject();
            newquery.put("_id", newPath + pathMerger + folderName);
            if (collection.find(newquery).hasNext()) {
                return false;
            }
            getPathOfAllChildrenFolder(parentPath, folderName);

            // Copy every stored file under the folder tree to the new path.
            BasicDBObject toFindAllFilesInFolder = new BasicDBObject();
            toFindAllFilesInFolder.put("$or", pathOfChildrenFolders);
            GridFS fileStore = new GridFS(mymongo.getDB(), userCollectionName);
            List<GridFSDBFile> AllFiles = fileStore.find(toFindAllFilesInFolder);
            for (int i = 0; i < AllFiles.size(); i++) {
                GridFSDBFile indivFile = AllFiles.get(i);
                InputStream data = indivFile.getInputStream();
                String zipPath = indivFile.get("path").toString();
                String tempFileName = indivFile.getFilename();
                zipPath = zipPath.replaceFirst(parentPath, newPath);

                BasicDBObject document = new BasicDBObject();
                document.append("_id", zipPath + pathMerger + tempFileName);
                document.append("folder", "0");
                document.append("parent", zipPath);
                document.append("name", tempFileName);
                int index = tempFileName.lastIndexOf(".");
                document.append("type", tempFileName.substring(index));
                collection.insert(document);

                GridFSInputFile inputFile = fileStore.createFile(data);
                inputFile.setId(zipPath + pathMerger + tempFileName);
                inputFile.put("path", zipPath);
                inputFile.setFilename(tempFileName);
                inputFile.save();
            }

            // Recreate empty sub-folders under the new path.
            BasicDBObject toFindAllEmptyFilesInFolder = new BasicDBObject();
            toFindAllEmptyFilesInFolder.put("$or", pathOfChildrenEmptyFolders);
            DBCursor allFolders = collection.find(toFindAllEmptyFilesInFolder);
            while (allFolders.hasNext()) {
                DBObject temp = allFolders.next();
                if (temp.get("folder").toString().equals("1")) {
                    String tempPath = temp.get("parent").toString().replaceFirst(parentPath, newPath);
                    BasicDBObject document = new BasicDBObject();
                    document.put("_id", tempPath + pathMerger + temp.get("name"));
                    document.put("folder", "1");
                    document.put("name", temp.get("name"));
                    document.put("parent", tempPath);
                    document.put("type", "1");
                    collection.insert(document);
                }
            }
            return true;
        } else {
            return false;
        }
    } finally {
        mymongo.closeConnection();
    }
}
From source file:com.fileoperations.FolderDownload.java
public Boolean renameFolder(String newName) throws IOException {
    try {
        String mongoFolder = parentPath + pathMerger + folderName;
        BasicDBObject query = new BasicDBObject();
        query.put("_id", mongoFolder);
        DBCursor cursor = collection.find(query);
        if (cursor.hasNext()) {
            // Abort if a folder with the new name already exists.
            BasicDBObject newquery = new BasicDBObject();
            newquery.put("_id", parentPath + pathMerger + newName);
            if (collection.find(newquery).hasNext()) {
                return false;
            }

            // Insert the renamed folder entry.
            BasicDBObject doc = new BasicDBObject();
            doc.put("_id", parentPath + pathMerger + newName);
            doc.put("folder", "1");
            doc.put("name", newName);
            doc.put("parent", parentPath);
            doc.put("type", "1");
            collection.insert(doc);

            getPathOfAllChildrenFolder(parentPath, folderName);

            // Re-insert every contained file under the renamed path.
            BasicDBObject toFindAllFilesInFolder = new BasicDBObject();
            toFindAllFilesInFolder.put("$or", pathOfChildrenFolders);
            GridFS fileStore = new GridFS(mymongo.getDB(), userCollectionName);
            List<GridFSDBFile> AllFiles = fileStore.find(toFindAllFilesInFolder);
            for (int i = 0; i < AllFiles.size(); i++) {
                GridFSDBFile indivFile = AllFiles.get(i);
                InputStream data = indivFile.getInputStream();
                String zipPath = indivFile.get("path").toString();
                String tempFileName = indivFile.getFilename();
                zipPath = zipPath.replaceFirst(parentPath + pathMerger + folderName,
                        parentPath + pathMerger + newName);

                BasicDBObject document = new BasicDBObject();
                document.append("_id", zipPath + pathMerger + tempFileName);
                document.append("folder", "0");
                document.append("parent", zipPath);
                document.append("name", tempFileName);
                int index = tempFileName.lastIndexOf(".");
                document.append("type", tempFileName.substring(index));
                collection.insert(document);

                GridFSInputFile inputFile = fileStore.createFile(data);
                inputFile.setId(zipPath + pathMerger + tempFileName);
                inputFile.put("path", zipPath);
                inputFile.setFilename(tempFileName);
                inputFile.save();
            }

            // Re-insert empty sub-folders under the renamed path.
            BasicDBObject toFindAllEmptyFilesInFolder = new BasicDBObject();
            toFindAllEmptyFilesInFolder.put("$or", pathOfChildrenEmptyFolders);
            DBCursor allFolders = collection.find(toFindAllEmptyFilesInFolder);
            while (allFolders.hasNext()) {
                DBObject temp = allFolders.next();
                if (temp.get("folder").toString().equals("1")) {
                    String tempPath = temp.get("parent").toString();
                    tempPath = tempPath.replaceFirst(parentPath + pathMerger + folderName,
                            parentPath + pathMerger + newName);
                    BasicDBObject updocument = new BasicDBObject();
                    updocument.put("_id", tempPath + pathMerger + temp.get("name"));
                    updocument.put("folder", "1");
                    updocument.put("name", temp.get("name"));
                    updocument.put("parent", tempPath);
                    updocument.put("type", "1");
                    collection.insert(updocument);
                }
            }
            return true;
        } else {
            return false;
        }
    } finally {
        mymongo.closeConnection();
    }
}
From source file:com.fileoperations.RenameFolder.java
public Boolean forSingleFile() {
    try {
        // Only files (names containing an extension) can be renamed here.
        if (oldName.contains(".")) {
            BasicDBObject query = new BasicDBObject();
            query.put("_id", parentPath + pathMerger + oldName);
            DBCursor cursor = collection.find(query);
            if (cursor.hasNext()) {
                DBObject renameFile = cursor.next();

                // Abort if a file with the target name already exists.
                BasicDBObject checknewquery = new BasicDBObject();
                checknewquery.put("_id", parentPath + pathMerger + newName + renameFile.get("type").toString());
                DBCursor tempCursor = collection.find(checknewquery);
                if (tempCursor.hasNext()) {
                    return false;
                }

                GridFS file = new GridFS(mymongo.getDB(), userCollectionName);
                InputStream data = file.findOne(query).getInputStream();

                // Insert the metadata document under the new name.
                BasicDBObject document = new BasicDBObject();
                document.append("_id", parentPath + pathMerger + newName + renameFile.get("type").toString());
                document.append("folder", "0");
                document.append("parent", parentPath);
                document.append("name", newName + renameFile.get("type").toString());
                document.append("type", renameFile.get("type").toString());
                collection.insert(document);

                // Copy the binary content into a new GridFS entry, then remove the old one.
                GridFSInputFile inputFile = file.createFile(data);
                inputFile.setId(parentPath + pathMerger + newName + renameFile.get("type").toString());
                inputFile.put("path", parentPath);
                inputFile.setFilename(newName + renameFile.get("type").toString());
                inputFile.save();

                file.remove(file.findOne(query));
                collection.remove(renameFile);
                return true;
            } else {
                return false;
            }
        } else {
            return false;
        }
    } finally {
        mymongo.closeConnection();
    }
}