Example usage for com.mongodb.util JSON parse

List of usage examples for com.mongodb.util JSON parse

Introduction

On this page you can find usage examples for com.mongodb.util JSON.parse.

Prototype

public static Object parse(final String jsonString) 

Document

Parses a JSON string and returns a corresponding Java object.
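
The concrete return type depends on the input: a JSON document parses to a BasicDBObject, a JSON array to a BasicDBList, and a scalar to the matching Java type, which is why the examples below cast the result to DBObject, BSONObject, or BasicDBObject. Here is a minimal, self-contained sketch (note that com.mongodb.util.JSON is a legacy class from the 2.x/3.x Java driver; it was deprecated in 3.5 and removed in 4.x, where org.bson.Document.parse(String) is the usual replacement):

import com.mongodb.BasicDBList;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;
import com.mongodb.util.JSONParseException;

public class JsonParseExample {
    public static void main(String[] args) {
        // A JSON document parses to a BasicDBObject, which implements DBObject.
        DBObject doc = (DBObject) JSON.parse("{ \"name\" : \"vitam\", \"size\" : 42 }");
        System.out.println(doc.get("name")); // vitam

        // A JSON array parses to a BasicDBList.
        BasicDBList list = (BasicDBList) JSON.parse("[ 1, 2, 3 ]");
        System.out.println(list.size()); // 3

        // Malformed input throws the unchecked JSONParseException.
        try {
            JSON.parse("{ \"broken\" ");
        } catch (JSONParseException e) {
            System.out.println("invalid JSON");
        }
    }
}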

Usage

From source file:fr.gouv.vitam.mdbes.MainIngestMDBESFromFile.java

License:Open Source License

private static final void runOnce(final MongoDbAccess dbvitam)
        throws InterruptedException, InstantiationException, IllegalAccessException, IOException {
    System.out.println("Load starting... ");
    int nbThread = ingest.length;

    final long date11 = System.currentTimeMillis();
    if (ingest.length == 1) {
        final FileInputStream fstream = new FileInputStream(ingest[0]);
        final DataInputStream in = new DataInputStream(fstream);
        final BufferedReader br = new BufferedReader(new InputStreamReader(in));
        String strLine;
        int nb = 0;
        final HashMap<String, String> esIndex = new HashMap<>();
        BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
        while ((strLine = br.readLine()) != null) {
            final DBObject bson = (DBObject) JSON.parse(strLine);
            bulk.insert(bson);
            ElasticSearchAccess.addEsIndex(dbvitam, model, esIndex, bson);
            nb++;
            if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                BulkWriteResult result = bulk.execute();
                int check = result.getInsertedCount();
                if (check != nb) {
                    System.out.print("x");
                } else {
                    System.out.print(".");
                }
                bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                MainIngestFile.cptMaip.addAndGet(check);
                nb = 0;
            }
        }
        if (!esIndex.isEmpty()) {
            System.out.println("Last bulk ES");
            dbvitam.addEsEntryIndex(true, esIndex, model);
            esIndex.clear();
        }
        if (nb != 0) {
            bulk.execute();
            MainIngestFile.cptMaip.addAndGet(nb);
            nb = 0;
        }
    } else {
        // threads
        ExecutorService executorService = Executors.newFixedThreadPool(ingest.length + 1);
        for (int i = 0; i < ingest.length; i++) {
            MainIngestMDBESFromFile ingestrun = new MainIngestMDBESFromFile();
            ingestrun.file = ingest[i];
            executorService.execute(ingestrun);
        }
        // ES
        MainIngestMDBESFromFile ingestrun = new MainIngestMDBESFromFile();
        ingestrun.file = null;
        ingestrun.files = ingest;
        ingestrun.original = dbvitam;
        executorService.execute(ingestrun);

        executorService.shutdown();
        while (!executorService.awaitTermination(10000, TimeUnit.MILLISECONDS)) {
            // loop until all ingest tasks have terminated
        }
        System.out.println("Load ended");
        final long nbBigM = dbvitam.getDaipSize();
        final long nbBigD = dbvitam.getPaipSize();
        System.out.println("\n Big Test (" + nbThread + " nb MAIP: " + MainIngestFile.cptMaip.get()
                + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));

        System.out.println("\nThread;nbLoad;nbTotal;Load");
        System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));
    }
    final long date12 = System.currentTimeMillis();
    MainIngestMDBESFromFile.loadt.set(date12 - date11);

    System.out.println("Load ended");
    /*
     * System.out.println("All elements\n================================================================");
     * DbVitam.printStructure(dbvitam);
     */
    final long nbBigM = dbvitam.getDaipSize();
    final long nbBigD = dbvitam.getPaipSize();
    System.out.println("\n Big Test (" + nbThread + " Threads chacune " + MainIngestFile.nb + " nb MAIP: "
            + MainIngestFile.cptMaip.get() + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
            + (MainIngestMDBESFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));

    System.out.println("\nThread;nbLoad;nbTotal;Load");
    System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
            + (MainIngestMDBESFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));
}

From source file:fr.gouv.vitam.mdbes.MainIngestMDBESFromFile.java

License:Open Source License

@Override
public void run() {
    if (file == null) {
        // ES
        //Thread.sleep(1000);
        try {
            for (int i = 0; i < files.length - 1; i++) {
                System.out.println("ESFile: " + files[i]);
                final HashMap<String, String> esIndex = new HashMap<>();
                final FileInputStream fstream = new FileInputStream(files[i]);
                final DataInputStream in = new DataInputStream(fstream);
                final BufferedReader br = new BufferedReader(new InputStreamReader(in));
                String strLine;
                // Read File Line By Line
                while ((strLine = br.readLine()) != null) {
                    final BSONObject bson = (BSONObject) JSON.parse(strLine);
                    ElasticSearchAccess.addEsIndex(original, model, esIndex, bson);
                }
                // Close the input stream
                br.close();
                in.close();
                fstream.close();
                if (!esIndex.isEmpty()) {
                    System.out.println("Last bulk ES");
                    original.addEsEntryIndex(true, esIndex, model);
                    esIndex.clear();
                }
            }
            // the last file may contain DAips that are already inserted but still need updating
            int i = files.length - 1;
            System.out.println("ESFile: " + files[i]);
            final FileInputStream fstream = new FileInputStream(files[i]);
            final DataInputStream in = new DataInputStream(fstream);
            final BufferedReader br = new BufferedReader(new InputStreamReader(in));
            String strLine;
            // Read File Line By Line
            while ((strLine = br.readLine()) != null) {
                final BSONObject bson = (BSONObject) JSON.parse(strLine);
                ElasticSearchAccess.addEsIndex(original, model, bson);
            }
            // Close the input stream
            br.close();
            in.close();
            fstream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return;
    }
    MongoDbAccess dbvitam = null;
    FileInputStream fstream = null;
    DataInputStream in = null;
    final BufferedReader br;
    try {
        System.out.println("MDFile: " + file);
        fstream = new FileInputStream(file);
        in = new DataInputStream(fstream);
        br = new BufferedReader(new InputStreamReader(in));
        dbvitam = new MongoDbAccess(mongoClient, database, esbase, unicast, false);
        // now ingest metaaip/metafield/data
        final long date11 = System.currentTimeMillis();
        String strLine;
        int nb = 0;

        if (false) {
            // TokuMX path (branch disabled): plain insert() batches instead of bulk writes
            List<DBObject> inserts = new ArrayList<DBObject>(GlobalDatas.LIMIT_MDB_NEW_INDEX);
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                inserts.add(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    WriteResult result = dbvitam.daips.collection.insert(inserts);
                    if (result.getN() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    inserts.clear();
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                WriteResult result = dbvitam.daips.collection.insert(inserts);
                if (result.getN() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                inserts.clear();
                nb = 0;
            }
        } else {
            BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                bulk.insert(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    BulkWriteResult result = bulk.execute();
                    bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                    if (result.getInsertedCount() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                BulkWriteResult result = bulk.execute();
                if (result.getInsertedCount() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                nb = 0;
            }
        }
        final long date12 = System.currentTimeMillis();
        loadt.addAndGet(date12 - date11);
        return;
    } catch (final InvalidUuidOperationException e) {
        e.printStackTrace();
    } catch (final FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // release resources, guarding against nulls in case stream creation failed
        try {
            if (in != null) {
                in.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            if (fstream != null) {
                fstream.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (dbvitam != null) {
            dbvitam.close();
        }
    }
}

From source file:fr.gouv.vitam.mdbes.MainIngestMDBFromFile.java

License:Open Source License

private static final void runOnce(final MongoDbAccess dbvitam)
        throws InterruptedException, InstantiationException, IllegalAccessException, IOException {
    System.out.println("Load starting... ");
    int nbThread = ingest.length;

    final long date11 = System.currentTimeMillis();
    if (ingest.length == 1) {
        final FileInputStream fstream = new FileInputStream(ingest[0]);
        final DataInputStream in = new DataInputStream(fstream);
        final BufferedReader br = new BufferedReader(new InputStreamReader(in));
        String strLine;
        int nb = 0;
        BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
        while ((strLine = br.readLine()) != null) {
            final DBObject bson = (DBObject) JSON.parse(strLine);
            bulk.insert(bson);
            nb++;
            if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                BulkWriteResult result = bulk.execute();
                int check = result.getInsertedCount();
                if (check != nb) {
                    System.out.print("x");
                } else {
                    System.out.print(".");
                }
                bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                MainIngestFile.cptMaip.addAndGet(check);
                nb = 0;
            }
        }
        if (nb != 0) {
            bulk.execute();
            MainIngestFile.cptMaip.addAndGet(nb);
            nb = 0;
        }
    } else {
        // threads
        ExecutorService executorService = Executors.newFixedThreadPool(ingest.length);
        for (int i = 0; i < ingest.length; i++) {
            MainIngestMDBFromFile ingestrun = new MainIngestMDBFromFile();
            ingestrun.file = ingest[i];
            executorService.execute(ingestrun);
            Thread.sleep(200);
        }
        Thread.sleep(1000);
        executorService.shutdown();
        while (!executorService.awaitTermination(10000, TimeUnit.MILLISECONDS)) {
            // loop until all ingest tasks have terminated
        }
        System.out.println("Load ended");
        final long nbBigM = dbvitam.getDaipSize();
        final long nbBigD = dbvitam.getPaipSize();
        System.out.println("\n Big Test (" + nbThread + " nb MAIP: " + MainIngestFile.cptMaip.get()
                + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));

        System.out.println("\nThread;nbLoad;nbTotal;Load");
        System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
                + (loadt.get()) / ((float) MainIngestFile.cptMaip.get() * nbThread));
    }
    final long date12 = System.currentTimeMillis();
    MainIngestMDBFromFile.loadt.set(date12 - date11);

    System.out.println("Load ended");
    /*
     * System.out.println("All elements\n================================================================");
     * DbVitam.printStructure(dbvitam);
     */
    final long nbBigM = dbvitam.getDaipSize();
    final long nbBigD = dbvitam.getPaipSize();
    System.out.println("\n Big Test (" + nbThread + " Threads chacune " + MainIngestFile.nb + " nb MAIP: "
            + MainIngestFile.cptMaip.get() + ") with MAIP: " + nbBigM + " DATA: " + nbBigD + " => Load:"
            + (MainIngestMDBFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));

    System.out.println("\nThread;nbLoad;nbTotal;Load");
    System.out.println(nbThread + ";" + MainIngestFile.cptMaip.get() + ";" + nbBigM + ";"
            + (MainIngestMDBFromFile.loadt.get()) / ((float) MainIngestFile.cptMaip.get()));
}

From source file:fr.gouv.vitam.mdbes.MainIngestMDBFromFile.java

License:Open Source License

@Override
public void run() {
    MongoDbAccess dbvitam = null;
    FileInputStream fstream = null;
    DataInputStream in = null;
    final BufferedReader br;
    try {
        fstream = new FileInputStream(file);
        in = new DataInputStream(fstream);
        br = new BufferedReader(new InputStreamReader(in));
        dbvitam = new MongoDbAccess(mongoClient, database, esbase, unicast, false);
        // now ingest metaaip/metafield/data
        final long date11 = System.currentTimeMillis();
        String strLine;
        int nb = 0;

        if (false) {
            // TokuMX path (branch disabled): plain insert() batches instead of bulk writes
            List<DBObject> inserts = new ArrayList<DBObject>(GlobalDatas.LIMIT_MDB_NEW_INDEX);
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                inserts.add(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    WriteResult result = dbvitam.daips.collection.insert(inserts);
                    if (result.getN() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    inserts.clear();
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                WriteResult result = dbvitam.daips.collection.insert(inserts);
                if (result.getN() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                inserts.clear();
                nb = 0;
            }
        } else {
            BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                bulk.insert(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    BulkWriteResult result = bulk.execute();
                    bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                    if (result.getInsertedCount() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                BulkWriteResult result = bulk.execute();
                if (result.getInsertedCount() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                nb = 0;
            }
        }
        final long date12 = System.currentTimeMillis();
        loadt.addAndGet(date12 - date11);
        return;
    } catch (final InvalidUuidOperationException e) {
        e.printStackTrace();
    } catch (final FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // release resources, guarding against nulls in case stream creation failed
        try {
            if (in != null) {
                in.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            if (fstream != null) {
                fstream.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (dbvitam != null) {
            dbvitam.close();
        }
    }
}

From source file:fr.gouv.vitam.mdbes.MainSimpleRequest.java

License:Open Source License

protected static void oneShot(MongoDbAccess dbvitam) throws InvalidParseOperationException,
        InvalidExecOperationException, InstantiationException, IllegalAccessException {
    // Requesting
    String comdtree = request.toString();
    BasicDBObject query = (BasicDBObject) JSON.parse(comdtree);
    if (ids != null) {
        BasicDBObject id = (BasicDBObject) JSON.parse(ids);
        DateTime date = new DateTime(-123456789012345L);
        query = new BasicDBObject("OldDate", date.toDate());
        System.out.println("Date: " + date + " upd: " + query + " => " + date.getYear());
        dbvitam.daips.collection.update(id, query);
        final DBCursor cursor = dbvitam.daips.collection.find(id);
        while (cursor.hasNext()) {
            final DAip maip = (DAip) cursor.next();
            maip.load(dbvitam);
            System.out.println(maip);
        }
        cursor.close();
        System.out.println("====");
        date = date.plusYears(10);
        id.append("OldDate", new BasicDBObject("$lt", date.toDate()));
        System.out.println("Date: " + date + " find: " + id + " => " + date.getYear());
        final DBCursor cursor2 = dbvitam.daips.collection.find(id);
        while (cursor2.hasNext()) {
            final DAip maip = (DAip) cursor2.next();
            Date madate = maip.getDate("OldDate");
            System.out.println("Madate: " + madate);
            System.out.println("Madate: " + madate.getTime());
            System.out.println("Madate: " + new DateTime(madate));
            maip.load(dbvitam);
            System.out.println(maip);
        }
        cursor2.close();
    } else {
        final DBCursor cursor = dbvitam.find(dbvitam.daips, query, ID_NBCHILD);
        while (cursor.hasNext()) {
            final DAip maip = (DAip) cursor.next();
            maip.load(dbvitam);
            System.out.println(maip);
        }
        cursor.close();
    }
}

From source file:fr.gouv.vitam.mdbes.ParserIngest.java

License:Open Source License

/**
 * Parse the model of ingest from the string
 *
 * @param ingest
 * @throws InvalidParseOperationException
 */
public void parse(final String ingest) throws InvalidParseOperationException {
    internalParseClean();
    this.ingest = ingest;
    final BSONObject bson = (BSONObject) JSON.parse(ingest);
    model = (String) bson.removeField(FIELD_ARGS.__model.name());
    getDomain(bson);
    parseDAip(bson, 1);
    domObj = bson;
}

From source file:fr.gouv.vitam.mdbes.ParserIngest.java

License:Open Source License

/**
 * Save generated DAip to the file and to ElasticSearch
 *
 * @param dbvitam
 * @param start
 * @param stop
 * @param saveEs True means save to ES
 * @return the number of element inserted
 * @throws InvalidExecOperationException
 * @throws InvalidUuidOperationException
 */
public long executeToFile(final MongoDbAccess dbvitam, final int start, final int stop, final boolean saveEs)
        throws InvalidExecOperationException, InvalidUuidOperationException {
    if (simulate) {
        return executeSimulate(start, stop);
    }
    if (GlobalDatas.PRINT_REQUEST) {
        System.out.println("Start To File");
    }
    this.dbvitam = dbvitam;
    // Domain
    domobj = (Domain) dbvitam.fineOne(VitamCollections.Cdomain, REFID, domRefid);
    LOGGER.debug("Found Domain ? " + (domobj != null));
    if (domobj == null) {
        domobj = new Domain();
        domobj.put(REFID, domRefid);
        domobj.put("name", domRefid);
        domobj.putAll(domObj);
        domobj.save(dbvitam);
        domobj.setRoot();
        LOGGER.warn("Create Domain: {}", domobj);
        // LOGGER.error("Load: "+domobj);
    }
    // Set DISTRIB to start-stop
    if (distribOccurence != null) {
        final int lstop = (stop - start + 1 > distribOccurence.occur) ? start + distribOccurence.occur - 1
                : stop;
        distribOccurence.low = start;
        distribOccurence.high = lstop;
        distribOccurence.occur = lstop - start + 1;
        System.out.println("Distrib: " + start + ":" + lstop);
    }
    // First level using start-stop
    AtomicLong cpt = context.cpts.get(CPTLEVEL + 1);
    final List<TypeField> fields = daips.get(0);
    final Occurence occurence = occurences.get(0);
    cpt.set(occurence.low);
    if (occurence.idcpt != null) {
        cpt = context.cpts.get(occurence.idcpt);
    }
    final long lstart = cpt.get();
    final long lstop = lstart + occurence.occur - 1;
    ArrayList<DAip> listmetaaips;
    final HashMap<String, Integer> subdepth = new HashMap<>();
    subdepth.put(domobj.getId(), 1);
    HashMap<String, String> esIndex = null;
    if (saveEs) {
        esIndex = new HashMap<>();
    }
    try {
        listmetaaips = execDAipNewToFile(null, subdepth, esIndex, 1, occurence, lstart, lstop, cpt, fields);
    } catch (InstantiationException | IllegalAccessException e) {
        throw new InvalidExecOperationException(e);
    } catch (Exception e) {
        throw new InvalidExecOperationException(e);
    }
    if (GlobalDatas.PRINT_REQUEST) {
        System.out.println("End of MAIPs");
    }
    if (listmetaaips != null && !listmetaaips.isEmpty()) {
        if (MainIngestFile.minleveltofile > 1) {
            domobj.addDAip(dbvitam, listmetaaips);
            System.out.println("To be saved: " + listmetaaips.size());
            for (DAip dAip : listmetaaips) {
                savedDaips.put(dAip.getId(), dAip);
            }
        } else {
            // XXX NO SAVE OF MAIP!
            domobj.addDAipNoSave(dbvitam, bufferedOutputStream, listmetaaips);
            if (saveEs) {
                for (DAip dAip : listmetaaips) {
                    final BSONObject bson = (BSONObject) JSON.parse(dAip.toStringDirect());
                    ElasticSearchAccess.addEsIndex(dbvitam, model, esIndex, bson);
                }
            }
        }
        domobj.save(dbvitam);
        listmetaaips.clear();
        listmetaaips = null;
    }
    if (saveEs && !esIndex.isEmpty()) {
        System.out.println("Last bulk ES");
        dbvitam.addEsEntryIndex(true, esIndex, model);
        esIndex.clear();
    }
    System.out.println("End of Domain");
    return totalCount.get();
}

From source file:fr.gouv.vitam.mdbes.ParserIngest.java

License:Open Source License

/**
 * Level 1 to MD, not > 1
 *
 * Save to file and to ElasticSearch
 *
 * @param father
 * @param subdepth22
 * @param esIndex
 * @param level
 * @param occurence
 * @param lstart
 * @param lstop
 * @param cpt
 * @param fields
 * @return the list of immediate sons
 * @throws InvalidExecOperationException
 * @throws InstantiationException
 * @throws IllegalAccessException
 */
private ArrayList<DAip> execDAipNewToFile(final DAip father, final Map<String, Integer> subdepth22,
        final HashMap<String, String> esIndex, final int level, final Occurence occurence, final long lstart,
        final long lstop, final AtomicLong cpt, final List<TypeField> fields)
        throws InvalidExecOperationException, InstantiationException, IllegalAccessException {
    final ArrayList<DAip> listmetaaips = new ArrayList<DAip>();
    final boolean fromDatabase = level < MainIngestFile.minleveltofile;
    for (long rank = cpt.get(); rank <= lstop; rank = cpt.incrementAndGet()) {
        DAip maip = new DAip();
        maip.put(DAip.DAIPDEPTHS, subdepth22);
        for (final TypeField typeField : fields) {
            final BasicDBObject obj = getDbObject(typeField, rank, occurence.distrib, cpt);
            if (obj == null) {
                maip = null;
                break;
            }
            maip.putAll((BSONObject) obj);
        }
        if (maip == null) {
            continue;
        }
        totalCount.incrementAndGet();
        if (totalCount.get() % 1000 == 0) {
            System.out.print('.');
        }
        if (occurence == distribOccurence) {
            System.out.println("\nDistrib: " + rank);
        }
        maip.getAfterLoad();
        DAip metaaip2 = null;
        if (fromDatabase) {
            metaaip2 = (DAip) dbvitam.fineOne(VitamCollections.Cdaip, REFID, maip.getString(REFID));
        }
        boolean metaCreated = true;
        if (metaaip2 != null) {
            System.out.print('x');
            // merge Depth
            final Map<String, Integer> old = metaaip2.getDomDepth();
            // Map<String, Integer> toUpdateSon = new HashMap<String, Integer>();
            for (final String key : subdepth22.keySet()) {
                if (old.containsKey(key)) {
                    if (old.get(key) > subdepth22.get(key)) {
                        old.put(key, subdepth22.get(key));
                        // toUpdateSon.put(key, subdepth22.get(key));
                    }
                } else {
                    old.put(key, subdepth22.get(key));
                    // toUpdateSon.put(key, subdepth22.get(key));
                }
            }
            // old now contains all
            metaaip2.put(DAip.DAIPDEPTHS, old);
            // XXX FIXME should update children but will not since "POC" code and not final code
            // XXX FIXME here should do: recursive call to update DAIPDEPTHS from toUpdateSon
            maip = metaaip2;
            // System.out.println("Not created: "+metaaip2.toString());
            metaCreated = false;
            // Last level
            if (level >= daips.size()) {
                // update directly
                if (father == null) {
                    listmetaaips.add(metaaip2);
                }
                metaaip2.save(dbvitam);
                if (esIndex != null) {
                    final BSONObject bson = (BSONObject) JSON.parse(maip.toStringDirect());
                    ElasticSearchAccess.addEsIndex(dbvitam, model, esIndex, bson);
                }
                continue;
            }
        }
        if (metaCreated) {
            // since created saved once here
            // XXX NO SAVE OF MAIP!
            maip.setNewId();
            // now check duaref and confidentialLevel
            if (maip.containsField(MongoDbAccess.VitamCollections.Cdua.getName())) {
                final String duaname = (String) maip.removeField(MongoDbAccess.VitamCollections.Cdua.getName());
                final DuaRef duaobj = DuaRef.findOne(dbvitam, duaname);
                if (duaobj != null) {
                    maip.addDuaRef(dbvitam, duaobj);
                } else {
                    LOGGER.error("wrong dua: " + duaname);
                }
            }
        }
        // now compute subMaip if any
        if (level < daips.size()) {
            AtomicLong newcpt = context.cpts.get(CPTLEVEL + (level + 1));
            final List<TypeField> newfields = daips.get(level);
            final Occurence occurence2 = occurences.get(level);
            // default reset
            newcpt.set(occurence2.low);
            if (occurence2.idcpt != null) {
                newcpt = context.cpts.get(occurence2.idcpt);
            }
            final long newlstart = newcpt.get();
            final long newlstop = newlstart + occurence2.occur - 1;
            final Map<String, Integer> subdepth2 = maip.getSubDomDepth();
            ArrayList<DAip> listsubmetaaips = execDAipNewToFile(maip, subdepth2, esIndex, level + 1, occurence2,
                    newlstart, newlstop, newcpt, newfields);
            if (listsubmetaaips != null) {
                listsubmetaaips.clear();
                listsubmetaaips = null;
            }
        } else {
            // now check data
            if (paips != null) {
                // ignore ? XXX FIXME
                /*
                 * DataObject dataobj = new DataObject();
                 * for (TypeField typeField : dataObject) {
                 * BasicDBObject obj = getDbObject(typeField, rank, occurence.distrib, cpt);
                 * dataobj.putAll((BSONObject) obj);
                 * }
                 * dataobj.setRefid(maip.refid);
                 * dataobj.getAfterLoad();
                 * dataobj.save(dbvitam);
                 * maip.addDataObject(dbvitam, dataobj);
                 */
            }
        }
        if (father != null) {
            long nb = father.nb;
            father.addDAipWithNoSave(maip);
            if (GlobalDatas.PRINT_REQUEST) {
                if (level == MainIngestFile.minleveltofile) {
                    System.out.print("Add Daip: " + nb + ":" + father.nb);
                }
            }
        }
        if (fromDatabase) {
            maip.save(dbvitam);
            if (metaCreated) {
                maip.forceSave(dbvitam.daips);
            }
            savedDaips.put(maip.getId(), maip);
        }
        // System.out.println("M: "+maip.toString());
        if (metaCreated && father == null) {
            listmetaaips.add(maip);
        } else if (father != null && !fromDatabase) {
            maip.saveToFile(dbvitam, bufferedOutputStream);
            if (esIndex != null) {
                final BSONObject bson = (BSONObject) JSON.parse(maip.toStringDirect());
                ElasticSearchAccess.addEsIndex(dbvitam, model, esIndex, bson);
            }
        }
    }
    return listmetaaips;
}

From source file:fr.gouv.vitam.mdbes.VitamType.java

License:Open Source License

/**
 * Load from a JSON String
 *
 * @param json
 */
public final void load(final String json) {
    this.putAll((BSONObject) JSON.parse(json));
    getAfterLoad();
}

From source file:fr.wseduc.gridfs.GridFSPersistor.java

License:Apache License

private DBObject jsonToDBObject(JsonObject object) {
    String str = object.encode();
    return (DBObject) JSON.parse(str);
}