List of usage examples for java.util.LinkedHashMap.get
public V get(Object key)
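Before the source-file listings below, here is a minimal self-contained sketch of what get(Object key) does on a LinkedHashMap. The map contents are invented for illustration; the behavior shown (null for absent keys, access-order reordering with the three-argument constructor) is the documented java.util.LinkedHashMap contract.

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapGetDemo {
    public static void main(String[] args) {
        // LinkedHashMap preserves insertion order; get() itself behaves like HashMap.get()
        Map<String, Integer> ages = new LinkedHashMap<>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        Integer age = ages.get("alice");     // returns 30
        Integer missing = ages.get("carol"); // returns null: key absent

        System.out.println(age);     // 30
        System.out.println(missing); // null

        // In access-order mode (three-argument constructor), get() also
        // moves the accessed entry to the end of the iteration order.
        LinkedHashMap<String, Integer> lru = new LinkedHashMap<>(16, 0.75f, true);
        lru.put("a", 1);
        lru.put("b", 2);
        lru.get("a"); // "a" is now last in iteration order
        System.out.println(lru.keySet()); // [b, a]
    }
}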
From source file:com.fizzed.rocker.compiler.JavaGenerator.java
private void createSourceTemplate(TemplateModel model, Writer w) throws GeneratorException, IOException { if (model.getOptions().getPostProcessing() != null) { // allow post-processors to transform the model try { model = postProcess(model); } catch (PostProcessorException ppe) { throw new GeneratorException("Error during post-processing of model.", ppe); } } // Used to register any with statements we encounter, so we can generate all dynamic consumers at the end. final WithStatementConsumerGenerator withStatementConsumerGenerator = new WithStatementConsumerGenerator(); // simple increment to help create unique var names int varCounter = -1; if (model.getPackageName() != null && !model.getPackageName().equals("")) { w.append("package ").append(model.getPackageName()).append(";").append(CRLF); } // imports regardless of template w.append(CRLF); w.append("import ").append(java.io.IOException.class.getName()).append(";").append(CRLF); w.append("import ").append(com.fizzed.rocker.ForIterator.class.getName()).append(";").append(CRLF); w.append("import ").append(com.fizzed.rocker.RenderingException.class.getName()).append(";").append(CRLF); w.append("import ").append(com.fizzed.rocker.RockerContent.class.getName()).append(";").append(CRLF); w.append("import ").append(com.fizzed.rocker.RockerOutput.class.getName()).append(";").append(CRLF); w.append("import ").append(com.fizzed.rocker.runtime.DefaultRockerTemplate.class.getName()).append(";") .append(CRLF); w.append("import ").append(com.fizzed.rocker.runtime.PlainTextUnloadedClassLoader.class.getName()) .append(";").append(CRLF); // template imports if (model.getImports().size() > 0) { for (JavaImport i : model.getImports()) { w.append("// import ").append(sourceRef(i)).append(CRLF); w.append("import ").append(i.getStatement()).append(";").append(CRLF); } } w.append(CRLF); w.append("/*").append(CRLF); w.append(" * Auto generated code to render template ").append(model.getPackageName().replace('.', '/')) .append("/").append(model.getTemplateName()).append(CRLF); w.append(" * Do not edit this file. 
Changes will eventually be overwritten by Rocker parser!").append(CRLF); w.append(" */").append(CRLF); int indent = 0; // MODEL CLASS // class definition tab(w, indent).append("public class ").append(model.getName()).append(" extends ") .append(model.getOptions().getExtendsModelClass()).append(" {").append(CRLF); indent++; w.append(CRLF); // static info about this template tab(w, indent).append("static public final ").append(ContentType.class.getCanonicalName()) .append(" CONTENT_TYPE = ").append(ContentType.class.getCanonicalName()).append(".") .append(model.getContentType().toString()).append(";").append(CRLF); tab(w, indent).append("static public final String TEMPLATE_NAME = \"").append(model.getTemplateName()) .append("\";").append(CRLF); tab(w, indent).append("static public final String TEMPLATE_PACKAGE_NAME = \"") .append(model.getPackageName()).append("\";").append(CRLF); tab(w, indent).append("static public final String HEADER_HASH = \"").append(model.createHeaderHash() + "") .append("\";").append(CRLF); // Don't include MODIFIED_AT header when optimized compiler is used since this implicitly disables hot reloading anyhow if (!model.getOptions().getOptimize()) { tab(w, indent).append("static public final long MODIFIED_AT = ").append(model.getModifiedAt() + "") .append("L;").append(CRLF); } tab(w, indent).append("static public final String[] ARGUMENT_NAMES = {"); StringBuilder argNameList = new StringBuilder(); for (Argument arg : model.getArgumentsWithoutRockerBody()) { if (argNameList.length() > 0) { argNameList.append(","); } argNameList.append(" \"").append(arg.getExternalName()).append("\""); } w.append(argNameList).append(" };").append(CRLF); // model arguments as members of model class appendArgumentMembers(model, w, "private", false, indent); // model setters & getters with builder-style pattern // special case for the RockerBody argument which sorta "hides" its getter/setter if (model.getArguments().size() > 0) { for (Argument arg : model.getArguments()) { // setter w.append(CRLF); tab(w, indent).append("public ").append(model.getName()).append(" ").append(arg.getExternalName()) .append("(" + arg.getExternalType()).append(" ").append(arg.getName()).append(") {") .append(CRLF); tab(w, indent + 1).append("this.").append(arg.getName()).append(" = ").append(arg.getName()) .append(";").append(CRLF); tab(w, indent + 1).append("return this;").append(CRLF); tab(w, indent).append("}").append(CRLF); // getter w.append(CRLF); tab(w, indent).append("public ").append(arg.getExternalType()).append(" ") .append(arg.getExternalName()).append("() {").append(CRLF); tab(w, indent + 1).append("return this.").append(arg.getName()).append(";").append(CRLF); tab(w, indent).append("}").append(CRLF); } } w.append(CRLF); // // model "template" static builder // tab(w, indent).append("static public ").append(model.getName()).append(" template("); if (model.getArguments().size() > 0) { int i = 0; // RockerBody is NOT included (it is passed via a closure block in other templates) // so we only care about the other arguments for (Argument arg : model.getArgumentsWithoutRockerBody()) { if (i != 0) { w.append(", "); } w.append(arg.getType()).append(" ").append(arg.getName()); i++; } } w.append(") {").append(CRLF); tab(w, indent + 1).append("return new ").append(model.getName()).append("()"); if (model.getArguments().size() > 0) { int i = 0; for (Argument arg : model.getArgumentsWithoutRockerBody()) { w.append(CRLF); tab(w, indent + 
2).append(".").append(arg.getName()).append("(").append(arg.getName()).append(")"); i++; } } w.append(";").append(CRLF); tab(w, indent).append("}").append(CRLF); // // render of model // w.append(CRLF); tab(w, indent).append("@Override").append(CRLF); tab(w, indent).append("protected DefaultRockerTemplate buildTemplate() throws RenderingException {") .append(CRLF); if (model.getOptions().getOptimize()) { // model "template" static builder (not reloading support, fastest performance) tab(w, indent + 1).append("// optimized for performance (via rocker.optimize flag; no auto reloading)") .append(CRLF); tab(w, indent + 1).append("return new Template(this);").append(CRLF); //tab(w, indent+1).append("return template.__render(context);").append(CRLF); } else { tab(w, indent + 1).append( "// optimized for convenience (runtime auto reloading enabled if rocker.reloading=true)") .append(CRLF); // use bootstrap to create underlying template tab(w, indent + 1).append("return ").append(RockerRuntime.class.getCanonicalName()) .append(".getInstance().getBootstrap().template(this.getClass(), this);").append(CRLF); //tab(w, indent+1).append("return template.__render(context);").append(CRLF); } tab(w, indent).append("}").append(CRLF); // // TEMPLATE CLASS // w.append(CRLF); // class definition tab(w, indent).append("static public class Template extends ").append(model.getOptions().getExtendsClass()); w.append(" {").append(CRLF); indent++; // plain text -> map of chunks of text (Java only supports 2-byte length of string constant) LinkedHashMap<String, LinkedHashMap<String, String>> plainTextMap = model .createPlainTextMap(PLAIN_TEXT_CHUNK_LENGTH); if (!plainTextMap.isEmpty()) { w.append(CRLF); for (String plainText : plainTextMap.keySet()) { // include static text as comments in source (limit to 500) tab(w, indent).append("// ") .append(StringUtils.abbreviate(RockerUtil.ESCAPE_JAVA.translate(plainText), 500)) .append(CRLF); for (Map.Entry<String, String> chunk : plainTextMap.get(plainText).entrySet()) { if (this.plainTextStrategy == PlainTextStrategy.STATIC_STRINGS) { tab(w, indent).append("static private final String ").append(chunk.getKey()).append(" = \"") .append(StringEscapeUtils.escapeJava(chunk.getValue())).append("\";").append(CRLF); } else if (this.plainTextStrategy == PlainTextStrategy.STATIC_BYTE_ARRAYS_VIA_UNLOADED_CLASS) { tab(w, indent).append("static private final byte[] ").append(chunk.getKey()).append(";") .append(CRLF); } } } // generate the static initializer if (this.plainTextStrategy == PlainTextStrategy.STATIC_BYTE_ARRAYS_VIA_UNLOADED_CLASS) { w.append(CRLF); tab(w, indent).append("static {").append(CRLF); String loaderClassName = unqualifiedClassName(PlainTextUnloadedClassLoader.class); tab(w, indent + 1).append(loaderClassName).append(" loader = ").append(loaderClassName) .append(".tryLoad(").append(model.getName()).append(".class.getClassLoader(), ") .append(model.getName()).append(".class.getName()").append(" + \"$PlainText\", \"") .append(model.getOptions().getTargetCharset()).append("\");").append(CRLF); for (String plainText : plainTextMap.keySet()) { for (Map.Entry<String, String> chunk : plainTextMap.get(plainText).entrySet()) { if (this.plainTextStrategy == PlainTextStrategy.STATIC_BYTE_ARRAYS_VIA_UNLOADED_CLASS) { tab(w, indent + 1).append(chunk.getKey()).append(" = loader.tryGet(\"") .append(chunk.getKey()).append("\");").append(CRLF); } } } tab(w, indent).append("}").append(CRLF); } } // arguments as members of template class appendArgumentMembers(model, w, "protected", 
true, indent); w.append(CRLF); // constructor tab(w, indent).append("public Template(").append(model.getName()).append(" model) {").append(CRLF); tab(w, indent + 1).append("super(model);").append(CRLF); tab(w, indent + 1).append("__internal.setCharset(\"").append(model.getOptions().getTargetCharset()) .append("\");").append(CRLF); tab(w, indent + 1).append("__internal.setContentType(CONTENT_TYPE);").append(CRLF); tab(w, indent + 1).append("__internal.setTemplateName(TEMPLATE_NAME);").append(CRLF); tab(w, indent + 1).append("__internal.setTemplatePackageName(TEMPLATE_PACKAGE_NAME);").append(CRLF); // each model argument passed along as well for (Argument arg : model.getArguments()) { tab(w, indent + 1).append("this.").append(arg.getName()).append(" = model.") .append(arg.getExternalName()).append("();").append(CRLF); } tab(w, indent).append("}").append(CRLF); w.append(CRLF); tab(w, indent).append("@Override").append(CRLF); tab(w, indent).append("protected void __doRender() throws IOException, RenderingException {").append(CRLF); // build rendering code int depth = 1; Deque<String> blockEnd = new ArrayDeque<>(); for (TemplateUnit unit : model.getUnits()) { if (unit instanceof Comment) { continue; } // something like // IfBeginBlock // __internal.aboutToExecutePosInSourceTemplate(5, 10); appendCommentAndSourcePositionUpdate(w, depth + indent, unit); if (unit instanceof PlainText) { PlainText plain = (PlainText) unit; LinkedHashMap<String, String> chunks = plainTextMap.get(plain.getText()); for (String chunkName : chunks.keySet()) { tab(w, depth + indent).append("__internal.writeValue(").append(chunkName).append(");") .append(CRLF); } } else if (unit instanceof ValueExpression) { ValueExpression value = (ValueExpression) unit; tab(w, depth + indent).append("__internal.renderValue(").append(value.getExpression()).append(", ") .append("" + value.isNullSafe()).append(");").append(CRLF); } else if (unit instanceof NullTernaryExpression) { NullTernaryExpression nullTernary = (NullTernaryExpression) unit; tab(w, depth + indent).append("{").append(CRLF); tab(w, depth + indent + 1).append("final Object __v = ").append(nullTernary.getLeftExpression()) .append(";").append(CRLF); tab(w, depth + indent + 1).append("if (__v != null) { __internal.renderValue(__v, false); }") .append(CRLF); if (nullTernary.getRightExpression() != null) { tab(w, depth + indent + 1).append("else {__internal.renderValue(") .append(nullTernary.getRightExpression()).append(", true); }").append(CRLF); } tab(w, depth + indent).append("}").append(CRLF); } else if (unit instanceof ValueClosureBegin) { ValueClosureBegin closure = (ValueClosureBegin) unit; tab(w, depth + indent).append("__internal.renderValue(").append(closure.getExpression()) .append(".__body("); // Java 1.8+ use lambda if (isJava8Plus(model)) { w.append("() -> {").append(CRLF); depth++; blockEnd.push("}), false);"); } // Java 1.7- uses anonymous inner class else { w.append("new ").append(unqualifiedClassName(RockerContent.class)).append("() {").append(CRLF); depth++; blockEnd.push("}), false);"); tab(w, depth + indent).append("@Override").append(CRLF); tab(w, depth + indent).append("public void render() throws IOException, RenderingException {") .append(CRLF); depth++; blockEnd.push("}"); } } else if (unit instanceof ValueClosureEnd) { // Java 1.8+ use lambda if (isJava8Plus(model)) { depth--; tab(w, depth + indent).append(blockEnd.pop()).append(" // value closure end ") .append(sourceRef(unit)).append(CRLF); } // Java 1.7- uses anonymous inner class else { depth--; 
tab(w, depth + indent).append(blockEnd.pop()).append(CRLF); depth--; tab(w, depth + indent).append(blockEnd.pop()).append(" // value closure end ") .append(sourceRef(unit)).append(CRLF); } } else if (unit instanceof ContentClosureBegin) { ContentClosureBegin closure = (ContentClosureBegin) unit; tab(w, depth + indent).append("RockerContent ").append(closure.getIdentifier()).append(" = "); // Java 1.8+ use lambda if (isJava8Plus(model)) { w.append("() -> {").append(CRLF); depth++; blockEnd.push("};"); } // Java 1.7- uses anonymous inner class else { w.append("new ").append(unqualifiedClassName(com.fizzed.rocker.RockerContent.class)) .append("() {").append(CRLF); depth++; blockEnd.push("};"); tab(w, depth + indent).append("@Override").append(CRLF); tab(w, depth + indent).append("public void render() throws IOException, RenderingException {") .append(CRLF); depth++; blockEnd.push("}"); } } else if (unit instanceof ContentClosureEnd) { // Java 1.8+ use lambda if (isJava8Plus(model)) { depth--; tab(w, depth + indent).append(blockEnd.pop()).append(" // content closure end ") .append(sourceRef(unit)).append(CRLF); } // Java 1.7- uses anonymous inner class else { depth--; tab(w, depth + indent).append(blockEnd.pop()).append(CRLF); depth--; tab(w, depth + indent).append(blockEnd.pop()).append(" // content closure end ") .append(sourceRef(unit)).append(CRLF); } } else if (unit instanceof IfBlockBegin) { IfBlockBegin block = (IfBlockBegin) unit; tab(w, depth + indent).append("if ").append(block.getExpression()).append(" {").append(CRLF); blockEnd.push("}"); depth++; } else if (unit instanceof IfBlockElseIf) { final IfBlockElseIf block = (IfBlockElseIf) unit; depth--; // This keeps else-if nicely formatted in generated code. tab(w, depth + indent).append("} else if ").append(block.getExpression()).append(" {").append(CRLF); depth++; } else if (unit instanceof IfBlockElse) { depth--; tab(w, depth + indent).append("} else {").append(" // else ").append(sourceRef(unit)).append(CRLF); depth++; } else if (unit instanceof IfBlockEnd) { depth--; tab(w, depth + indent).append(blockEnd.pop()).append(" // if end ").append(sourceRef(unit)) .append(CRLF); } else if (unit instanceof WithBlockBegin) { WithBlockBegin block = (WithBlockBegin) unit; WithStatement stmt = block.getStatement(); String statementConsumerName = withStatementConsumerGenerator.register(stmt); final List<WithStatement.VariableWithExpression> variables = stmt.getVariables(); if (isJava8Plus(model)) { tab(w, depth + indent) .append(variables.size() == 1 ? qualifiedClassName(WithBlock.class) : WithStatementConsumerGenerator.WITH_BLOCKS_GENERATED_CLASS_NAME) .append(".with("); // All expressions for (int i = 0; i < variables.size(); i++) { final WithStatement.VariableWithExpression var = variables.get(i); if (i > 0) { w.append(", "); } w.append(var.getValueExpression()); } w.append(", ").append(stmt.isNullSafe() + "").append(", ("); for (int i = 0; i < variables.size(); i++) { final WithStatement.VariableWithExpression var = variables.get(i); if (i > 0) { w.append(", "); } w.append(var.getVariable().getName()); } w.append(") -> {").append(CRLF); depth++; blockEnd.push("});"); } else { tab(w, depth + indent) // Note for standard 1 variable with block we use the predefined consumers. // Otherwise we fallback to the generated ones. .append(variables.size() == 1 ? 
qualifiedClassName(WithBlock.class) : WithStatementConsumerGenerator.WITH_BLOCKS_GENERATED_CLASS_NAME) .append(".with("); // All expressions for (int i = 0; i < variables.size(); i++) { final WithStatement.VariableWithExpression var = variables.get(i); if (i > 0) { w.append(", "); } w.append(var.getValueExpression()); } w.append(", ").append(stmt.isNullSafe() + "").append(", (new ").append(statementConsumerName) .append('<'); // Types for the .with(..) for (int i = 0; i < variables.size(); i++) { final JavaVariable variable = variables.get(i).getVariable(); if (i > 0) { w.append(", "); } w.append(variable.getType()); } w.append(">() {").append(CRLF); tab(w, depth + indent + 1).append("@Override public void accept("); for (int i = 0; i < variables.size(); i++) { final JavaVariable variable = variables.get(i).getVariable(); if (i > 0) { w.append(", "); } w.append("final ").append(variable.toString()); } w.append(") throws IOException {").append(CRLF); depth++; blockEnd.push("}}));"); } } else if (unit instanceof WithBlockElse) { depth--; if (isJava8Plus(model)) { tab(w, depth + indent).append("}, () -> {").append(CRLF); } else { tab(w, depth + indent).append("}}), (new ") .append(qualifiedClassName(WithBlock.Consumer0.class)).append("() { ") .append("@Override public void accept() throws IOException {").append(CRLF); } depth++; } else if (unit instanceof WithBlockEnd) { depth--; tab(w, depth + indent).append(blockEnd.pop()).append(" // with end ").append(sourceRef(unit)) .append(CRLF); } else if (unit instanceof ForBlockBegin) { ForBlockBegin block = (ForBlockBegin) unit; ForStatement stmt = block.getStatement(); // break support via try and catch mechanism (works across lambdas!) tab(w, depth + indent).append("try {").append(CRLF); depth++; if (stmt.getForm() == ForStatement.Form.GENERAL) { // print out raw statement including parentheses tab(w, depth + indent).append("for ").append(block.getExpression()).append(" {").append(CRLF); blockEnd.push("}"); } else if (stmt.getForm() == ForStatement.Form.ENHANCED) { // Java 1.8+ (use lambdas) if (stmt.hasAnyUntypedArguments() && isJava8Plus(model)) { // build list of lambda vars String localVars = ""; for (JavaVariable arg : stmt.getArguments()) { if (localVars.length() != 0) { localVars += ","; } localVars += arg.getName(); } tab(w, depth + indent).append(Java8Iterator.class.getName()).append(".forEach(") .append(stmt.getValueExpression()).append(", (").append(localVars).append(") -> {") .append(CRLF); blockEnd.push("});"); } else { // is the first argument a "ForIterator" ? boolean forIterator = isForIteratorType(stmt.getArguments().get(0).getType()); int collectionCount = (forIterator ? 2 : 1); int mapCount = (forIterator ? 3 : 2); // type and value we are going to iterate thru String iterateeType = null; String valueExpression = null; if (stmt.getArguments().size() == collectionCount) { iterateeType = stmt.getArguments().get(collectionCount - 1).getTypeAsNonPrimitiveType(); valueExpression = stmt.getValueExpression(); } else if (stmt.getArguments().size() == mapCount) { iterateeType = "java.util.Map.Entry<" + stmt.getArguments().get(mapCount - 2).getTypeAsNonPrimitiveType() + "," + stmt.getArguments().get(mapCount - 1).getTypeAsNonPrimitiveType() + ">"; valueExpression = stmt.getValueExpression() + ".entrySet()"; } // create unique variable name for iterator String forIteratorVarName = "__forIterator" + (++varCounter); // ForIterator for collection and make it final to assure nested anonymous // blocks can access it as well. 
tab(w, depth + indent).append("final ") .append(com.fizzed.rocker.runtime.CollectionForIterator.class.getName()).append("<") .append(iterateeType).append(">").append(" ").append(forIteratorVarName) .append(" = new ") .append(com.fizzed.rocker.runtime.CollectionForIterator.class.getName()).append("<") .append(iterateeType).append(">").append("(").append(valueExpression).append(");") .append(CRLF); // for loop same regardless of map vs. collection tab(w, depth + indent).append("while (").append(forIteratorVarName).append(".hasNext()) {") .append(CRLF); // if forIterator request assign to local var and make it final to assure nested anonymous // blocks can access it as well. if (forIterator) { tab(w, depth + indent + 1).append("final ") .append(com.fizzed.rocker.ForIterator.class.getName()).append(" ") .append(stmt.getArguments().get(0).getName()).append(" = ") .append(forIteratorVarName).append(";").append(CRLF); } if (stmt.getArguments().size() == collectionCount) { // assign item to local var and make it final to assure nested anonymous // blocks can access it as well. tab(w, depth + indent + 1).append("final ") .append(stmt.getArguments().get(collectionCount - 1).toString()).append(" = ") .append(forIteratorVarName).append(".next();").append(CRLF); } else if (stmt.getArguments().size() == mapCount) { // create unique variable name for iterator String entryVarName = "__entry" + (++varCounter); // assign map entry to local var tab(w, depth + indent + 1).append("final ").append(iterateeType).append(" ") .append(entryVarName).append(" = ").append(forIteratorVarName) .append(".next();").append(CRLF); // assign entry to local values make it final to assure nested anonymous // blocks can access it as well. tab(w, depth + indent + 1).append("final ") .append(stmt.getArguments().get(mapCount - 2).toString()).append(" = ") .append(entryVarName).append(".getKey();").append(CRLF); tab(w, depth + indent + 1).append("final ") .append(stmt.getArguments().get(mapCount - 1).toString()).append(" = ") .append(entryVarName).append(".getValue();").append(CRLF); } else { throw new GeneratorException("Unsupported number of arguments for for loop"); } blockEnd.push("}"); } } depth++; // continue support via try and catch mechanism (works across lambdas!) tab(w, depth + indent).append("try {").append(CRLF); depth++; } else if (unit instanceof ForBlockEnd) { depth--; // continue support via try and catch mechanism (works across lambdas!) tab(w, depth + indent).append("} catch (").append(ContinueException.class.getCanonicalName()) .append(" e) {").append(CRLF); tab(w, depth + indent + 1).append("// support for continuing for loops").append(CRLF); tab(w, depth + indent).append("}").append(CRLF); depth--; tab(w, depth + indent).append(blockEnd.pop()).append(" // for end ").append(sourceRef(unit)) .append(CRLF); depth--; // break support via try and catch mechanism (works across lambdas!) 
tab(w, depth + indent).append("} catch (").append(BreakException.class.getCanonicalName()) .append(" e) {").append(CRLF); tab(w, depth + indent + 1).append("// support for breaking for loops").append(CRLF); tab(w, depth + indent).append("}").append(CRLF); } else if (unit instanceof BreakStatement) { tab(w, depth + indent).append("__internal.throwBreakException();").append(CRLF); } else if (unit instanceof ContinueStatement) { tab(w, depth + indent).append("__internal.throwContinueException();").append(CRLF); } //log.info(" src (@ {}): [{}]", unit.getSourceRef(), unit.getSourceRef().getConsoleFriendlyText()); } // end of render() tab(w, indent).append("}").append(CRLF); indent--; // end of template class tab(w, indent).append("}").append(CRLF); // Generate class with all gathered consumer interfaces for all withblocks withStatementConsumerGenerator.generate(this, w); if (this.plainTextStrategy == PlainTextStrategy.STATIC_BYTE_ARRAYS_VIA_UNLOADED_CLASS && !plainTextMap.isEmpty()) { w.append(CRLF); tab(w, indent).append("private static class PlainText {").append(CRLF); w.append(CRLF); for (String plainText : plainTextMap.keySet()) { for (Map.Entry<String, String> chunk : plainTextMap.get(plainText).entrySet()) { tab(w, indent + 1).append("static private final String ").append(chunk.getKey()).append(" = \"") .append(StringEscapeUtils.escapeJava(chunk.getValue())).append("\";").append(CRLF); } } w.append(CRLF); tab(w, indent).append("}").append(CRLF); } w.append(CRLF); w.append("}").append(CRLF); }
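The Rocker generator above exercises get() mainly on a nested map: the outer LinkedHashMap keys plain-text blocks of the template, the inner one maps generated chunk names to chunk values, and both are iterated in insertion order when the chunks are written out. A distilled sketch of that lookup pattern follows; the class name and chunk data here are invented:

import java.util.LinkedHashMap;

public class NestedChunkLookup {
    public static void main(String[] args) {
        // Outer key: a block of template text; inner map: chunk name -> chunk value.
        // LinkedHashMap keeps chunks in the order they were split, which matters
        // because the chunks are later written out sequentially.
        LinkedHashMap<String, LinkedHashMap<String, String>> plainTextMap = new LinkedHashMap<>();

        LinkedHashMap<String, String> chunks = new LinkedHashMap<>();
        chunks.put("PLAIN_TEXT_0_0", "Hello, ");
        chunks.put("PLAIN_TEXT_0_1", "world!");
        plainTextMap.put("Hello, world!", chunks);

        // The generator's pattern: look up the chunk map for a given text block,
        // then iterate its keys in insertion order.
        for (String chunkName : plainTextMap.get("Hello, world!").keySet()) {
            System.out.println(chunkName);
        }
    }
}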
From source file:juicebox.tools.utils.original.Preprocessor.java
/** * Note -- compressed * * @param zd Matrix zoom data * @param block Block to write * @param sampledData Array to hold a sample of the data (to compute statistics) * @throws IOException */ private void writeBlock(MatrixZoomDataPP zd, BlockPP block, DownsampledDoubleArrayList sampledData) throws IOException { final Map<Point, ContactCount> records = block.getContactRecordMap();// getContactRecords(); // System.out.println("Write contact records : records count = " + records.size()); // Count records first int nRecords; if (countThreshold > 0) { nRecords = 0; for (ContactCount rec : records.values()) { if (rec.getCounts() >= countThreshold) { nRecords++; } } } else { nRecords = records.size(); } BufferedByteWriter buffer = new BufferedByteWriter(nRecords * 12); buffer.putInt(nRecords); zd.cellCount += nRecords; // Find extents of occupied cells int binXOffset = Integer.MAX_VALUE; int binYOffset = Integer.MAX_VALUE; int binXMax = 0; int binYMax = 0; for (Map.Entry<Point, ContactCount> entry : records.entrySet()) { Point point = entry.getKey(); binXOffset = Math.min(binXOffset, point.x); binYOffset = Math.min(binYOffset, point.y); binXMax = Math.max(binXMax, point.x); binYMax = Math.max(binYMax, point.y); } buffer.putInt(binXOffset); buffer.putInt(binYOffset); // Sort keys in row-major order List<Point> keys = new ArrayList<Point>(records.keySet()); Collections.sort(keys, new Comparator<Point>() { @Override public int compare(Point o1, Point o2) { if (o1.y != o2.y) { return o1.y - o2.y; } else { return o1.x - o2.x; } } }); Point lastPoint = keys.get(keys.size() - 1); final short w = (short) (binXMax - binXOffset + 1); boolean isInteger = true; float maxCounts = 0; LinkedHashMap<Integer, List<ContactRecord>> rows = new LinkedHashMap<Integer, List<ContactRecord>>(); for (Point point : keys) { final ContactCount contactCount = records.get(point); float counts = contactCount.getCounts(); if (counts >= countThreshold) { isInteger = isInteger && (Math.floor(counts) == counts); maxCounts = Math.max(counts, maxCounts); final int px = point.x - binXOffset; final int py = point.y - binYOffset; List<ContactRecord> row = rows.get(py); if (row == null) { row = new ArrayList<ContactRecord>(10); rows.put(py, row); } row.add(new ContactRecord(px, py, counts)); } } // Compute size for each representation and choose smallest boolean useShort = isInteger && (maxCounts < Short.MAX_VALUE); int valueSize = useShort ? 2 : 4; int lorSize = 0; int nDensePts = (lastPoint.y - binYOffset) * w + (lastPoint.x - binXOffset) + 1; int denseSize = nDensePts * valueSize; for (List<ContactRecord> row : rows.values()) { lorSize += 4 + row.size() * valueSize; } buffer.put((byte) (useShort ? 
0 : 1)); if (lorSize < denseSize) { buffer.put((byte) 1); // List of rows representation buffer.putShort((short) rows.size()); // # of rows for (Map.Entry<Integer, List<ContactRecord>> entry : rows.entrySet()) { int py = entry.getKey(); List<ContactRecord> row = entry.getValue(); buffer.putShort((short) py); // Row number buffer.putShort((short) row.size()); // size of row for (ContactRecord contactRecord : row) { buffer.putShort((short) (contactRecord.getBinX())); final float counts = contactRecord.getCounts(); if (useShort) { buffer.putShort((short) counts); } else { buffer.putFloat(counts); } sampledData.add(counts); zd.sum += counts; } } } else { buffer.put((byte) 2); // Dense matrix buffer.putInt(nDensePts); buffer.putShort(w); int lastIdx = 0; for (Point p : keys) { int idx = (p.y - binYOffset) * w + (p.x - binXOffset); for (int i = lastIdx; i < idx; i++) { // Filler value if (useShort) { buffer.putShort(Short.MIN_VALUE); } else { buffer.putFloat(Float.NaN); } } float counts = records.get(p).getCounts(); if (useShort) { buffer.putShort((short) counts); } else { buffer.putFloat(counts); } lastIdx = idx + 1; sampledData.add(counts); zd.sum += counts; } } byte[] bytes = buffer.getBytes(); byte[] compressedBytes = compress(bytes); los.write(compressedBytes); }
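The LinkedHashMap usage in writeBlock is the classic get-or-create grouping idiom: look up the row list for a bin index with get(), and create and insert it when the result is null. Below is a minimal sketch of the idiom under simplified types (int pairs instead of Point/ContactRecord), with the Java 8 computeIfAbsent equivalent noted in a comment:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class GroupByRow {
    public static void main(String[] args) {
        int[][] points = { {0, 1}, {2, 1}, {5, 3} }; // {x, y} pairs
        LinkedHashMap<Integer, List<Integer>> rows = new LinkedHashMap<>();

        for (int[] p : points) {
            int x = p[0], y = p[1];
            // Preprocessor-style idiom: get(), test for null, then put()
            List<Integer> row = rows.get(y);
            if (row == null) {
                row = new ArrayList<>(10);
                rows.put(y, row);
            }
            row.add(x);
        }

        // Equivalent one-liner since Java 8:
        // rows.computeIfAbsent(y, k -> new ArrayList<>(10)).add(x);

        System.out.println(rows); // {1=[0, 2], 3=[5]} (rows appear in first-seen order)
    }
}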
From source file:com.opengamma.analytics.financial.interestrate.capletstripping.SABRTermStructureModelProvider.java
/** * General set up for a SABRTermStructureModelProvider * @param knotPoints Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the positions of the knot points on each of those curves * @param interpolators Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the interpolator used to describe that curve * @param parameterTransforms Map between parameter curve names ("alpha", "beta", "rho" and "nu") and the parameter transform used for that curve * @param knownParameterTermSturctures Map between known curve names (could be "alpha", "beta", "rho" and "nu") and the known curve(s) */ public SABRTermStructureModelProvider(LinkedHashMap<String, double[]> knotPoints, final LinkedHashMap<String, Interpolator1D> interpolators, final LinkedHashMap<String, ParameterLimitsTransform> parameterTransforms, final LinkedHashMap<String, InterpolatedDoublesCurve> knownParameterTermSturctures) { Validate.notNull(knotPoints, "null node points"); Validate.notNull(interpolators, "null interpolators"); Validate.isTrue(knotPoints.size() == interpolators.size(), "size mismatch between nodes and interpolators"); if (knownParameterTermSturctures == null) { Validate.isTrue(knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA), "alpha curve not found"); Validate.isTrue(knotPoints.containsKey(BETA) && interpolators.containsKey(BETA), "beta curve not found"); Validate.isTrue(knotPoints.containsKey(NU) && interpolators.containsKey(NU), "nu curve not found"); Validate.isTrue(knotPoints.containsKey(RHO) && interpolators.containsKey(RHO), "rho curve not found"); } else { Validate.isTrue((knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA)) ^ knownParameterTermSturctures.containsKey(ALPHA), "alpha curve not found"); Validate.isTrue((knotPoints.containsKey(BETA) && interpolators.containsKey(BETA)) ^ knownParameterTermSturctures.containsKey(BETA), "beta curve not found"); Validate.isTrue((knotPoints.containsKey(NU) && interpolators.containsKey(NU)) ^ knownParameterTermSturctures.containsKey(NU), "nu curve not found"); Validate.isTrue((knotPoints.containsKey(RHO) && interpolators.containsKey(RHO)) ^ knownParameterTermSturctures.containsKey(RHO), "rho curve not found"); } final LinkedHashMap<String, Interpolator1D> transInterpolators = new LinkedHashMap<>(); for (final Map.Entry<String, Interpolator1D> entry : interpolators.entrySet()) { final String name = entry.getKey(); final Interpolator1D temp = new TransformedInterpolator1D(entry.getValue(), parameterTransforms.get(name)); transInterpolators.put(name, temp); } _curveBuilder = new InterpolatedCurveBuildingFunction(knotPoints, transInterpolators); // _parameterTransforms = parameterTransforms; //TODO all the check for this _knownParameterTermStructures = knownParameterTermSturctures; }
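The constructor above pairs each interpolator with its transform via parameterTransforms.get(name), writing the results into a fresh LinkedHashMap so curve order is preserved. A stripped-down sketch of that order-preserving rewrap, with plain Strings standing in for the OpenGamma interpolator and transform types:

import java.util.LinkedHashMap;
import java.util.Map;

public class RewrapInOrder {
    public static void main(String[] args) {
        LinkedHashMap<String, String> interpolators = new LinkedHashMap<>();
        interpolators.put("alpha", "linear");
        interpolators.put("beta", "spline");

        Map<String, String> transforms = new LinkedHashMap<>();
        transforms.put("alpha", "sin");
        transforms.put("beta", "logit");

        // Build a parallel map, pairing each entry with its transform looked up by key.
        // Iterating the LinkedHashMap guarantees "alpha" is processed before "beta".
        LinkedHashMap<String, String> transformed = new LinkedHashMap<>();
        for (Map.Entry<String, String> e : interpolators.entrySet()) {
            String name = e.getKey();
            transformed.put(name, e.getValue() + "+" + transforms.get(name));
        }
        System.out.println(transformed); // {alpha=linear+sin, beta=spline+logit}
    }
}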
From source file:fr.cirad.mgdb.exporting.markeroriented.GFFExportHandler.java
@Override public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs, ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms, int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles) throws Exception { MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule); ZipOutputStream zos = new ZipOutputStream(outputStream); if (readyToExportFiles != null) for (String readyToExportFile : readyToExportFiles.keySet()) { zos.putNextEntry(new ZipEntry(readyToExportFile)); InputStream inputStream = readyToExportFiles.get(readyToExportFile); byte[] dataBlock = new byte[1024]; int count = inputStream.read(dataBlock, 0, 1024); while (count != -1) { zos.write(dataBlock, 0, count); count = inputStream.read(dataBlock, 0, 1024); } } File warningFile = File.createTempFile("export_warnings_", ""); FileWriter warningFileWriter = new FileWriter(warningFile); int markerCount = markerCursor.count(); List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs); ArrayList<String> individualList = new ArrayList<String>(); for (int i = 0; i < sampleIDs.size(); i++) { Individual individual = individuals.get(i); if (!individualList.contains(individual.getId())) { individualList.add(individual.getId()); } } String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals"; zos.putNextEntry(new ZipEntry(exportName + ".gff3")); String header = "##gff-version 3" + LINE_SEPARATOR; zos.write(header.getBytes()); TreeMap<String, String> typeToOntology = new TreeMap<String, String>(); typeToOntology.put(Type.SNP.toString(), "SO:0000694"); typeToOntology.put(Type.INDEL.toString(), "SO:1000032"); typeToOntology.put(Type.MIXED.toString(), "SO:0001059"); typeToOntology.put(Type.SYMBOLIC.toString(), "SO:0000109"); typeToOntology.put(Type.MNP.toString(), "SO:0001059"); int avgObjSize = (Integer) mongoTemplate .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize"); int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize; short nProgress = 0, nPreviousProgress = 0; long nLoadedMarkerCount = 0; while (markerCursor.hasNext()) { int nLoadedMarkerCountInLoop = 0; Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>(); boolean fStartingNewChunk = true; markerCursor.batchSize(nChunkSize); while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) { DBObject exportVariant = markerCursor.next(); DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION); markerChromosomalPositions.put((Comparable) exportVariant.get("_id"), refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":" + refPos.get(ReferencePosition.FIELDNAME_START_SITE)); nLoadedMarkerCountInLoop++; fStartingNewChunk = false; } List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet()); LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes( mongoTemplate, sampleIDs, currentMarkers, true, null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." 
+ ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample) { Comparable variantId = variant.getId(); List<String> variantDataOrigin = new ArrayList<String>(); Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>(); Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>(); Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>(); List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":"); if (chromAndPos.size() == 0) LOG.warn("Chromosomal position not found for marker " + variantId); // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR); if (markerSynonyms != null) { Comparable syn = markerSynonyms.get(variantId); if (syn != null) variantId = syn; } Collection<VariantRunData> runs = variantsAndRuns.get(variant); if (runs != null) for (VariantRunData run : runs) for (Integer sampleIndex : run.getSampleGenotypes().keySet()) { SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex); String individualId = individuals .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex))) .getId(); Integer gq = null; try { gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ); } catch (Exception ignored) { } if (gq != null && gq < nMinimumGenotypeQuality) continue; Integer dp = null; try { dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP); } catch (Exception ignored) { } if (dp != null && dp < nMinimumReadDepth) continue; String gtCode = sampleGenotype.getCode(); List<String> storedIndividualGenotypes = individualGenotypes.get(individualId); if (storedIndividualGenotypes == null) { storedIndividualGenotypes = new ArrayList<String>(); individualGenotypes.put(individualId, storedIndividualGenotypes); } storedIndividualGenotypes.add(gtCode); } zos.write((chromAndPos.get(0) + "\t" + StringUtils.join(variantDataOrigin, ";") /*source*/ + "\t" + typeToOntology.get(variant.getType()) + "\t" + Long.parseLong(chromAndPos.get(1)) + "\t" + Long.parseLong(chromAndPos.get(1)) + "\t" + "." + "\t" + "+" + "\t" + "." + "\t") .getBytes()); Comparable syn = markerSynonyms == null ? null : markerSynonyms.get(variant.getId()); zos.write(("ID=" + variant.getId() + ";" + (syn != null ? 
"Name=" + syn + ";" : "") + "alleles=" + StringUtils.join(variant.getKnownAlleleList(), "/") + ";" + "refallele=" + variant.getKnownAlleleList().get(0) + ";").getBytes()); for (int j = 0; j < individualList .size(); j++ /* we use this list because it has the proper ordering*/) { NumberFormat nf = NumberFormat.getInstance(Locale.US); nf.setMaximumFractionDigits(4); HashMap<String, Integer> compt1 = new HashMap<String, Integer>(); int highestGenotypeCount = 0; int sum = 0; String individualId = individualList.get(j); List<String> genotypes = individualGenotypes.get(individualId); HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes String mostFrequentGenotype = null; if (genotypes != null) for (String genotype : genotypes) { if (genotype.length() == 0) continue; /* skip missing genotypes */ int count = 0; for (String t : variant.getAllelesFromGenotypeCode(genotype)) { for (String t1 : variant.getKnownAlleleList()) { if (t.equals(t1) && !(compt1.containsKey(t1))) { count++; compt1.put(t1, count); } else if (t.equals(t1) && compt1.containsKey(t1)) { if (compt1.get(t1) != 0) { count++; compt1.put(t1, count); } else compt1.put(t1, count); } else if (!(compt1.containsKey(t1))) { compt1.put(t1, 0); } } } for (int countValue : compt1.values()) { sum += countValue; } int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype); if (gtCount > highestGenotypeCount) { highestGenotypeCount = gtCount; mostFrequentGenotype = genotype; } genotypeCounts.put(genotype, gtCount); } List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>() : variant.getAllelesFromGenotypeCode(mostFrequentGenotype); if (alleles.size() != 0) { zos.write(("acounts=" + individualId + ":").getBytes()); for (String knowAllelesCompt : compt1.keySet()) { zos.write( (knowAllelesCompt + " " + nf.format(compt1.get(knowAllelesCompt) / (float) sum) + " " + compt1.get(knowAllelesCompt) + " ").getBytes()); } zos.write((alleles.size() + ";").getBytes()); } if (genotypeCounts.size() > 1) { Comparable sVariantId = markerSynonyms != null ? markerSynonyms.get(variant.getId()) : variant.getId(); warningFileWriter.write("- Dissimilar genotypes found for variant " + (sVariantId == null ? variant.getId() : sVariantId) + ", individual " + individualId + ". Exporting most frequent: " + StringUtils.join(alleles, ",") + "\n"); } } zos.write((LINE_SEPARATOR).getBytes()); } if (progress.hasAborted()) return; nLoadedMarkerCount += nLoadedMarkerCountInLoop; nProgress = (short) (nLoadedMarkerCount * 100 / markerCount); if (nProgress > nPreviousProgress) { // if (nProgress%5 == 0) // LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s"); progress.setCurrentStepProgress(nProgress); nPreviousProgress = nProgress; } } warningFileWriter.close(); if (warningFile.length() > 0) { zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt")); int nWarningCount = 0; BufferedReader in = new BufferedReader(new FileReader(warningFile)); String sLine; while ((sLine = in.readLine()) != null) { zos.write((sLine + "\n").getBytes()); in.readLine(); nWarningCount++; } LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount); in.close(); } warningFile.delete(); zos.close(); progress.setCurrentStepProgress((short) 100); }
From source file:com.sonicle.webtop.vfs.Service.java
public void processManageStoresTree(HttpServletRequest request, HttpServletResponse response, PrintWriter out) { ArrayList<ExtTreeNode> children = new ArrayList<>(); // Node ID is composed in this way: // $shareId|$storeId|$path try { String crud = ServletUtils.getStringParameter(request, "crud", true); if (crud.equals(Crud.READ)) { String node = ServletUtils.getStringParameter(request, "node", true); if (node.equals("root")) { // Share roots... for (StoreShareRoot root : getRootsFromCache()) { children.add(createRootNode(root)); } } else { StoreNodeId nodeId = (StoreNodeId) new StoreNodeId().parse(node); if (nodeId.getSize() == 1) { // Root share's folders... StoreShareRoot root = getRootFromCache(nodeId.getShareId()); if (root instanceof MyStoreRoot) { for (Store cal : manager.listStores()) { MyStoreFolder folder = new MyStoreFolder(node, root.getOwnerProfileId(), cal); children.add(createFolderNode(folder, root.getPerms())); } } else { for (StoreShareFolder fold : getFoldersFromCache(root.getShareId())) { children.add(createFolderNode(fold, root.getPerms())); } } } else if (nodeId.getSize() == 2 || nodeId.getSize() == 3) { // Store's folders (2) or folder's folders (3)... int storeId = Integer.valueOf(nodeId.getStoreId()); StoreShareFolder folder = getFolderFromCache(storeId); String path = (nodeId.getSize() == 2) ? "/" : nodeId.getPath(); boolean showHidden = us.getShowHiddenFiles(); LinkedHashMap<String, SharingLink> dls = manager.listDownloadLinks(storeId, path); LinkedHashMap<String, SharingLink> uls = manager.listUploadLinks(storeId, path); StoreFileSystem sfs = manager.getStoreFileSystem(storeId); for (FileObject fo : manager.listStoreFiles(StoreFileType.FOLDER, storeId, path)) { if (!showHidden && VfsUtils.isFileObjectHidden(fo)) continue; // Relativize path and force trailing separator (it's a folder) final String filePath = PathUtils.ensureTrailingSeparator(sfs.getRelativePath(fo), false); //final String fileId = new StoreNodeId(nodeId.getShareId(), nodeId.getStoreId(), filePath).toString(); final String fileHash = manager.generateStoreFileHash(storeId, filePath); String dlLink = null, ulLink = null; if (dls.containsKey(fileHash)) { dlLink = dls.get(fileHash).getLinkId(); } if (uls.containsKey(fileHash)) { ulLink = uls.get(fileHash).getLinkId(); } children.add(createFileNode(folder, filePath, dlLink, ulLink, fo)); } } } new JsonResult("children", children).printTo(out); } } catch (Exception ex) { logger.error("Error in ManageStoresTree", ex); } }
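The tree handler consults two LinkedHashMaps of sharing links, keyed by a file hash, with a containsKey/get pair. A compact sketch of that lookup with invented hashes and link IDs; note that a bare get() would behave identically here, since absent keys already yield null:

import java.util.LinkedHashMap;

public class LinkLookup {
    public static void main(String[] args) {
        LinkedHashMap<String, String> downloadLinks = new LinkedHashMap<>();
        downloadLinks.put("hash-abc", "dl-123");

        String fileHash = "hash-abc";
        String dlLink = null;
        // Service-style idiom: guard with containsKey, then get.
        if (downloadLinks.containsKey(fileHash)) {
            dlLink = downloadLinks.get(fileHash);
        }
        // Equivalent single lookup, since get() returns null for absent keys:
        // String dlLink = downloadLinks.get(fileHash);
        System.out.println(dlLink); // dl-123
    }
}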
From source file:com.opengamma.analytics.financial.interestrate.capletstripping.CapletStrippingFunction.java
public CapletStrippingFunction(final List<CapFloor> caps, final YieldCurveBundle yieldCurves, final LinkedHashMap<String, double[]> knotPoints, final LinkedHashMap<String, Interpolator1D> interpolators, final LinkedHashMap<String, ParameterLimitsTransform> parameterTransforms, final LinkedHashMap<String, InterpolatedDoublesCurve> knownParameterTermSturctures) { Validate.notNull(caps, "caps null"); Validate.notNull(knotPoints, "null node points"); Validate.notNull(interpolators, "null interpolators"); Validate.isTrue(knotPoints.size() == interpolators.size(), "size mismatch between nodes and interpolators"); if (knownParameterTermSturctures == null) { Validate.isTrue(knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA), "alpha curve not found"); Validate.isTrue(knotPoints.containsKey(BETA) && interpolators.containsKey(BETA), "beta curve not found"); Validate.isTrue(knotPoints.containsKey(NU) && interpolators.containsKey(NU), "nu curve not found"); Validate.isTrue(knotPoints.containsKey(RHO) && interpolators.containsKey(RHO), "rho curve not found"); } else { Validate.isTrue((knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA)) ^ knownParameterTermSturctures.containsKey(ALPHA), "alpha curve not found"); Validate.isTrue((knotPoints.containsKey(BETA) && interpolators.containsKey(BETA)) ^ knownParameterTermSturctures.containsKey(BETA), "beta curve not found"); Validate.isTrue((knotPoints.containsKey(NU) && interpolators.containsKey(NU)) ^ knownParameterTermSturctures.containsKey(NU), "nu curve not found"); Validate.isTrue((knotPoints.containsKey(RHO) && interpolators.containsKey(RHO)) ^ knownParameterTermSturctures.containsKey(RHO), "rho curve not found"); } final LinkedHashMap<String, Interpolator1D> transInterpolators = new LinkedHashMap<String, Interpolator1D>(); final Set<String> names = interpolators.keySet(); for (final String name : names) { final Interpolator1D temp = new TransformedInterpolator1D(interpolators.get(name), parameterTransforms.get(name)); transInterpolators.put(name, temp); } _curveBuilder = new InterpolatedCurveBuildingFunction(knotPoints, transInterpolators); // _parameterTransforms = parameterTransforms; //TODO all the check for this _capPricers = new ArrayList<CapFloorPricer>(caps.size()); for (final CapFloor cap : caps) { _capPricers.add(new CapFloorPricer(cap, yieldCurves)); } _knownParameterTermStructures = knownParameterTermSturctures; }
From source file:com.sonicle.webtop.vfs.VfsManager.java
public HashMap<Integer, StoreShareFolder> listIncomingStoreFolders(String rootShareId) throws WTException { CoreManager core = WT.getCoreManager(getTargetProfileId()); LinkedHashMap<Integer, StoreShareFolder> folders = new LinkedHashMap<>(); // Retrieves incoming folders (from sharing). This lookup already // returns readable shares (we don't need to test READ permission) List<OShare> shares = core.listIncomingShareFolders(rootShareId, GROUPNAME_STORE); for (OShare share : shares) { UserProfile.Id ownerId = core.userUidToProfileId(share.getUserUid()); List<Store> stores = null; if (share.hasWildcard()) { stores = listStores(ownerId); } else { stores = Arrays.asList(getStore(Integer.valueOf(share.getInstance()))); } for (Store store : stores) { SharePermsFolder fperms = core.getShareFolderPermissions(share.getShareId().toString()); SharePermsElements eperms = core.getShareElementsPermissions(share.getShareId().toString()); if (folders.containsKey(store.getStoreId())) { StoreShareFolder folder = folders.get(store.getStoreId()); folder.getPerms().merge(fperms); folder.getElementsPerms().merge(eperms); } else { folders.put(store.getStoreId(), new StoreShareFolder(share.getShareId().toString(), ownerId, fperms, eperms, store)); } } } return folders; }
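listIncomingStoreFolders accumulates shares per store: when a store ID repeats, it fetches the existing entry with get() and merges permissions into it; otherwise it inserts a new one. A simplified sketch of that merge-or-insert accumulation, with a plain string standing in for the permission objects:

import java.util.LinkedHashMap;

public class MergeOrInsert {
    public static void main(String[] args) {
        // storeId -> accumulated permissions
        LinkedHashMap<Integer, String> folders = new LinkedHashMap<>();
        int[][] shares = { {42, 1}, {7, 2}, {42, 4} }; // {storeId, permBit}

        for (int[] share : shares) {
            int storeId = share[0];
            int permBit = share[1];
            if (folders.containsKey(storeId)) {
                // Same store seen again: merge into the existing entry.
                folders.put(storeId, folders.get(storeId) + "|" + permBit);
            } else {
                folders.put(storeId, String.valueOf(permBit));
            }
        }
        System.out.println(folders); // {42=1|4, 7=2} (first-seen order preserved)
    }
}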
From source file:fr.cirad.mgdb.exporting.markeroriented.HapMapExportHandler.java
@Override public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs, ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms, int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles) throws Exception { MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule); File warningFile = File.createTempFile("export_warnings_", ""); FileWriter warningFileWriter = new FileWriter(warningFile); int markerCount = markerCursor.count(); ZipOutputStream zos = new ZipOutputStream(outputStream); if (readyToExportFiles != null) for (String readyToExportFile : readyToExportFiles.keySet()) { zos.putNextEntry(new ZipEntry(readyToExportFile)); InputStream inputStream = readyToExportFiles.get(readyToExportFile); byte[] dataBlock = new byte[1024]; int count = inputStream.read(dataBlock, 0, 1024); while (count != -1) { zos.write(dataBlock, 0, count); count = inputStream.read(dataBlock, 0, 1024); } } List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs); ArrayList<String> individualList = new ArrayList<String>(); for (int i = 0; i < sampleIDs.size(); i++) { Individual individual = individuals.get(i); if (!individualList.contains(individual.getId())) { individualList.add(individual.getId()); } } String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals"; zos.putNextEntry(new ZipEntry(exportName + ".hapmap")); String header = "rs#" + "\t" + "alleles" + "\t" + "chrom" + "\t" + "pos" + "\t" + "strand" + "\t" + "assembly#" + "\t" + "center" + "\t" + "protLSID" + "\t" + "assayLSID" + "\t" + "panelLSID" + "\t" + "QCcode"; zos.write(header.getBytes()); for (int i = 0; i < individualList.size(); i++) { zos.write(("\t" + individualList.get(i)).getBytes()); } zos.write((LINE_SEPARATOR).getBytes()); int avgObjSize = (Integer) mongoTemplate .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize"); int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize; short nProgress = 0, nPreviousProgress = 0; long nLoadedMarkerCount = 0; while (markerCursor == null || markerCursor.hasNext()) { int nLoadedMarkerCountInLoop = 0; Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>(); boolean fStartingNewChunk = true; markerCursor.batchSize(nChunkSize); while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) { DBObject exportVariant = markerCursor.next(); DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION); markerChromosomalPositions.put((Comparable) exportVariant.get("_id"), refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":" + refPos.get(ReferencePosition.FIELDNAME_START_SITE)); nLoadedMarkerCountInLoop++; fStartingNewChunk = false; } List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet()); LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes( mongoTemplate, sampleIDs, currentMarkers, true, null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." 
+ ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample) { Comparable variantId = variant.getId(); if (markerSynonyms != null) { Comparable syn = markerSynonyms.get(variantId); if (syn != null) variantId = syn; } boolean fIsSNP = variant.getType().equals(Type.SNP.toString()); byte[] missingGenotype = ("\t" + "NN").getBytes(); String[] chromAndPos = markerChromosomalPositions.get(variant.getId()).split(":"); zos.write(((variantId == null ? variant.getId() : variantId) + "\t" + StringUtils.join(variant.getKnownAlleleList(), "/") + "\t" + chromAndPos[0] + "\t" + Long.parseLong(chromAndPos[1]) + "\t" + "+").getBytes()); for (int j = 0; j < 6; j++) zos.write(("\t" + "NA").getBytes()); Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>(); Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>(); Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>(); Collection<VariantRunData> runs = variantsAndRuns.get(variant); if (runs != null) for (VariantRunData run : runs) for (Integer sampleIndex : run.getSampleGenotypes().keySet()) { SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex); String gtCode = run.getSampleGenotypes().get(sampleIndex).getCode(); String individualId = individuals .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex))) .getId(); List<String> storedIndividualGenotypes = individualGenotypes.get(individualId); if (storedIndividualGenotypes == null) { storedIndividualGenotypes = new ArrayList<String>(); individualGenotypes.put(individualId, storedIndividualGenotypes); } storedIndividualGenotypes.add(gtCode); gqValueForSampleId.put(individualId, (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ)); dpValueForSampleId.put(individualId, (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP)); } int writtenGenotypeCount = 0; for (String individualId : individualList /* we use this list because it has the proper ordering */) { int individualIndex = individualList.indexOf(individualId); while (writtenGenotypeCount < individualIndex - 1) { zos.write(missingGenotype); writtenGenotypeCount++; } List<String> genotypes = individualGenotypes.get(individualId); HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes int highestGenotypeCount = 0; String mostFrequentGenotype = null; if (genotypes != null) for (String genotype : genotypes) { if (genotype.length() == 0) continue; /* skip missing genotypes */ Integer gqValue = gqValueForSampleId.get(individualId); if (gqValue != null && gqValue < nMinimumGenotypeQuality) continue; /* skip this sample because its GQ is under the threshold */ Integer dpValue = dpValueForSampleId.get(individualId); if (dpValue != null && dpValue < nMinimumReadDepth) continue; /* skip this sample because its DP is under the threshold */ int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype); if (gtCount > highestGenotypeCount) { highestGenotypeCount = gtCount; mostFrequentGenotype = genotype; } genotypeCounts.put(genotype, gtCount); } byte[] exportedGT = mostFrequentGenotype == null ? missingGenotype : ("\t" + StringUtils.join(variant.getAllelesFromGenotypeCode(mostFrequentGenotype), fIsSNP ? 
"" : "/")).getBytes(); zos.write(exportedGT); writtenGenotypeCount++; if (genotypeCounts.size() > 1) warningFileWriter.write("- Dissimilar genotypes found for variant " + (variantId == null ? variant.getId() : variantId) + ", individual " + individualId + ". Exporting most frequent: " + new String(exportedGT) + "\n"); } while (writtenGenotypeCount < individualList.size()) { zos.write(missingGenotype); writtenGenotypeCount++; } zos.write((LINE_SEPARATOR).getBytes()); } if (progress.hasAborted()) return; nLoadedMarkerCount += nLoadedMarkerCountInLoop; nProgress = (short) (nLoadedMarkerCount * 100 / markerCount); if (nProgress > nPreviousProgress) { // if (nProgress%5 == 0) // LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s"); progress.setCurrentStepProgress(nProgress); nPreviousProgress = nProgress; } } warningFileWriter.close(); if (warningFile.length() > 0) { zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt")); int nWarningCount = 0; BufferedReader in = new BufferedReader(new FileReader(warningFile)); String sLine; while ((sLine = in.readLine()) != null) { zos.write((sLine + "\n").getBytes()); in.readLine(); nWarningCount++; } LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount); in.close(); } warningFile.delete(); zos.close(); progress.setCurrentStepProgress((short) 100); }
From source file:com.ikanow.infinit.e.api.config.source.SourceHandler.java
/** * testSource * @param sourceJson * @param nNumDocsToReturn * @param bReturnFullText * @param userIdStr * @return */ public ResponsePojo testSource(String sourceJson, int nNumDocsToReturn, boolean bReturnFullText, boolean bRealDedup, String userIdStr) { ResponsePojo rp = new ResponsePojo(); try { SourcePojo source = null; SourcePojoSubstitutionApiMap apiMap = new SourcePojoSubstitutionApiMap(new ObjectId(userIdStr)); try { source = ApiManager.mapFromApi(sourceJson, SourcePojo.class, apiMap); source.fillInSourcePipelineFields(); } catch (Exception e) { rp.setResponse(new ResponseObject("Test Source", false, "Error deserializing source (JSON is valid but does not match schema): " + e.getMessage())); return rp; } if (null == source.getKey()) { source.setKey(source.generateSourceKey()); // (a dummy value, not guaranteed to be unique) } if ((null == source.getExtractType()) || !source.getExtractType().equals("Federated")) { String testUrl = source.getRepresentativeUrl(); if (null == testUrl) { rp.setResponse( new ResponseObject("Test Source", false, "Error, source contains no URL to harvest")); return rp; } } if (null == source.getTags()) { source.setTags(new HashSet<String>()); } // This is the only field that you don't normally need to specify in save but will cause // problems if it's not populated in test. ObjectId userId = new ObjectId(userIdStr); // Set owner (overwrite, for security reasons) source.setOwnerId(userId); if (null == source.getCommunityIds()) { source.setCommunityIds(new TreeSet<ObjectId>()); } if (!source.getCommunityIds().isEmpty()) { // need to check that I'm allowed the specified community... if ((1 == source.getCommunityIds().size()) && (userId.equals(source.getCommunityIds().iterator().next()))) { // we're OK only community id is user community } //TESTED else { HashSet<ObjectId> communities = SocialUtils.getUserCommunities(userIdStr); Iterator<ObjectId> it = source.getCommunityIds().iterator(); while (it.hasNext()) { ObjectId src = it.next(); if (!communities.contains(src)) { rp.setResponse(new ResponseObject("Test Source", false, "Authentication error: you don't belong to this community: " + src)); return rp; } //TESTED } } //TESTED } // Always add the userId to the source community Id (so harvesters can tell if they're running in test mode or not...) 
source.addToCommunityIds(userId); // (ie user's personal community, always has same _id - not that it matters) // Check the source's admin status source.setOwnedByAdmin(RESTTools.adminLookup(userId.toString(), false)); if (bRealDedup) { // Want to test update code, so ignore update cycle if (null != source.getRssConfig()) { source.getRssConfig().setUpdateCycle_secs(1); // always update } } HarvestController harvester = new HarvestController(true); if (nNumDocsToReturn > 100) { // (seems reasonable) nNumDocsToReturn = 100; } harvester.setStandaloneMode(nNumDocsToReturn, bRealDedup); List<DocumentPojo> toAdd = new LinkedList<DocumentPojo>(); List<DocumentPojo> toUpdate = new LinkedList<DocumentPojo>(); List<DocumentPojo> toRemove = new LinkedList<DocumentPojo>(); if (null == source.getHarvestStatus()) { source.setHarvestStatus(new SourceHarvestStatusPojo()); } String oldMessage = source.getHarvestStatus().getHarvest_message(); // SPECIAL CASE: FOR FEDERATED QUERIES if ((null != source.getExtractType()) && source.getExtractType().equals("Federated")) { int federatedQueryEnts = 0; SourceFederatedQueryConfigPojo endpoint = null; try { endpoint = source.getProcessingPipeline().get(0).federatedQuery; } catch (Exception e) { } if (null == endpoint) { rp.setResponse( new ResponseObject("Test Source", false, "source error: no federated query specified")); return rp; } AdvancedQueryPojo testQuery = null; String errMessage = "no query specified"; try { testQuery = AdvancedQueryPojo.fromApi(endpoint.testQueryJson, AdvancedQueryPojo.class); } catch (Exception e) { errMessage = e.getMessage(); } if (null == testQuery) { rp.setResponse(new ResponseObject("Test Source", false, "source error: need to specifiy a valid IKANOW query to test federated queries, error: " + errMessage)); return rp; } // OK if we're here then we can test the query SimpleFederatedQueryEngine testFederatedQuery = new SimpleFederatedQueryEngine(); endpoint.parentSource = source; testFederatedQuery.addEndpoint(endpoint); ObjectId queryId = new ObjectId(); String[] communityIdStrs = new String[source.getCommunityIds().size()]; int i = 0; for (ObjectId commId : source.getCommunityIds()) { communityIdStrs[i] = commId.toString(); i++; } testFederatedQuery.setTestMode(true); testFederatedQuery.preQueryActivities(queryId, testQuery, communityIdStrs); StatisticsPojo stats = new StatisticsPojo(); stats.setSavedScores(0, 0); rp.setStats(stats); ArrayList<BasicDBObject> toAddTemp = new ArrayList<BasicDBObject>(1); testFederatedQuery.postQueryActivities(queryId, toAddTemp, rp); for (BasicDBObject docObj : toAddTemp) { DocumentPojo doc = DocumentPojo.fromDb(docObj, DocumentPojo.class); if (bReturnFullText) { doc.setFullText(docObj.getString(DocumentPojo.fullText_)); doc.makeFullTextNonTransient(); } if (null != doc.getEntities()) { federatedQueryEnts += doc.getEntities().size(); } //Metadata workaround: @SuppressWarnings("unchecked") LinkedHashMap<String, Object[]> meta = (LinkedHashMap<String, Object[]>) docObj .get(DocumentPojo.metadata_); if (null != meta) { Object metaJson = meta.get("json"); if (metaJson instanceof Object[]) { // (in this case ... 
non-cached, need to recopy in, I forget why) doc.addToMetadata("json", (Object[]) metaJson); } } toAdd.add(doc); } // (currently can't run harvest source federated query) if (0 == federatedQueryEnts) { // (more fed query exceptions) source.getHarvestStatus().setHarvest_message( "Warning: no entities extracted, probably docConversionMap is wrong?"); } else { source.getHarvestStatus().setHarvest_message(federatedQueryEnts + " entities extracted"); } } //TESTED (END FEDERATED QUERY TEST MODE, WHICH IS A BIT DIFFERENT) else { harvester.harvestSource(source, toAdd, toUpdate, toRemove); } // (don't parrot the old message back - v confusing) if (oldMessage == source.getHarvestStatus().getHarvest_message()) { // (ptr ==) source.getHarvestStatus() .setHarvest_message("(no documents extracted - likely a source or configuration error)"); } //TESTED String message = null; if ((null != source.getHarvestStatus()) && (null != source.getHarvestStatus().getHarvest_message())) { message = source.getHarvestStatus().getHarvest_message(); } else { message = ""; } List<String> errMessagesFromSourceDeser = apiMap.getErrorMessages(); if (null != errMessagesFromSourceDeser) { StringBuffer sbApiMapErr = new StringBuffer("Substitution errors:\n"); for (String err : errMessagesFromSourceDeser) { sbApiMapErr.append(err).append("\n"); } message = message + "\n" + sbApiMapErr.toString(); } //TESTED (by hand) if ((null != source.getHarvestStatus()) && (HarvestEnum.error == source.getHarvestStatus().getHarvest_status())) { rp.setResponse(new ResponseObject("Test Source", false, "source error: " + message)); rp.setData(toAdd, new DocumentPojoApiMap()); } else { if ((null == message) || message.isEmpty()) { message = "no messages from harvester"; } rp.setResponse(new ResponseObject("Test Source", true, "successfully returned " + toAdd.size() + " docs: " + message)); try { // If grabbing full text // Also some logstash/custom specific logic - these aren't docs so just output the entire record boolean isLogstash = (null != source.getExtractType()) && source.getExtractType().equalsIgnoreCase("logstash"); boolean isCustom = (null != source.getExtractType()) && source.getExtractType().equalsIgnoreCase("custom"); List<BasicDBObject> records = null; if (bReturnFullText || isLogstash || isCustom) { for (DocumentPojo doc : toAdd) { if (isLogstash || isCustom) { if (null == records) { records = new ArrayList<BasicDBObject>(toAdd.size()); } BasicDBObject dbo = (BasicDBObject) doc.getMetadata().get("record")[0]; Object test = dbo.get("_id"); if ((null != test) && (test instanceof ObjectId)) { dbo.remove("_id"); // (unless it's a custom _id added from logstash then remove it) } records.add(dbo); } //TESTED else if (bReturnFullText) { doc.makeFullTextNonTransient(); } } } //TESTED if (null != records) { rp.setData(records, (BasePojoApiMap<BasicDBObject>) null); } //TESTED else { rp.setData(toAdd, new DocumentPojoApiMap()); } //TESTED //Test deserialization: rp.toApi(); } catch (Exception e) { //e.printStackTrace(); StringBuffer sb = new StringBuffer(); Globals.populateStackTrace(sb, e); rp.setData( new BasicDBObject("error_message", "Error deserializing documents: " + sb.toString()), null); } } } catch (Exception e) { // If an exception occurs log the error logger.error("Exception Message: " + e.getMessage(), e); rp.setResponse(new ResponseObject("Test Source", false, "Error testing source: " + e.getMessage())); } catch (Error e) { // If an exception occurs log the error logger.error("Exception Message: " + e.getMessage(), e); 
rp.setResponse(new ResponseObject("Test Source", false, "Configuration/Installation error: " + e.getMessage())); } return rp; }
From source file:com.vmware.bdd.manager.ClusterConfigManager.java
    private LinkedHashMap<NetTrafficType, List<ClusterNetConfigInfo>> validateAndConvertNetNamesToNetConfigs(
            Map<NetTrafficType, List<String>> netNamesInfo, boolean isMaprDistro) {
        LinkedHashMap<NetTrafficType, List<ClusterNetConfigInfo>> netConfigs =
                new LinkedHashMap<NetTrafficType, List<ClusterNetConfigInfo>>();
        LinkedHashMap<String, Set<String>> port2names = new LinkedHashMap<String, Set<String>>();

        for (NetTrafficType type : netNamesInfo.keySet()) {
            netConfigs.put(type, new ArrayList<ClusterNetConfigInfo>());
            for (String name : netNamesInfo.get(type)) {
                NetworkEntity networkEntity = networkMgr.getNetworkEntityByName(name);
                String pg = networkEntity.getPortGroup();
                Boolean isGenerateHostname = networkEntity.getIsGenerateHostname();
                String hostnamePrefix = HostnameManager.getHostnamePrefix();

                ClusterNetConfigInfo netConfig = new ClusterNetConfigInfo(type, name, pg,
                        networkEntity.getDnsType(), isGenerateHostname, hostnamePrefix);
                netConfigs.get(type).add(netConfig);

                if (!port2names.containsKey(pg)) {
                    port2names.put(pg, new HashSet<String>());
                }
                port2names.get(pg).add(name);
            }
        }

        if (isMaprDistro && port2names.size() > 1) {
            throw BddException.MULTI_NETWORKS_FOR_MAPR_DISTRO();
        }
        // if nw1 and nw2 both refer to pg1, they must not be used in the same cluster
        for (String pg : port2names.keySet()) {
            if (port2names.get(pg).size() > 1) {
                throw BddException.PG_REFERENCED_MULTI_TIMES();
            }
        }
        return netConfigs;
    }
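The port2names map above is the classic get-or-create grouping idiom: containsKey/put creates the bucket on first sight, then get fills it. Using a LinkedHashMap keeps port groups in first-seen order, so the validation pass reports failures deterministically. A minimal standalone version of the same idiom; the network names and port groups are made up:

import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class PortGroupIndex {
    public static void main(String[] args) {
        // Pairs of (networkName, portGroup); pg1 is deliberately referenced twice.
        String[][] networks = { { "nw1", "pg1" }, { "nw2", "pg1" }, { "nw3", "pg2" } };

        LinkedHashMap<String, Set<String>> port2names = new LinkedHashMap<String, Set<String>>();
        for (String[] nw : networks) {
            if (!port2names.containsKey(nw[1])) { // get-or-create the bucket for this port group
                port2names.put(nw[1], new HashSet<String>());
            }
            port2names.get(nw[1]).add(nw[0]);
        }

        // Same validation as the source: a port group referenced by more than
        // one network name is an error.
        for (Map.Entry<String, Set<String>> e : port2names.entrySet()) {
            if (e.getValue().size() > 1) {
                System.out.println("Port group " + e.getKey() + " referenced by " + e.getValue());
            }
        }
    }
}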