Usage examples for java.util.Queue.add
boolean add(E e);
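Before the full-size examples below, a minimal sketch of the add contract (class and queue names here are illustrative): add inserts an element and returns true, or throws an exception when the queue cannot accept it. On a capacity-restricted queue it throws IllegalStateException where offer would simply return false.

import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        // Unbounded queue: add always succeeds and returns true
        Queue<String> tasks = new LinkedList<String>();
        tasks.add("first");
        tasks.add("second");
        System.out.println(tasks.remove()); // prints "first" (FIFO order)

        // Capacity-restricted queue: add throws IllegalStateException when full,
        // while offer reports the same failure by returning false
        Queue<String> bounded = new ArrayBlockingQueue<String>(1);
        bounded.add("only");
        try {
            bounded.add("overflow");
        } catch (IllegalStateException e) {
            System.out.println("add failed: " + e.getMessage()); // "Queue full"
        }
        System.out.println(bounded.offer("overflow")); // prints "false"
    }
}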
From source file: it.geosolutions.geobatch.actions.ds2ds.geoserver.DSGeoServerAction.java
@Override
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {
    listenerForwarder.started();
    // return object
    final Queue<EventObject> outputEvents = new LinkedList<EventObject>();

    // check global configurations
    // Geoserver config
    // ----------------
    updateTask("Check GeoServer configuration");
    final String url = conf.getGeoserverURL();
    final String user = conf.getGeoserverUID();
    final String password = conf.getGeoserverPWD();
    GeoServerRESTManager gsMan = null;
    try {
        gsMan = new GeoServerRESTManager(new URL(url), user, password);
    } catch (MalformedURLException e) {
        failAction("Wrong GeoServer URL");
    } catch (IllegalArgumentException e) {
        failAction("Unable to create the GeoServer Manager using a null argument");
    }
    // TODO how to check if GS user/password are correct?
    listenerForwarder.progressing(5, "GeoServer configuration checked");

    // Check operation
    // ---------------
    updateTask("Check operation");
    String op = conf.getOperation();
    if (op == null || !(op.equalsIgnoreCase("PUBLISH") || op.equalsIgnoreCase("REMOVE"))) {
        failAction("Bad operation: " + op + " in configuration");
    }
    listenerForwarder.progressing(10, "Operation checked");

    // Check workspace
    // ---------------
    updateTask("Check workspace configuration");
    String ws = conf.getDefaultNamespace();
    String wsUri = conf.getDefaultNamespaceUri();
    Boolean existWS = false;
    // note: synchronizing on an autoboxed Boolean locks a shared interned instance
    synchronized (existWS) {
        existWS = gsMan.getReader().getWorkspaceNames().contains(ws);
        if (!existWS) {
            boolean createWS = conf.getCreateNameSpace();
            if (createWS) {
                // try to create the workspace
                updateTask("Create workspace " + ws + " in GeoServer");
                boolean created = false;
                if (wsUri == null) {
                    created = gsMan.getPublisher().createWorkspace(ws);
                } else {
                    try {
                        created = gsMan.getPublisher().createWorkspace(ws, new URI(wsUri));
                    } catch (URISyntaxException e) {
                        failAction("Invalid NameSpace URI " + wsUri + " in configuration");
                    }
                }
                if (!created) {
                    failAction("FATAL: unable to create workspace " + ws + " in GeoServer");
                }
            } else {
                failAction("Bad workspace (namespace): " + ws + " in configuration");
            }
        }
    }
    listenerForwarder.progressing(25, "GeoServer workspace checked");

    // event-based business logic
    while (events.size() > 0) {
        final EventObject ev;
        try {
            if ((ev = events.remove()) != null) {
                updateTask("Working on incoming event: " + ev.getSource());
                updateTask("Check acceptable file");
                FileSystemEvent fileEvent = (FileSystemEvent) ev;

                // set FeatureConfiguration
                updateTask("Set Feature Configuration");
                this.createFeatureConfiguration(fileEvent);
                FeatureConfiguration featureConfig = conf.getFeatureConfiguration();

                // TODO check FeatureConfiguration
                updateTask("Check Feature Configuration");
                if (featureConfig.getTypeName() == null) {
                    failAction("feature typeName cannot be null");
                }
                // TODO check if the typeName already exists for the target workspace?

                // datastore check (and eventually creation)
                updateTask("Check datastore configuration");
                String ds = conf.getStoreName();
                Boolean existDS = false;
                synchronized (existDS) {
                    existDS = gsMan.getReader().getDatastores(ws).getNames().contains(ds);
                    if (!existDS) {
                        boolean createDS = conf.getCreateDataStore();
                        if (createDS) {
                            // create datastore
                            updateTask("Create datastore in GeoServer");
                            Map<String, Object> datastore = this.deserialize(featureConfig.getDataStore());
                            String dbType = (String) datastore.get("dbtype");
                            boolean created = false;
                            if (dbType.equalsIgnoreCase("postgis")) {
                                GSPostGISDatastoreEncoder encoder = new GSPostGISDatastoreEncoder(ds);
                                encoder.setName(ds);
                                encoder.setEnabled(true);
                                encoder.setHost((String) datastore.get("host"));
                                encoder.setPort(Integer.parseInt((String) datastore.get("port")));
                                encoder.setDatabase((String) datastore.get("database"));
                                encoder.setSchema((String) datastore.get("schema"));
                                encoder.setUser((String) datastore.get("user"));
                                encoder.setPassword((String) datastore.get("passwd"));
                                created = gsMan.getStoreManager().create(ws, encoder);
                                if (!created) {
                                    failAction("FATAL: unable to create PostGIS datastore " + ds + " in GeoServer");
                                }
                            } else if (dbType.equalsIgnoreCase("oracle")) {
                                String dbname = (String) datastore.get("database");
                                GSOracleNGDatastoreEncoder encoder = new GSOracleNGDatastoreEncoder(ds, dbname);
                                encoder.setName(ds);
                                encoder.setEnabled(true);
                                encoder.setHost((String) datastore.get("host"));
                                encoder.setPort(Integer.parseInt((String) datastore.get("port")));
                                encoder.setDatabase(dbname);
                                encoder.setSchema((String) datastore.get("schema"));
                                encoder.setUser((String) datastore.get("user"));
                                encoder.setPassword((String) datastore.get("passwd"));
                                created = gsMan.getStoreManager().create(ws, encoder);
                                if (!created) {
                                    failAction("FATAL: unable to create Oracle NG datastore " + ds + " in GeoServer");
                                }
                            } else {
                                failAction("The datastore type " + dbType + " is not supported");
                            }
                        } else {
                            failAction("Bad datastore: " + ds + " in configuration. Datastore " + ds
                                    + " doesn't exist in workspace (namespace) " + ws);
                        }
                    }
                }
                listenerForwarder.progressing(50, "Check GeoServer datastore");

                // feature type publication/removal
                boolean done = false;
                if (op.equalsIgnoreCase("PUBLISH")) {
                    if (!gsMan.getReader().getLayers().getNames().contains(featureConfig.getTypeName())) {
                        updateTask("Publish DBLayer " + featureConfig.getTypeName() + " in GeoServer");
                        // featuretype
                        final GSFeatureTypeEncoder fte = new GSFeatureTypeEncoder();
                        fte.setName(featureConfig.getTypeName());
                        fte.setTitle(featureConfig.getTypeName());
                        String crs = featureConfig.getCrs();
                        if (crs != null) {
                            fte.setSRS(featureConfig.getCrs());
                        } else {
                            fte.setSRS("EPSG:4326");
                        }
                        fte.setProjectionPolicy(ProjectionPolicy.FORCE_DECLARED);
                        // layer & styles
                        final GSLayerEncoder layerEncoder = new GSLayerEncoder();
                        // default style
                        layerEncoder.setDefaultStyle(this.defineLayerStyle(featureConfig, gsMan));
                        if (conf.getStyles() != null) {
                            // add available styles
                            for (String style : conf.getStyles()) {
                                layerEncoder.addStyle(style);
                            }
                        }
                        // publish
                        done = gsMan.getPublisher().publishDBLayer(ws, ds, fte, layerEncoder);
                        if (!done) {
                            failAction("Impossible to publish DBLayer " + featureConfig.getTypeName() + " in GeoServer");
                        }
                    }
                } else if (op.equalsIgnoreCase("REMOVE")) {
                    if (gsMan.getReader().getLayers().getNames().contains(featureConfig.getTypeName())) {
                        // remove
                        updateTask("Remove DBLayer " + featureConfig.getTypeName() + " from GeoServer");
                        done = gsMan.getPublisher().unpublishFeatureType(ws, ds, featureConfig.getTypeName());
                        if (!done) {
                            failAction("Impossible to remove DBLayer " + featureConfig.getTypeName() + " in GeoServer");
                        }
                    }
                }
                listenerForwarder.progressing(100F, "Successful Geoserver " + op + " operation");
                listenerForwarder.completed();
                outputEvents.add(ev);
            } else {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Encountered a NULL event: SKIPPING...");
                }
                continue;
            }
        } catch (Exception ioe) {
            failAction("Unable to produce the output: " + ioe.getLocalizedMessage(), ioe);
        }
    }
    return outputEvents;
}
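The action above drains its input queue with remove() and collects the handled events into a fresh LinkedList used as the output queue. A minimal sketch of that drain-and-produce pattern, with a hypothetical process() standing in for the GeoServer work:

import java.util.EventObject;
import java.util.LinkedList;
import java.util.Queue;

final class DrainAndProduce {
    // Drain the incoming queue and add each successfully handled event to the output queue
    static Queue<EventObject> execute(Queue<EventObject> events) {
        final Queue<EventObject> outputEvents = new LinkedList<EventObject>();
        while (events.size() > 0) {
            EventObject ev = events.remove(); // would throw NoSuchElementException if empty; guarded by the size() check
            if (ev != null) {
                process(ev);          // hypothetical placeholder for the per-event work
                outputEvents.add(ev); // forward the event to the next action
            }
        }
        return outputEvents;
    }

    private static void process(EventObject ev) {
        // hypothetical: a real action would publish or remove layers here
    }
}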
From source file: it.geosolutions.geobatch.imagemosaic.ImageMosaicAction.java
/**
 * Publish or update an ImageMosaic layer on the specified GeoServer.
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {
    if (LOGGER.isInfoEnabled())
        LOGGER.info("Start processing...");
    listenerForwarder.started();
    try {
        // looking for file
        if (events == null)
            throw new IllegalArgumentException("Unable to execute action with incoming null parameter");
        if (events.size() == 0)
            throw new IllegalArgumentException("Wrong number of elements for this action: " + events.size());

        /*
         * If here: we can execute the action.
         */
        Queue<EventObject> ret = new LinkedList<EventObject>();

        /*
         * For each event in the queue.
         */
        while (events.size() > 0) {
            final Object evObj = events.remove();

            /*
             * If the input file exists and it is a file, check whether it is:
             * - a directory
             * - an XML -> serialized ImageMosaicCommand
             * Build the ImageMosaicCommand accordingly.
             */
            final ImageMosaicCommand cmd;
            if (evObj == null) {
                ActionExceptionHandler.handleError(getConfiguration(), this, "Input null object.");
                continue;
            }
            if (evObj instanceof FileSystemEvent) {
                /*
                 * Checking input files.
                 */
                final File input = ((FileSystemEvent) evObj).getSource();
                if (!input.exists()) {
                    // no file is found for this event: try the next one
                    ActionExceptionHandler.handleError(getConfiguration(), this,
                            "The input file does not exist at url: " + input.getAbsolutePath());
                    continue;
                }
                /*
                 * The file event points to an XML file...
                 *
                 * @see ImageMosaicCommand
                 */
                if (input.isFile() && FilenameUtils.getExtension(input.getName()).equalsIgnoreCase("xml")) {
                    if (LOGGER.isInfoEnabled()) {
                        LOGGER.info("Working on an XML command file: " + input.getAbsolutePath());
                    }
                    // try to deserialize
                    cmd = ImageMosaicCommand.deserialize(input.getAbsoluteFile());
                    if (cmd == null) {
                        ActionExceptionHandler.handleError(getConfiguration(), this,
                                "Unable to deserialize the passed file: " + input.getAbsolutePath());
                        continue;
                    }
                } else if (input.isDirectory()) {
                    if (LOGGER.isInfoEnabled()) {
                        LOGGER.info("Input file event points to a directory: " + input.getAbsolutePath());
                    }
                    String format = ((ImageMosaicConfiguration) super.getConfiguration()).getGranuleFormat();
                    if (format == null || format.isEmpty()) {
                        LOGGER.warn("No granule format specified in flow configuration... forcing it to .tif");
                        format = "tif";
                    }
                    StringBuilder builder = new StringBuilder();
                    builder.append("*.");
                    builder.append(format);
                    final Collector coll = new Collector(
                            new WildcardFileFilter(builder.toString(), IOCase.INSENSITIVE));
                    // build the command from the collected granules
                    cmd = new ImageMosaicCommand(input, coll.collect(input), null);
                } else {
                    // the file event does not point to a directory nor to an XML file
                    ActionExceptionHandler.handleError(getConfiguration(), this,
                            "The file event does not point to a directory nor to an XML file: "
                                    + input.getAbsolutePath());
                    continue;
                }
            } else if (evObj instanceof EventObject) {
                Object innerObject = ((EventObject) evObj).getSource();
                if (innerObject instanceof ImageMosaicCommand) {
                    cmd = (ImageMosaicCommand) innerObject;
                } else {
                    // the event does not carry a valid object
                    ActionExceptionHandler.handleError(getConfiguration(), this,
                            "The file event does not point to a valid object: " + evObj);
                    continue;
                }
            } else {
                // the event does not carry a valid object
                ActionExceptionHandler.handleError(getConfiguration(), this,
                        "The file event does not point to a valid object: " + evObj);
                continue;
            }

            /*
             * The file pointing to the directory which the layer will refer to.
             */
            final File baseDir = cmd.getBaseDir();

            /*
             * A descriptor for the mosaic to handle.
             */
            final ImageMosaicGranulesDescriptor mosaicDescriptor = ImageMosaicGranulesDescriptor
                    .buildDescriptor(baseDir, getConfiguration());
            if (mosaicDescriptor == null) {
                ActionExceptionHandler.handleError(getConfiguration(), this,
                        "Unable to build the imageMosaic descriptor: " + cmd.getBaseDir());
                continue;
            }

            // Perform tests on the base dir file
            if (!baseDir.exists() || !baseDir.isDirectory()) {
                // no base dir exists: try to build a new one using addList()
                if (cmd.getAddFiles() != null) {
                    if (cmd.getAddFiles().size() > 0) {
                        // try to build the baseDir
                        if (!baseDir.mkdirs()) {
                            ActionExceptionHandler.handleError(getConfiguration(), this,
                                    "Unable to create the base directory named '" + baseDir.getAbsolutePath() + "'.");
                            continue;
                        }
                    } else {
                        final StringBuilder msg = new StringBuilder();
                        msg.append("Unexpected non-existent baseDir for this layer '")
                                .append(baseDir.getAbsolutePath())
                                .append("'.\n If you want to build a new layer try using an ")
                                .append("existent or writeable baseDir and append a list of files to the addFile list.");
                        ActionExceptionHandler.handleError(getConfiguration(), this, msg.toString());
                        continue;
                    }
                } else {
                    final StringBuilder msg = new StringBuilder();
                    msg.append("Unexpected non-existent baseDir for this layer '")
                            .append(baseDir.getAbsolutePath())
                            .append("'.\n If you want to build a new layer try using an ")
                            .append("existent or writeable baseDir and append a list of files to the addFile list.");
                    ActionExceptionHandler.handleError(getConfiguration(), this, msg.toString());
                    continue;
                }
            }

            // override local cmd null params with the getConfiguration()
            cmd.copyConfigurationIntoCommand(getConfiguration());

            // prepare configuration for layername and storename
            final String layerName;
            if (cmd.getLayerName() == null) {
                layerName = baseDir.getName();
                cmd.setLayerName(layerName);
            } else {
                layerName = cmd.getLayerName();
            }
            final String storeName;
            if (cmd.getStoreName() == null) {
                storeName = layerName;
                cmd.setStoreName(storeName);
            } else {
                storeName = cmd.getStoreName();
            }

            /*
             * Here we have a 'cmd' command file which may have getAddFile or
             * getDelFile != null. Using those lists we may:
             * DEL -> delete an image from the datastore using the absolute path.
             * ADD -> insert an image into the datastore using the absolute path.
             */
            // REST library read
            GeoServerRESTReader gsReader = new GeoServerRESTReader(cmd.getGeoserverURL(),
                    cmd.getGeoserverUID(), cmd.getGeoserverPWD());
            // REST library write
            final GeoServerRESTPublisher gsPublisher = new GeoServerRESTPublisher(cmd.getGeoserverURL(),
                    cmd.getGeoserverUID(), cmd.getGeoserverPWD());

            final String workspace = cmd.getDefaultNamespace() != null ? cmd.getDefaultNamespace() : "";

            /*
             * Check if the ImageMosaic layer already exists...
             */
            final boolean layerExists;
            if (cmd.getIgnoreGeoServer()) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info("GeoServer will be ignored by configuration. Assuming that an update is required.");
                }
                layerExists = true;
            } else {
                final RESTLayer layer = gsReader.getLayer(layerName);
                layerExists = layer != null;
            }

            if (layerExists) {
                if (!updateMosaicLayer(cmd, baseDir, layerName, mosaicDescriptor, gsPublisher)) {
                    ActionExceptionHandler.handleError(getConfiguration(), this, "Mosaic not Updated...");
                    continue;
                }
            } else {
                if (!createMosaicLayer(cmd, baseDir, workspace, mosaicDescriptor, layerName, gsPublisher,
                        storeName)) {
                    ActionExceptionHandler.handleError(getConfiguration(), this, "Mosaic not Created...");
                    continue;
                }
            }

            /*
             * The returned file (one .layer file per event) will be added to
             * the output queue.
             */
            final File layerDescriptor;
            // generate a RETURN file and append it to the return queue
            // TODO get info about store and workspace name...
            layerDescriptor = ImageMosaicOutput.writeReturn(baseDir, baseDir, cmd);
            if (layerDescriptor != null) {
                LOGGER.info("Created layer descriptor file " + layerDescriptor);
                ret.add(new FileSystemEvent(layerDescriptor, FileSystemEventType.FILE_ADDED));
            }
        } // while

        listenerForwarder.completed();
        // ... setting up the appropriate event for the next action
        return ret;
    } catch (Exception t) {
        if (LOGGER.isErrorEnabled())
            LOGGER.error(t.getLocalizedMessage(), t);
        listenerForwarder.failed(t);
        throw new ActionException(this, t.getMessage(), t);
    }
}
From source file: org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.java
private void walkASTMarkTABREF(ASTNode ast, Set<String> cteAlias) throws SemanticException {
    Queue<Node> queue = new LinkedList<>();
    queue.add(ast);
    Map<HivePrivilegeObject, MaskAndFilterInfo> basicInfos = new LinkedHashMap<>();
    while (!queue.isEmpty()) {
        ASTNode astNode = (ASTNode) queue.poll();
        if (astNode.getToken().getType() == HiveParser.TOK_TABREF) {
            int aliasIndex = 0;
            StringBuilder additionalTabInfo = new StringBuilder();
            for (int index = 1; index < astNode.getChildCount(); index++) {
                ASTNode ct = (ASTNode) astNode.getChild(index);
                if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE
                        || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE
                        || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
                    additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(),
                            ct.getTokenStopIndex()));
                } else {
                    aliasIndex = index;
                }
            }

            ASTNode tableTree = (ASTNode) (astNode.getChild(0));
            String tabIdName = getUnescapedName(tableTree);

            String alias;
            if (aliasIndex != 0) {
                alias = unescapeIdentifier(astNode.getChild(aliasIndex).getText());
            } else {
                alias = getUnescapedUnqualifiedTableName(tableTree);
            }

            // We need to know if it is a CTE or not.
            // A CTE may have the same name as a table.
            // For example,
            // with select TAB1 [masking] as TAB2
            // select * from TAB2 [no masking]
            if (cteAlias.contains(tabIdName)) {
                continue;
            }

            Table table = null;
            try {
                table = getTableObjectByName(tabIdName);
            } catch (HiveException e) {
                // Table may not be found when materialization of CTE is on.
                LOG.info("Table " + tabIdName + " is not found in walkASTMarkTABREF.");
                continue;
            }

            List<String> colNames = new ArrayList<>();
            List<String> colTypes = new ArrayList<>();
            for (FieldSchema col : table.getAllCols()) {
                colNames.add(col.getName());
                colTypes.add(col.getType());
            }

            basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames),
                    new MaskAndFilterInfo(colTypes, additionalTabInfo.toString(), alias, astNode, table.isView()));
        }
        if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) {
            for (Node child : astNode.getChildren()) {
                queue.offer(child);
            }
        }
    }
    List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>();
    basicPrivObjs.addAll(basicInfos.keySet());
    List<HivePrivilegeObject> needRewritePrivObjs = tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
    if (needRewritePrivObjs != null && !needRewritePrivObjs.isEmpty()) {
        for (HivePrivilegeObject privObj : needRewritePrivObjs) {
            MaskAndFilterInfo info = basicInfos.get(privObj);
            String replacementText = tableMask.create(privObj, info);
            if (replacementText != null) {
                // We don't support masking/filtering against ACID queries at the moment
                if (ctx.getIsUpdateDeleteMerge()) {
                    throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_ACID_NOT_SUPPORTED,
                            privObj.getDbname(), privObj.getObjectName());
                }
                tableMask.setNeedsRewrite(true);
                tableMask.addTranslation(info.astNode, replacementText);
            }
        }
    }
}
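The Hive method above is a textbook breadth-first traversal: seed the queue with add(ast), poll() nodes off the front, and offer() children onto the back. A stripped-down sketch of the same pattern over a generic node type (the Node interface here is illustrative):

import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

final class BreadthFirstWalk {
    interface Node {                     // illustrative stand-in for the AST node type
        List<Node> children();
        void visit();
    }

    static void walk(Node root) {
        Queue<Node> queue = new LinkedList<Node>();
        queue.add(root);                 // seed the traversal
        while (!queue.isEmpty()) {
            Node current = queue.poll(); // take the oldest node (FIFO)
            current.visit();
            for (Node child : current.children()) {
                queue.offer(child);      // enqueue children for the next levels
            }
        }
    }
}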
From source file: com.ricemap.spateDB.core.RTree.java
/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements and build the tree. It also
 * writes the created tree directly to disk.
 *
 * @param element_bytes
 *            - serialization of the elements to be written
 * @param offset
 *            - index of the first element to use in the elements array
 * @param len
 *            - number of bytes to use from the elements array
 * @param degree
 *            - degree (fan-out) of each tree node
 * @param dataOut
 *            - an output to use for writing the tree to
 * @param fast_sort
 *            - setting this to <code>true</code> allows the method to run
 *            faster by materializing the center coordinates of each element
 *            in the list, which speeds up the comparison. However, this
 *            requires an additional 16 bytes per element. So, for each 1M
 *            elements, the method will require approximately an additional
 *            16 MB.
 * @param columnarStorage
 *            - whether to write the elements in a columnar layout
 */
public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final boolean fast_sort, final boolean columnarStorage) {
    try {
        columnar = columnarStorage;
        // TODO: the order of fields should be stable under the Oracle JVM, but it is not guaranteed
        Field[] fields = stockObject.getClass().getDeclaredFields();

        // Count the number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without the end-of-line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);
            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out that findBestDegree returns the best degree when the whole
        // tree is loaded into memory when processed. However, as current algorithms
        // process the tree while it's on disk, a higher degree should be selected
        // such that a node fits one file block (assumed to be 4K).
        // final int degree = findBestDegree(bytesAvailable, elementCount);
        LOG.info("Writing an RTree with degree " + degree);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final int[] ids = new int[elementCount];
        final double[] ts = fast_sort ? new double[elementCount] : null;
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        // Initialize columnar data output
        ByteArrayOutputStream index_bos = new ByteArrayOutputStream();
        DataOutputStream index_dos = new DataOutputStream(index_bos);
        ByteArrayOutputStream[] bos = new ByteArrayOutputStream[fields.length];
        DataOutputStream[] dos = new DataOutputStream[fields.length];
        for (int i = 0; i < bos.length; i++) {
            bos[i] = new ByteArrayOutputStream();
            dos[i] = new DataOutputStream(bos[i]);
        }

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            ids[i] = i;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line without the end-of-line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample the center of the shape
                ts[i] = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                // Build columnar storage
                if (stockObject instanceof Point3d) {
                    index_dos.writeDouble(ts[i]);
                    index_dos.writeDouble(xs[i]);
                    index_dos.writeDouble(ys[i]);
                } else {
                    throw new RuntimeException("Indexing non-point shape with RTREE is not supported yet");
                }
                for (int j = 0; j < fields.length; j++) {
                    if (fields[j].getType().equals(Integer.TYPE)) {
                        dos[j].writeInt(fields[j].getInt(stockObject));
                    } else if (fields[j].getType().equals(Double.TYPE)) {
                        dos[j].writeDouble(fields[j].getDouble(stockObject));
                    } else if (fields[j].getType().equals(Long.TYPE)) {
                        dos[j].writeLong(fields[j].getLong(stockObject));
                    } else {
                        // unsupported field type: skipped
                        continue;
                    }
                }
            }
            i_start = i_end;
        }
        index_dos.close();
        for (int i = 0; i < dos.length; i++) {
            dos[i].close();
        }

        /** A struct to store information about a split */
        class SplitStruct extends Prism {
            /** Start and end index for this split */
            int index1, index2;
            /** Direction of this split */
            byte direction;
            /** Index of first element on disk */
            int offsetOfFirstElement;

            static final byte DIRECTION_T = 0;
            static final byte DIRECTION_X = 1;
            static final byte DIRECTION_Y = 2;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                if (columnarStorage)
                    out.writeInt(index1);
                else
                    out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableT;
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use the materialized ts[], xs[] and ys[] to do the comparisons
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap ids
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ts[i] < ts[j]) return -1;
                            if (ts[i] > ts[j]) return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap ids
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Compare by x coordinate only
                            if (xs[i] < xs[j]) return -1;
                            if (xs[i] > xs[j]) return 1;
                            return 0;
                        }
                    };
                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap ids
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j]) return -1;
                            if (ys[i] > ys[j]) return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys. Always deserialize objects to compare.
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ids
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double ti = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double tj = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;

                            if (ti < tj) return -1;
                            if (ti > tj) return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ids
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            if (xi < xj) return -1;
                            if (xi > xj) return 1;
                            return 0;
                        }
                    };
                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ids
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            if (yi < yj) return -1;
                            if (yi > yj) return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();
                final IndexedSortable[] sortables = new IndexedSortable[3];
                sortables[SplitStruct.DIRECTION_T] = sortableT;
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition into degree partitions (equally) and
                // create a SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit;
                    // cycle the split direction: T -> X -> Y -> T
                    if (direction == 0) {
                        newSplit = new SplitStruct(i1, i2, (byte) 1);
                    } else if (direction == 1) {
                        newSplit = new SplitStruct(i1, i2, (byte) 2);
                    } else {
                        newSplit = new SplitStruct(i1, i2, (byte) 0);
                    }
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
            // Null output stream
            @Override
            public void write(int b) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b, int off, int len) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b) throws IOException {
                // Do nothing
            }
        }, null, TreeHeaderSize + nodes.size() * NodeSize);

        for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
            nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
            if (i != nodes.elementAt(i_leaf).index1)
                throw new RuntimeException();
            double t1, x1, y1, t2, x2, y2;

            // Initialize the MBR to the first object
            int eol = skipToEOL(element_bytes, offsets[i]);
            fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
            stockObject.fromText(line);
            Prism mbr = stockObject.getMBR();
            t1 = mbr.t1;
            x1 = mbr.x1;
            y1 = mbr.y1;
            t2 = mbr.t2;
            x2 = mbr.x2;
            y2 = mbr.y2;
            i++;

            while (i < nodes.elementAt(i_leaf).index2) {
                eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                mbr = stockObject.getMBR();
                if (mbr.t1 < t1) t1 = mbr.t1;
                if (mbr.x1 < x1) x1 = mbr.x1;
                if (mbr.y1 < y1) y1 = mbr.y1;
                if (mbr.t2 > t2) t2 = mbr.t2;
                if (mbr.x2 > x2) x2 = mbr.x2;
                if (mbr.y2 > y2) y2 = mbr.y2;
                i++;
            }
            nodes.elementAt(i_leaf).set(t1, x1, y1, t2, x2, y2);
        }
        fakeOut.close();
        fakeOut = null;

        // Calculate the MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Prism mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double t1 = mbr.t1;
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double t2 = mbr.t2;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;
            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.t1 < t1) t1 = mbr.t1;
                if (mbr.x1 < x1) x1 = mbr.x1;
                if (mbr.y1 < y1) y1 = mbr.y1;
                if (mbr.t2 > t2) t2 = mbr.t2;
                if (mbr.x2 > x2) x2 = mbr.x2;
                if (mbr.y2 > y2) y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(t1, x1, y1, t2, x2, y2);
        }

        // Start writing the tree
        // Write the tree header (including the size)
        // Total tree size (== total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);
        // isColumnar
        dataOut.writeInt(columnarStorage ? 1 : 0);

        // Write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }

        // Write elements
        if (columnarStorage) {
            byte[] index_bs = index_bos.toByteArray();
            byte[][] bss = new byte[bos.length][];
            for (int i = 0; i < bss.length; i++) {
                bss[i] = bos[i].toByteArray();
            }
            for (int element_i = 0; element_i < elementCount; element_i++) {
                dataOut.write(index_bs, ids[element_i] * IndexUnitSize, IndexUnitSize);
            }
            for (int i = 0; i < fields.length; i++) {
                int fieldSize = 0;
                if (fields[i].getType().equals(Integer.TYPE)) {
                    fieldSize = 4;
                } else if (fields[i].getType().equals(Long.TYPE)) {
                    fieldSize = 8;
                } else if (fields[i].getType().equals(Double.TYPE)) {
                    fieldSize = 8;
                } else {
                    // unsupported field type: its column was never written, so skip it
                    continue;
                }
                for (int element_i = 0; element_i < elementCount; element_i++) {
                    dataOut.write(bss[i], ids[element_i] * fieldSize, fieldSize);
                }
            }
        } else {
            for (int element_i = 0; element_i < elementCount; element_i++) {
                int eol = skipToEOL(element_bytes, offsets[element_i]);
                dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    }
}
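The RTree builder above uses a self-feeding work queue: it seeds the queue with one root split via add(), then each dequeued split add()s its own sub-splits until the queue drains. A minimal sketch of that idiom (the Split class and the halving rule are illustrative, standing in for SplitStruct and its partition logic):

import java.util.LinkedList;
import java.util.Queue;

final class WorkQueuePartition {
    static final class Split {          // illustrative stand-in for SplitStruct
        final int from, to;
        Split(int from, int to) { this.from = from; this.to = to; }
    }

    // Recursively partition [0, total) into runs of at most leafSize, breadth-first
    static void partition(int total, int leafSize) {
        Queue<Split> toBePartitioned = new LinkedList<Split>();
        toBePartitioned.add(new Split(0, total)); // seed with the root split
        while (!toBePartitioned.isEmpty()) {
            Split s = toBePartitioned.poll();
            if (s.to - s.from > leafSize) {
                int mid = (s.from + s.to) / 2;
                // each processed split enqueues the splits it produces
                toBePartitioned.add(new Split(s.from, mid));
                toBePartitioned.add(new Split(mid, s.to));
            } else {
                System.out.println("leaf [" + s.from + ", " + s.to + ")");
            }
        }
    }
}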