List of usage examples for com.rabbitmq.client Channel exchangeDeclare
Exchange.DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type, boolean durable) throws IOException;
The examples below use the equivalent overload that takes the exchange type as a String ("fanout", "topic", ...):
Exchange.DeclareOk exchangeDeclare(String exchange, String type, boolean durable) throws IOException;
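A minimal, self-contained sketch of declaring an exchange and publishing one message, assuming a broker running on localhost with default credentials; the exchange name "events" and the payload are illustrative and not taken from the examples below:

import com.rabbitmq.client.BuiltinExchangeType;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

public class ExchangeDeclareExample {
    public static void main(String[] args) throws Exception {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost"); // assumes a local broker with default credentials
        try (Connection connection = factory.newConnection();
             Channel channel = connection.createChannel()) {
            // Declare a durable fanout exchange; the call succeeds if the exchange
            // does not exist yet or already exists with the same type and durability.
            channel.exchangeDeclare("events", BuiltinExchangeType.FANOUT, true);
            // Equivalent call using the String-typed overload:
            // channel.exchangeDeclare("events", "fanout", true);
            channel.basicPublish("events", "", null, "hello".getBytes("UTF-8"));
        }
    }
}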
From source file:edu.jhu.pha.vospace.node.ContainerNode.java
License:Apache License
@Override
public void markRemoved(boolean isRemoved) {
    if (!isStoredMetadata())
        throw new NotFoundException("NodeNotFound");

    NodesList childrenList = getDirectChildren(false, 0, -1);
    List<Node> children = childrenList.getNodesList();
    for (Node child : children) {
        child.markRemoved(isRemoved);
    }

    getMetastore().markRemoved(getUri(), isRemoved);

    QueueConnector.goAMQP("mark removed Container", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {
            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);

            return true;
        }
    });
}
From source file:edu.jhu.pha.vospace.node.DataNode.java
License:Apache License
/**
 * Set the node content
 * @param data The new node content
 */
public void setData(InputStream data) {
    if (!getMetastore().isStored(getUri()))
        throw new NotFoundException("NodeNotFound");
    logger.debug("Updating node " + getUri().toString());

    // put the node data into storage
    getStorage().putBytes(getUri().getNodePath(), data);

    // update node size from storage to metadata
    getStorage().updateNodeInfo(getUri().getNodePath(), getNodeInfo());
    getNodeInfo().setRevision(getNodeInfo().getRevision() + 1); // increase revision version to store in DB
    getMetastore().storeInfo(getUri(), getNodeInfo());

    QueueConnector.goAMQP("setData", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {
            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);
            channel.exchangeDeclare(conf.getString("process.exchange.nodeprocess"), "fanout", true);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);
            channel.basicPublish(conf.getString("process.exchange.nodeprocess"), "",
                    MessageProperties.PERSISTENT_TEXT_PLAIN, jobSer);

            return true;
        }
    });
}
From source file:edu.jhu.pha.vospace.node.DataNode.java
License:Apache License
public void setChunkedData(String uploadId) {
    if (!getMetastore().isStored(getUri()))
        throw new NotFoundException("NodeNotFound");
    logger.debug("Updating chunked node " + getUri().toString());

    VoSyncMetaStore vosyncMeta = new VoSyncMetaStore(this.owner);

    // put the node data into storage
    getStorage().putChunkedBytes(getUri().getNodePath(), uploadId);

    vosyncMeta.deleteNodeChunks(this.getUri());
    vosyncMeta.mapChunkedToNode(this.getUri(), uploadId);

    // update node size from storage to metadata
    getStorage().updateNodeInfo(getUri().getNodePath(), getNodeInfo());
    getNodeInfo().setRevision(getNodeInfo().getRevision() + 1); // increase revision version to store in DB
    getMetastore().storeInfo(getUri(), getNodeInfo());

    QueueConnector.goAMQP("setData", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {
            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);
            channel.exchangeDeclare(conf.getString("process.exchange.nodeprocess"), "fanout", true);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);
            channel.basicPublish(conf.getString("process.exchange.nodeprocess"), "",
                    MessageProperties.PERSISTENT_TEXT_PLAIN, jobSer);

            return true;
        }
    });
}
From source file:edu.jhu.pha.vospace.node.Node.java
License:Apache License
public void copy(VospaceId newLocationId, final boolean keepBytes) {
    if (!isStoredMetadata())
        throw new NotFoundException("NodeNotFound");
    if (getMetastore().isStored(newLocationId))
        throw new ForbiddenException("DestinationNodeExists");

    getStorage().copyBytes(getUri().getNodePath(), newLocationId.getNodePath(), keepBytes);

    if (!keepBytes) {
        // update node's container size metadata
        try {
            ContainerNode contNode = (ContainerNode) NodeFactory.getNode(
                    new VospaceId(new NodePath(getUri().getNodePath().getContainerName())), getOwner());
            getStorage().updateNodeInfo(contNode.getUri().getNodePath(), contNode.getNodeInfo());
            getMetastore().storeInfo(contNode.getUri(), contNode.getNodeInfo());
        } catch (URISyntaxException e) {
            logger.error("Updating root node size failed: " + e.getMessage());
        }
    }

    final Node newDataNode = NodeFactory.createNode(newLocationId, owner, this.getType());
    newDataNode.setNode(null);
    newDataNode.getStorage().updateNodeInfo(newLocationId.getNodePath(), newDataNode.getNodeInfo());
    newDataNode.getMetastore().storeInfo(newLocationId, newDataNode.getNodeInfo());
    newDataNode.getMetastore().updateUserProperties(newLocationId, getNodeMeta(PropertyType.property));

    // Update chunks table to point to the new node if the node is chunked
    // copy with keepBytes=true is prohibited for chunked files by swift storage
    if (null != this.getNodeInfo().getChunkedName()) {
        VoSyncMetaStore vosyncMeta = new VoSyncMetaStore(this.owner);
        vosyncMeta.mapChunkedToNode(newDataNode.getUri(), this.getNodeInfo().getChunkedName());
    }

    if (!keepBytes)
        newDataNode.getMetastore().remove(this.getUri());

    QueueConnector.goAMQP("copyNode", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {
            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);
            channel.exchangeDeclare(conf.getString("process.exchange.nodeprocess"), "fanout", true);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", newDataNode.getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", newDataNode.getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);
            channel.basicPublish(conf.getString("process.exchange.nodeprocess"), "",
                    MessageProperties.PERSISTENT_TEXT_PLAIN, jobSer);

            if (!keepBytes) {
                Map<String, Object> oldNodeData = new HashMap<String, Object>();
                oldNodeData.put("uri", getUri().toString());
                oldNodeData.put("owner", getOwner());
                oldNodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

                byte[] oldNodejobSer = (new ObjectMapper()).writeValueAsBytes(oldNodeData);
                channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, oldNodejobSer);
            }

            return true;
        }
    });
}
From source file:edu.jhu.pha.vospace.node.Node.java
License:Apache License
/**
 * Marks the node as removed in metadata database
 */
public void markRemoved(boolean isRemoved) {
    if (!isStoredMetadata())
        throw new NotFoundException("NodeNotFound");

    getMetastore().markRemoved(getUri(), isRemoved);

    QueueConnector.goAMQP("markRemovedNode", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {
            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);

            return true;
        }
    });
}
From source file:edu.jhu.pha.vospace.node.Node.java
License:Apache License
/**
 * Set node XML metadata and create storage empty node if needed
 */
public void setNode(String nodeXml) {
    if (!isStoredMetadata()) {
        if (getType().equals(NodeType.CONTAINER_NODE))
            getStorage().createContainer(this.getUri().getNodePath());
        else if (this.getUri().getNodePath().getNodeRelativeStoragePath().isEmpty()) // creating non-container in first level
            throw new BadRequestException("BadRequest");
        getMetastore().storeData(getUri(), this.getType());
    }

    if (null != nodeXml) {
        XMLObject nodeXmlObj = new XMLObject(nodeXml.getBytes());
        getMetastore().updateUserProperties(getUri(), nodeXmlObj.getNodeProperties());
    }

    QueueConnector.goAMQP("setNode", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {
            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);

            return true;
        }
    });
}
From source file:edu.jhu.pha.vospace.process.NodeProcessor.java
License:Apache License
@Override
public void run() {
    QueueConnector.goAMQP("nodesProcessor", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {
            channel.exchangeDeclare(conf.getString("process.exchange.nodeprocess"), "fanout", true);
            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);

            channel.queueDeclare(conf.getString("process.queue.nodeprocess"), true, false, false, null);
            channel.queueBind(conf.getString("process.queue.nodeprocess"),
                    conf.getString("process.exchange.nodeprocess"), "");

            QueueingConsumer consumer = new QueueingConsumer(channel);
            channel.basicConsume(conf.getString("process.queue.nodeprocess"), false, consumer);

            while (!Thread.currentThread().isInterrupted()) {
                Node node = null;
                try {
                    QueueingConsumer.Delivery delivery = consumer.nextDelivery();
                    Map<String, Object> nodeData = (new ObjectMapper()).readValue(delivery.getBody(), 0,
                            delivery.getBody().length, new TypeReference<HashMap<String, Object>>() {
                            });
                    channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);

                    node = NodeFactory.getNode(new VospaceId((String) nodeData.get("uri")),
                            (String) nodeData.get("owner"));
                    logger.debug("Node changed: " + nodeData.get("uri") + " " + nodeData.get("owner") + " "
                            + node.getType());

                    switch (node.getType()) {
                    case DATA_NODE:
                    case STRUCTURED_DATA_NODE:
                    case UNSTRUCTURED_DATA_NODE: {
                        TikaInputStream inp = null;
                        try {
                            Metadata detectTikaMeta = new Metadata();
                            detectTikaMeta.set(Metadata.RESOURCE_NAME_KEY,
                                    node.getUri().getNodePath().getNodeName());
                            inp = TikaInputStream.get(node.exportData());
                            //MediaType type = new DefaultDetector().detect(inp, nodeTikaMeta);

                            List<Detector> list = new ArrayList<Detector>();
                            list.add(new SimulationDetector());
                            list.add(new DefaultDetector());
                            Detector detector = new CompositeDetector(list);

                            MediaType type = detector.detect(inp, detectTikaMeta);

                            node.getNodeInfo().setContentType(type.toString());
                            node.getMetastore().storeInfo(node.getUri(), node.getNodeInfo());

                            JsonNode credentials = UserHelper.getProcessorCredentials(node.getOwner());

                            boolean makeStructured = false;
                            List<String> externalLinks = new ArrayList<String>();

                            for (ProcessorConfig processorConf : ProcessingFactory.getInstance()
                                    .getProcessorConfigsForNode(node, credentials)) {
                                Metadata nodeTikaMeta = new Metadata();
                                nodeTikaMeta.set(TikaCoreProperties.SOURCE, node.getUri().toString());
                                nodeTikaMeta.set("owner", (String) nodeData.get("owner"));
                                nodeTikaMeta.set(TikaCoreProperties.TITLE,
                                        node.getUri().getNodePath().getNodeName());
                                nodeTikaMeta.add(TikaCoreProperties.METADATA_DATE, dateFormat
                                        .format(Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTime()));
                                nodeTikaMeta.set(Metadata.CONTENT_LOCATION,
                                        ((DataNode) node).getHttpDownloadLink().toASCIIString());
                                nodeTikaMeta.set(Metadata.CONTENT_TYPE, type.toString());

                                AbstractParser parser;
                                TikaConfig config = TikaConfig.getDefaultConfig();
                                if (processorConf.getTikaConfig() != null) {
                                    config = new TikaConfig(
                                            getClass().getResourceAsStream(processorConf.getTikaConfig()));
                                }
                                parser = new CompositeParser(config.getMediaTypeRegistry(), config.getParser());

                                Processor processor = Processor.fromProcessorConfig(processorConf);

                                InputStream str = null;
                                try {
                                    str = TikaInputStream.get(node.exportData());
                                    parser.parse(str, processor.getContentHandler(), nodeTikaMeta,
                                            new ParseContext());
                                } finally {
                                    try {
                                        str.close();
                                    } catch (Exception ex) {
                                    }
                                }

                                // now do out-of-tika processing of results
                                try {
                                    processor.processNodeMeta(nodeTikaMeta,
                                            credentials.get(processorConf.getId()));
                                    logger.debug("Processing of " + node.getUri().toString() + " is finished.");
                                } catch (Exception e) {
                                    logger.error("Error processing the node. " + e.getMessage());
                                    e.printStackTrace();
                                    processError(node, e);
                                }

                                String[] links = nodeTikaMeta.getValues("EXTERNAL_LINKS");
                                if (null != links && links.length > 0) {
                                    externalLinks.addAll(Arrays.asList(links));
                                }

                                MediaType curType = MediaType.parse(nodeTikaMeta.get(Metadata.CONTENT_TYPE));
                                if (MIME_REGISTRY.isSpecializationOf(curType, type)) {
                                    type = curType;
                                    logger.debug("Media type reassigned to " + type.toString() + " by "
                                            + processorConf.getId());
                                }

                                String nodeTypeStr = nodeTikaMeta.get("NodeType");
                                if (null != nodeTypeStr
                                        && NodeType.valueOf(nodeTypeStr).equals(NodeType.STRUCTURED_DATA_NODE))
                                    makeStructured = true;
                            }

                            node.makeNodeStructured(makeStructured);

                            Map<String, String> properties = new HashMap<String, String>();
                            properties.put(PROCESSING_PROPERTY, "done");
                            if (externalLinks.size() > 0) {
                                properties.put(EXTERNAL_LINK_PROPERTY, StringUtils.join(externalLinks, ' '));
                            }
                            node.getNodeInfo().setContentType(type.toString());

                            node.getMetastore().updateUserProperties(node.getUri(), properties);
                            node.getMetastore().storeInfo(node.getUri(), node.getNodeInfo());
                            logger.debug("Updated node " + node.getUri().toString() + " to "
                                    + node.getNodeInfo().getContentType() + " and "
                                    + node.getNodeInfo().getSize());

                            // update node's container size metadata
                            try {
                                ContainerNode contNode = (ContainerNode) NodeFactory.getNode(
                                        new VospaceId(
                                                new NodePath(node.getUri().getNodePath().getContainerName())),
                                        node.getOwner());
                                node.getStorage().updateNodeInfo(contNode.getUri().getNodePath(),
                                        contNode.getNodeInfo());
                                node.getMetastore().storeInfo(contNode.getUri(), contNode.getNodeInfo());
                            } catch (URISyntaxException e) {
                                logger.error("Updating root node size failed: " + e.getMessage());
                            }

                            try {
                                nodeData.put("container",
                                        node.getUri().getNodePath().getParentPath().getNodeStoragePath());
                                byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
                                channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null,
                                        jobSer);
                            } catch (IOException e) {
                                logger.error(e);
                            }
                        } catch (TikaException ex) {
                            logger.error("Error parsing the node " + node.getUri().toString() + ": "
                                    + ex.getMessage());
                            processError(node, ex);
                            ex.printStackTrace();
                        } catch (SAXException ex) {
                            logger.error("Error SAX parsing the node " + node.getUri().toString() + ": "
                                    + ex.getMessage());
                            processError(node, ex);
                        } catch (IOException ex) {
                            logger.error("Error reading the node " + node.getUri().toString() + ": "
                                    + ex.getMessage());
                            processError(node, ex);
                        } finally {
                            try {
                                inp.close();
                            } catch (Exception ex2) {
                            }
                        }
                        break;
                    }
                    case CONTAINER_NODE: {
                        // DbPoolServlet.goSql("Processing nodes",
                        //         "select * from nodes where owner = ?)",
                        //         new SqlWorker<Boolean>() {
                        //             @Override
                        //             public Boolean go(java.sql.Connection conn, java.sql.PreparedStatement stmt)
                        //                     throws SQLException {
                        //                 stmt.setString(1, node.getOwner());
                        //                 /*ResultSet resSet = stmt.executeQuery();
                        //                 while (resSet.next()) {
                        //                     String uriStr = resSet.getString(1);
                        //                     String username = resSet.getString(2);
                        //
                        //                     try {
                        //                         VospaceId uri = new VospaceId(uriStr);
                        //
                        //                         Node newNode = NodeFactory.getInstance().getNode(uri, username);
                        //                         newNode.remove();
                        //                     } catch (Exception ex) {
                        //                         ex.printStackTrace();
                        //                     }
                        //                 }*/
                        //                 return true;
                        //             }
                        //         });
                        break;
                    }
                    default: {
                        break;
                    }
                    }
                } catch (InterruptedException ex) {
                    logger.error("Sleeping interrupted. " + ex.getMessage());
                    processError(node, ex);
                } catch (IOException ex) {
                    ex.printStackTrace();
                    logger.error("Error reading the changed node JSON: " + ex.getMessage());
                    processError(node, ex);
                } catch (URISyntaxException ex) {
                    logger.error("Error parsing VospaceId from changed node JSON: " + ex.getMessage());
                    processError(node, ex);
                }
            }

            return true;
        }
    });
}
From source file:edu.kit.dama.util.test.RabbitMQReceiver.java
License:Apache License
public static void main(String[] args) throws IOException, TimeoutException, InterruptedException {
    ConnectionFactory factory = new ConnectionFactory();
    factory.setHost("localhost");
    final Connection connection = factory.newConnection();
    final Channel channel = connection.createChannel();

    channel.exchangeDeclare(EXCHANGE_NAME, "topic", true);
    String queueName = channel.queueDeclare().getQueue();
    channel.queueBind(queueName, EXCHANGE_NAME, "audit.*");
    // channel.queueDeclare(QUEUE_NAME, true, false, false, null);

    System.out.println(" [*] Waiting for messages. To exit press CTRL+C");

    /*
    final Consumer consumer = new DefaultConsumer(channel) {
        @Override
        public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties,
                byte[] body) throws IOException {
            String message = new String(body, "UTF-8");
            System.out.println(" [x] Received '" + message + "'");
            try {
                doWork(message);
            } finally {
                System.out.println(" [x] Done");
                channel.basicAck(envelope.getDeliveryTag(), false);
            }
        }
    };
    channel.basicConsume(TASK_QUEUE_NAME, false, consumer);
    */

    /*
    QueueingConsumer consumer = new QueueingConsumer(channel);
    //Consumer consumer = new DefaultConsumer(channel) {
    //    @Override
    //    public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties,
    //            byte[] body) throws IOException {
    //        String message = new String(body, "UTF-8");
    //        System.out.println(" [x] Received '" + message + "'");
    //    }
    //};
    channel.basicConsume(queueName, true, consumer);
    QueueingConsumer.Delivery delivery = consumer.nextDelivery(10000);
    if (delivery != null) {
        byte[] message = delivery.getBody();
        System.out.println("MESS " + new String(message));
    }
    */

    Consumer consumer = new DefaultConsumer(channel) {
        @Override
        public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties,
                byte[] body) throws IOException {
            String message = new String(body, "UTF-8");
            System.out.println(" [x] Received '" + message + "'");
        }
    };
    channel.basicConsume(queueName, true, consumer);
    // System.exit(0);
}
From source file:edu.sdsc.vospace.process.SimulationProcessor.java
License:Apache License
@Override
public void processNodeMeta(Metadata metadata, JsonNode credentials) throws ProcessingException {
    String owner = metadata.get("owner");
    String source = metadata.get(TikaCoreProperties.SOURCE);
    try {
        DataNode node = (DataNode) NodeFactory.getNode(new VospaceId(source), owner);

        Map<String, String> properties = new HashMap<String, String>();
        properties.put(SIM_ID_PROPERTY, metadata.get(SimulationParser.METADATA_SIMULATION_UUID));
        properties.put(SIM_DATASET_ID_PROPERTY,
                StringUtils.join(metadata.getValues(SimulationParser.METADATA_DATASET_UUID), " "));
        node.getMetastore().updateUserProperties(node.getUri(), properties);

        final String simEndpointUrl = node.getHttpDownloadLink().toASCIIString();

        QueueConnector.goAMQP("submit new simulation", new QueueConnector.AMQPWorker<Boolean>() {
            @Override
            public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                    throws IOException {
                Map<String, Object> jobData = new HashMap<String, Object>();
                jobData.put("url", simEndpointUrl);

                byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(jobData);

                channel.exchangeDeclare(SIM_EXCHANGE, "topic", true);
                channel.basicPublish(SIM_EXCHANGE, "new_sim", null, jobSer);

                return true;
            }
        });
    } catch (URISyntaxException ex) {
        throw new ProcessingException(ex);
    }
}
From source file:genqa.ExportRabbitMQVerifier.java
License:Open Source License
public void run() throws IOException, InterruptedException {
    final Connection connection = m_connFactory.newConnection();
    final Channel channel = connection.createChannel();

    try {
        channel.exchangeDeclare(m_exchangeName, "topic", true);
        String dataQueue = channel.queueDeclare().getQueue();
        channel.queueBind(dataQueue, m_exchangeName, "EXPORT_PARTITIONED_TABLE.#");
        channel.queueBind(dataQueue, m_exchangeName, "EXPORT_PARTITIONED_TABLE2.#");
        channel.queueBind(dataQueue, m_exchangeName, "EXPORT_REPLICATED_TABLE.#");
        channel.queueBind(dataQueue, m_exchangeName, "EXPORT_PARTITIONED_TABLE_FOO.#");
        channel.queueBind(dataQueue, m_exchangeName, "EXPORT_PARTITIONED_TABLE2_FOO.#");
        channel.queueBind(dataQueue, m_exchangeName, "EXPORT_REPLICATED_TABLE_FOO.#");

        String doneQueue = channel.queueDeclare().getQueue();
        channel.queueBind(doneQueue, m_exchangeName, "EXPORT_DONE_TABLE.#");
        channel.queueBind(doneQueue, m_exchangeName, "EXPORT_DONE_TABLE_FOO.#");

        // Setup callback for data stream
        channel.basicConsume(dataQueue, false, createConsumer(channel));

        // Setup callback for the done message
        QueueingConsumer doneConsumer = new QueueingConsumer(channel);
        channel.basicConsume(doneQueue, true, doneConsumer);

        // Wait until the done message arrives, then verify count
        final QueueingConsumer.Delivery doneMsg = doneConsumer.nextDelivery();
        final long expectedRows = Long.parseLong(ExportOnServerVerifier.RoughCSVTokenizer
                .tokenize(new String(doneMsg.getBody(), Charsets.UTF_8))[6]);

        while (expectedRows > m_verifiedRows) {
            Thread.sleep(1000);
            System.err.println("Expected " + expectedRows + " " + m_verifiedRows);
        }
    } finally {
        tearDown(channel);
        channel.close();
        connection.close();
    }
}