Example usage for java.util.concurrent Semaphore release

Introduction

On this page you can find example usages of the release() method of java.util.concurrent.Semaphore.

Prototype

public void release() 

Document

Releases a permit, returning it to the semaphore.
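
Before the sourced examples, here is a minimal, self-contained sketch (not taken from any of the source files below) of the pattern most of them share: a Semaphore created with zero permits acts as a completion signal, where a background task calls release() when it finishes and the waiting thread blocks in acquire() until that permit becomes available. The class and variable names are illustrative only.

import java.util.concurrent.Semaphore;

public class ReleaseExample {
    public static void main(String[] args) throws InterruptedException {
        // Zero permits: acquire() blocks until some other thread calls release().
        final Semaphore done = new Semaphore(0);

        Thread worker = new Thread(() -> {
            try {
                Thread.sleep(500); // simulate background work
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } finally {
                // Return a permit to the semaphore, waking the waiting thread.
                done.release();
            }
        });
        worker.start();

        // Block until the worker signals completion.
        done.acquire();
        System.out.println("Worker finished.");
    }
}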

Usage

From source file:fur.shadowdrake.minecraft.InstallPanel.java

@SuppressWarnings({ "Convert2Lambda" })
private boolean downloadFile(String filename, String local) throws NetworkException {
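    // Zero-permit semaphore: acquire() below blocks until the FTP completion callback calls release().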
    final Semaphore semaphore = new Semaphore(0);
    success = false;
    while (true) {
        result = ftpClient.openDataChannel(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                if (e.getID() == FtpClient.FTP_OK) {
                    try {
                        InputStream is;
                        FileOutputStream fos;

                        is = ((Socket) e.getSource()).getInputStream();
                        fos = new FileOutputStream(new File(workingDir, local));
                        byte[] buffer = new byte[4096];
                        for (int n = is.read(buffer); n > 0; n = is.read(buffer)) {
                            fos.write(buffer, 0, n);
                            log.advance(n);
                        }
                        fos.close();
                        success = true;
                    } catch (IOException ex) {
                        Logger.getLogger(InstallPanel.class.getName()).log(Level.SEVERE, "Download", ex);
                        log.println("Faild to save file.");
                        success = false;
                    }
                }
            }
        });
        switch (result) {
        case FtpClient.FTP_OK:
            int size = ftpClient.retr(filename, (ActionEvent e) -> {
                ftpClient.closeDataChannel();
                semaphore.release();
            });
            if (size < 0) {
                ftpClient.abandonDataChannel();
            } else {
                log.reset();
                log.setMaximum(size);
            }
            try {
                semaphore.acquire();
            } catch (InterruptedException ex) {
                return false;
            }
            break;
        case FtpClient.FTP_TIMEOUT:
            if (reconnect()) {
                continue;
            } else {
                ftpClient.abandonDataChannel();
                return false;
            }
        default:
            ftpClient.abandonDataChannel();
            return false;
        }
        break;
    }
    return success;
}

From source file:org.telepatch.ui.ChatActivity.java

@Override
public boolean onFragmentCreate() {
    final int chatId = arguments.getInt("chat_id", 0);
    final int userId = arguments.getInt("user_id", 0);
    final int encId = arguments.getInt("enc_id", 0);
    scrollToTopOnResume = arguments.getBoolean("scrollToTopOnResume", false);

    if (chatId != 0) {
        currentChat = MessagesController.getInstance().getChat(chatId);
        if (currentChat == null) {
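            // Zero-permit semaphore: block this thread until the storage queue runnable has loaded the chat and called release().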
            final Semaphore semaphore = new Semaphore(0);
            MessagesStorage.getInstance().storageQueue.postRunnable(new Runnable() {
                @Override
                public void run() {
                    currentChat = MessagesStorage.getInstance().getChat(chatId);
                    semaphore.release();
                }
            });
            try {
                semaphore.acquire();
            } catch (Exception e) {
                FileLog.e("tmessages", e);
            }
            if (currentChat != null) {
                MessagesController.getInstance().putChat(currentChat, true);
            } else {
                return false;
            }
        }
        if (chatId > 0) {
            dialog_id = -chatId;
        } else {
            isBroadcast = true;
            dialog_id = AndroidUtilities.makeBroadcastId(chatId);
        }
        Semaphore semaphore = null;
        if (isBroadcast) {
            semaphore = new Semaphore(0);
        }
        MessagesController.getInstance().loadChatInfo(currentChat.id, semaphore);
        if (isBroadcast) {
            try {
                semaphore.acquire();
            } catch (Exception e) {
                FileLog.e("tmessages", e);
            }
        }
    } else if (userId != 0) {
        currentUser = MessagesController.getInstance().getUser(userId);
        if (currentUser == null) {
            final Semaphore semaphore = new Semaphore(0);
            MessagesStorage.getInstance().storageQueue.postRunnable(new Runnable() {
                @Override
                public void run() {
                    currentUser = MessagesStorage.getInstance().getUser(userId);
                    semaphore.release();
                }
            });
            try {
                semaphore.acquire();
            } catch (Exception e) {
                FileLog.e("tmessages", e);
            }
            if (currentUser != null) {
                MessagesController.getInstance().putUser(currentUser, true);
            } else {
                return false;
            }
        }
        dialog_id = userId;
    } else if (encId != 0) {
        currentEncryptedChat = MessagesController.getInstance().getEncryptedChat(encId);
        if (currentEncryptedChat == null) {
            final Semaphore semaphore = new Semaphore(0);
            MessagesStorage.getInstance().storageQueue.postRunnable(new Runnable() {
                @Override
                public void run() {
                    currentEncryptedChat = MessagesStorage.getInstance().getEncryptedChat(encId);
                    semaphore.release();
                }
            });
            try {
                semaphore.acquire();
            } catch (Exception e) {
                FileLog.e("tmessages", e);
            }
            if (currentEncryptedChat != null) {
                MessagesController.getInstance().putEncryptedChat(currentEncryptedChat, true);
            } else {
                return false;
            }
        }
        currentUser = MessagesController.getInstance().getUser(currentEncryptedChat.user_id);
        if (currentUser == null) {
            final Semaphore semaphore = new Semaphore(0);
            MessagesStorage.getInstance().storageQueue.postRunnable(new Runnable() {
                @Override
                public void run() {
                    currentUser = MessagesStorage.getInstance().getUser(currentEncryptedChat.user_id);
                    semaphore.release();
                }
            });
            try {
                semaphore.acquire();
            } catch (Exception e) {
                FileLog.e("tmessages", e);
            }
            if (currentUser != null) {
                MessagesController.getInstance().putUser(currentUser, true);
            } else {
                return false;
            }
        }
        dialog_id = ((long) encId) << 32;
        maxMessageId = Integer.MIN_VALUE;
        minMessageId = Integer.MAX_VALUE;
        MediaController.getInstance().startMediaObserver();
    } else {
        return false;
    }

    //TODO: remove the notification once the chat has been entered
    if (preferences.getBoolean("multiple_notify", true)) {
        nm.cancel((int) dialog_id);
    } else {
        nm.cancel(1);
    }

    chatActivityEnterView = new ChatActivityEnterView();
    chatActivityEnterView.setDialogId(dialog_id);
    chatActivityEnterView.setDelegate(new ChatActivityEnterView.ChatActivityEnterViewDelegate() {
        @Override
        public void onMessageSend() {
            chatListView.post(new Runnable() {
                @Override
                public void run() {
                    chatListView.setSelectionFromTop(messages.size() - 1,
                            -100000 - chatListView.getPaddingTop());
                }
            });
        }

        @Override
        public void needSendTyping() {
            MessagesController.getInstance().sendTyping(dialog_id, classGuid);
        }
    });
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.messagesDidLoaded);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.emojiDidLoaded);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.updateInterfaces);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.didReceivedNewMessages);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.closeChats);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.messagesRead);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.messagesDeleted);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.messageReceivedByServer);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.messageReceivedByAck);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.messageSendError);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.chatInfoDidLoaded);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.contactsDidLoaded);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.encryptedChatUpdated);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.messagesReadedEncrypted);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.removeAllMessagesFromDialog);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.audioProgressDidChanged);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.audioDidReset);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.screenshotTook);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.blockedUsersDidLoaded);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.FileNewChunkAvailable);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.didCreatedNewDeleteTask);
    NotificationCenter.getInstance().addObserver(this, NotificationCenter.audioDidStarted);

    super.onFragmentCreate();

    loading = true;

    MessagesController.getInstance().loadMessages(dialog_id, AndroidUtilities.isTablet() ? 30 : 20, 0, true, 0,
            classGuid, true, false, null);

    if (currentUser != null) {
        userBlocked = MessagesController.getInstance().blockedUsers.contains(currentUser.id);
    }

    if (AndroidUtilities.isTablet()) {
        NotificationCenter.getInstance().postNotificationName(NotificationCenter.openedChatChanged, dialog_id,
                false);
    }

    typingDotsDrawable = new TypingDotsDrawable();
    typingDotsDrawable.setIsChat(currentChat != null);

    if (currentEncryptedChat != null && AndroidUtilities
            .getMyLayerVersion(currentEncryptedChat.layer) != SendMessagesHelper.CURRENT_SECRET_CHAT_LAYER) {
        SendMessagesHelper.getInstance().sendNotifyLayerMessage(currentEncryptedChat, null);
    }

    return true;
}

From source file:com.boundlessgeo.geoserver.api.controllers.ThumbnailController.java

/**
 * Creates a thumbnail for the layer and updates the layer with the new thumbnail
 * @param ws The workspace of the layer
 * @param layer The layer or layerGroup to get the thumbnail for
 * @param baseRequest The originating HTTP request, used to build the local WMS reflector URL
 * @throws Exception
 */
protected void createThumbnail(WorkspaceInfo ws, PublishedInfo layer, HttpServletRequest baseRequest)
        throws Exception {
    //Sync against this map/layer
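    //The per-layer permit is returned in the finally block once the thumbnail has been written,
    //so only one thumbnail per layer is generated at a time.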
    Semaphore s = semaphores.get(layer);
    s.acquire();
    try {
        //(SUITE-1072) Initialize the thumbnail to a blank image in case the WMS request crashes geoserver
        BufferedImage blankImage = new BufferedImage(THUMBNAIL_SIZE * 2, THUMBNAIL_SIZE * 2,
                BufferedImage.TYPE_4BYTE_ABGR);
        Graphics2D g = blankImage.createGraphics();
        g.setColor(new Color(0, 0, 0, 0));
        g.fillRect(0, 0, blankImage.getWidth(), blankImage.getHeight());
        writeThumbnail(layer, blankImage);

        //Set up getMap request
        String url = baseRequest.getScheme() + "://localhost:" + baseRequest.getLocalPort()
                + baseRequest.getContextPath() + "/" + ws.getName() + "/wms/reflect";

        url += "?FORMAT=" + MIME_TYPE;

        ReferencedEnvelope bbox = null;
        if (layer instanceof LayerInfo) {
            url += "&LAYERS=" + layer.getName();
            url += "&STYLES=" + ((LayerInfo) layer).getDefaultStyle().getName();
            bbox = ((LayerInfo) layer).getResource().boundingBox();
        } else if (layer instanceof LayerGroupInfo) {

            LayerGroupHelper helper = new LayerGroupHelper((LayerGroupInfo) layer);
            bbox = ((LayerGroupInfo) layer).getBounds();
            url += "&CRS=" + CRS.toSRS(bbox.getCoordinateReferenceSystem());

            List<LayerInfo> layerList = helper.allLayersForRendering();
            if (layerList.size() > 0) {
                url += "&LAYERS=";
                for (int i = 0; i < layerList.size(); i++) {
                    if (i > 0) {
                        url += ",";
                    }
                    url += layerList.get(i) == null ? "" : layerList.get(i).getName();
                }
            }
            List<StyleInfo> styleList = helper.allStylesForRendering();
            if (styleList.size() > 0) {
                url += "&STYLES=";
                for (int i = 0; i < styleList.size(); i++) {
                    if (i > 0) {
                        url += ",";
                    }
                    if (styleList.get(i) == null) {
                        url += layerList.get(i).getDefaultStyle() == null ? ""
                                : layerList.get(i).getDefaultStyle().getName();
                    } else {
                        url += styleList.get(i) == null ? "" : styleList.get(i).getName();
                    }
                }
            }
        } else {
            throw new RuntimeException("layer must be one of LayerInfo or LayerGroupInfo");
        }
        //Set the size of the HR thumbnail
        //Take the smallest bbox dimension as the min dimension. We can then crop the other 
        //dimension to give a square thumbnail
        url += "&BBOX=" + ((float) bbox.getMinX()) + "," + ((float) bbox.getMinY()) + ","
                + ((float) bbox.getMaxX()) + "," + ((float) bbox.getMaxY());
        if (bbox.getWidth() < bbox.getHeight()) {
            url += "&WIDTH=" + (2 * THUMBNAIL_SIZE);
            url += "&HEIGHT=" + (2 * THUMBNAIL_SIZE * Math.round(bbox.getHeight() / bbox.getWidth()));
        } else {
            url += "&HEIGHT=" + (2 * THUMBNAIL_SIZE);
            url += "&WIDTH=" + (2 * THUMBNAIL_SIZE * Math.round(bbox.getWidth() / bbox.getHeight()));
        }

        //Run the getMap request through the WMS Reflector
        //WebMap response = wms.reflect(request);            
        URL obj = new URL(url);
        HttpURLConnection con = (HttpURLConnection) obj.openConnection();
        con.setRequestMethod("GET");
        BufferedImage image = ImageIO.read(con.getInputStream());
        if (image == null) {
            throw new RuntimeException(
                    "Failed to encode thumbnail for " + ws.getName() + ":" + layer.getName());
        }
        writeThumbnail(layer, image);
    } finally {
        s.release();
    }
}

From source file:it.infn.ct.ParallelSemanticSearch_portlet.java

public void doGet(final ActionRequest request, final ActionResponse response, final App_Input appInput,
        final int numberRecords, final PortletPreferences portletPreferences) {
    testLookup();
    int numThread = countTab(portletPreferences);
    System.out.println("About to submit tasks to " + tp);
    // PortletPreferences portletPreferences = request.getPreferences();

    final Semaphore s = new Semaphore(0);
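    // s starts with zero permits: each worker thread releases one permit when it finishes,
    // and s.acquire(numThread) at the end waits for all of them.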
    Thread thread_openAgris = null;
    Thread thread_culturaItalia = null;
    Thread thread_engage = null;
    Thread thread_europeana = null;
    Thread thread_isidore = null;
    Thread thread_pubmed = null;
    Thread thread_chain = new Thread("CHAIN_THREAD") {
        @Override
        public void run() {

            System.out.println("Executing task in " + Thread.currentThread());

            System.out.println("################### init_thread chain");
            try {
                handlerTabCHAIN(request, response, appInput, numberRecords);
            } catch (RepositoryException ex) {
                Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null, ex);
            } catch (MalformedQueryException ex) {
                Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null, ex);
            } catch (QueryEvaluationException ex) {
                Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null, ex);
            } catch (UnsupportedEncodingException ex) {
                Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null, ex);
            } catch (MalformedURLException ex) {
                Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null, ex);
            }

            s.release();
            System.out.println("thread_chain isAlive: " + Thread.currentThread().getName() + "---"
                    + Thread.currentThread().isAlive());
            System.out.println("###################### finish thread chain" + Thread.currentThread().isAlive());

        }
    };

    if (portletPreferences.getValue("OpenAgris", "").equals("true")) {
        //  if (appPreferences.OpenAgris.equals("true")) {
        thread_openAgris = new Thread("OPENAGRIS_THREAD") {

            @Override
            public void run() {

                System.out.println("Executing task in " + Thread.currentThread());

                System.out.println("################### init_thread OpenAgris");
                try {
                    handlerTabOpenAgris(request, response, appInput, numberRecords, portletPreferences);
                } catch (RepositoryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (MalformedQueryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (QueryEvaluationException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (UnsupportedEncodingException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (MalformedURLException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                s.release();

                System.out.println("###################### finish thread OpenAgris");

            }
        };
    }

    if (portletPreferences.getValue("CulturaItalia", "").equals("true")) {
        //  if (appPreferences.CulturaItalia.equals("true")) {
        thread_culturaItalia = new Thread("CULTURAITALIA_THREAD") {

            @Override
            public void run() {

                System.out.println("Executing task in " + Thread.currentThread());

                System.out.println("################### init_thread CulturaItalia");
                try {
                    handlerTabCulturaItalia(request, response, appInput, numberRecords, portletPreferences);
                } catch (MalformedQueryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (QueryEvaluationException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (UnsupportedEncodingException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (MalformedURLException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (RepositoryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                s.release();

                System.out.println("###################### finish thread CulturaItalia");

            }
        };
    }

    if (portletPreferences.getValue("Engage", "").equals("true")) {
        //  if (appPreferences.Engage.equals("true")) {
        thread_engage = new Thread("ENGAGE_THREAD") {

            @Override
            public void run() {

                System.out.println("Executing task in " + Thread.currentThread());

                System.out.println("################### init_thread Engage");
                try {
                    handlerTabEngage(request, response, appInput, numberRecords, portletPreferences);
                } catch (MalformedQueryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (QueryEvaluationException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (UnsupportedEncodingException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (MalformedURLException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (RepositoryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (IOException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                s.release();

                System.out.println("###################### finish thread Engage");

            }
        };
    }

    String EuropeanaSet = portletPreferences.getValue("Europeana", "");
    System.out
            .println("EuropeanaSet--" + EuropeanaSet + " appPreferences.Europeana" + appPreferences.Europeana);

    if (EuropeanaSet.equals("true")) {
        thread_europeana = new Thread("EUROPEANA_THREAD") {

            @Override
            public void run() {

                System.out.println("Executing task in " + Thread.currentThread());

                System.out.println("################### init_thread Europeana");
                try {
                    handlerTabEuropeana(request, response, appInput, numberRecords, portletPreferences);
                } catch (MalformedQueryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (QueryEvaluationException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (UnsupportedEncodingException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (MalformedURLException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (RepositoryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (IOException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                s.release();

                System.out.println("###################### finish thread europeana");

            }
        };
    }

    if (portletPreferences.getValue("Isidore", "").equals("true")) {
        //if (appPreferences.Isidore.equals("true")) {
        thread_isidore = new Thread("ISIDORE_THREAD") {

            @Override
            public void run() {

                System.out.println("Executing task in " + Thread.currentThread());

                System.out.println("################### init_thread Isidore");
                try {
                    handlerTabIsidore(request, response, appInput, numberRecords, portletPreferences);
                } catch (MalformedQueryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (QueryEvaluationException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (UnsupportedEncodingException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (MalformedURLException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (RepositoryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (IOException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                s.release();

                System.out.println("###################### finish thread Isidore");

            }
        };
    }

    if (portletPreferences.getValue("Pubmed", "").equals("true")) {
        //if (appPreferences.Pubmed.equals("true")) {
        thread_pubmed = new Thread("PUBMED_THREAD") {

            @Override
            public void run() {

                System.out.println("Executing task in " + Thread.currentThread());

                System.out.println("################### init_thread Pubmed");
                try {
                    handlerTabPubmed(request, response, appInput, numberRecords, portletPreferences);
                } catch (MalformedQueryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (QueryEvaluationException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (UnsupportedEncodingException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (MalformedURLException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (RepositoryException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                } catch (IOException ex) {
                    Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null,
                            ex);
                }

                s.release();

                System.out.println("###################### finish thread Pubmed");

            }
        };
    }
    if (tp != null) {
        tp.execute(thread_chain);
        if (thread_openAgris != null) {
            tp.execute(thread_openAgris);
        }
        if (thread_culturaItalia != null) {
            tp.execute(thread_culturaItalia);
        }
        if (thread_engage != null) {
            tp.execute(thread_engage);
        }
        if (thread_europeana != null) {
            tp.execute(thread_europeana);

        }
        if (thread_isidore != null) {
            tp.execute(thread_isidore);
        }
        if (thread_pubmed != null) {
            tp.execute(thread_pubmed);
        }

        try {
            s.acquire(numThread);
        } catch (InterruptedException ex) {
            Logger.getLogger(ParallelSemanticSearch_portlet.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else {

        thread_chain.start();

    }
    //tp.shutdown();
    //while (!tp.isTerminated()) {}
    System.out.println("###################### finish threadPoolMio");
}

From source file:org.jumpmind.symmetric.service.impl.DataExtractorService.java

protected OutgoingBatch extractOutgoingBatch(ProcessInfo processInfo, Node targetNode, IDataWriter dataWriter,
        OutgoingBatch currentBatch, boolean useStagingDataWriter, boolean updateBatchStatistics,
        ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {

        Node sourceNode = nodeService.findIdentity();

        TransformWriter transformExtractWriter = null;
        if (useStagingDataWriter) {
            long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode,
                    new ProcessInfoDataWriter(
                            new StagingDataWriter(memoryThresholdInBytes, nodeService.findIdentityNodeId(),
                                    Constants.STAGING_CATEGORY_OUTGOING, stagingManager),
                            processInfo));
        } else {
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode,
                    new ProcessInfoDataWriter(dataWriter, processInfo));
        }

        long ts = System.currentTimeMillis();
        long extractTimeInMs = 0l;
        long byteCount = 0l;
        long transformTimeInMs = 0l;

        if (currentBatch.getStatus() == Status.IG) {
            Batch batch = new Batch(BatchType.EXTRACT, currentBatch.getBatchId(), currentBatch.getChannelId(),
                    symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), currentBatch.getNodeId(),
                    currentBatch.isCommonFlag());
            batch.setIgnored(true);
            try {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.delete();
                }
                DataContext ctx = new DataContext(batch);
                ctx.put("targetNode", targetNode);
                ctx.put("sourceNode", sourceNode);
                transformExtractWriter.open(ctx);
                transformExtractWriter.start(batch);
                transformExtractWriter.end(batch, false);
            } finally {
                transformExtractWriter.close();
            }
        } else if (!isPreviouslyExtracted(currentBatch)) {
            int maxPermits = parameterService.getInt(ParameterConstants.CONCURRENT_WORKERS);
            String semaphoreKey = useStagingDataWriter ? Long.toString(currentBatch.getBatchId())
                    : currentBatch.getNodeBatchId();
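            // A per-batch semaphore bounds concurrent extraction to maxPermits; the permit is returned in the finally block below.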
            Semaphore lock = null;
            try {
                synchronized (locks) {
                    lock = locks.get(semaphoreKey);
                    if (lock == null) {
                        lock = new Semaphore(maxPermits);
                        locks.put(semaphoreKey, lock);
                    }
                    try {
                        lock.acquire();
                    } catch (InterruptedException e) {
                        throw new org.jumpmind.exception.InterruptedException(e);
                    }
                }

                synchronized (lock) {
                    if (!isPreviouslyExtracted(currentBatch)) {
                        currentBatch.setExtractCount(currentBatch.getExtractCount() + 1);
                        if (updateBatchStatistics) {
                            changeBatchStatus(Status.QY, currentBatch, mode);
                        }
                        currentBatch.resetStats();
                        IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(),
                                new SelectFromSymDataSource(currentBatch, sourceNode, targetNode, processInfo));
                        DataContext ctx = new DataContext();
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());

                        new DataProcessor(dataReader, transformExtractWriter, "extract").process(ctx);
                        extractTimeInMs = System.currentTimeMillis() - ts;
                        Statistics stats = transformExtractWriter.getNestedWriter().getStatistics().values()
                                .iterator().next();
                        transformTimeInMs = stats.get(DataWriterStatisticConstants.TRANSFORMMILLIS);
                        extractTimeInMs = extractTimeInMs - transformTimeInMs;
                        byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                    }
                }
            } catch (RuntimeException ex) {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.close();
                    resource.delete();
                }
                throw ex;
            } finally {
                lock.release();
                synchronized (locks) {
                    if (lock.availablePermits() == maxPermits) {
                        locks.remove(semaphoreKey);
                    }
                }
            }
        }

        if (updateBatchStatistics) {
            long dataEventCount = currentBatch.getDataEventCount();
            long insertEventCount = currentBatch.getInsertEventCount();
            currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);

            // preserve in the case of a reload event
            if (dataEventCount > currentBatch.getDataEventCount()) {
                currentBatch.setDataEventCount(dataEventCount);
            }

            // preserve in the case of a reload event
            if (insertEventCount > currentBatch.getInsertEventCount()) {
                currentBatch.setInsertEventCount(insertEventCount);
            }

            // only update the current batch after we have possibly
            // "re-queried"
            if (extractTimeInMs > 0) {
                currentBatch.setExtractMillis(extractTimeInMs);
            }

            if (byteCount > 0) {
                currentBatch.setByteCount(byteCount);
                statisticManager.incrementDataBytesExtracted(currentBatch.getChannelId(), byteCount);
                statisticManager.incrementDataExtracted(currentBatch.getChannelId(),
                        currentBatch.getExtractCount());
            }
        }

    }

    return currentBatch;
}

From source file:org.commoncrawl.service.crawler.CrawlerEngine.java

/** internal loadWorkUnit routine **/
private CrawlSegmentStatus loadCrawlSegment(final CrawlSegment crawlSegment) {

    _activeLoadCount++;

    // mark the segment as crawling ... 
    crawlSegment.setIsCrawling(true);

    final CrawlSegmentStatus status = new CrawlSegmentStatus();

    status.setListId(crawlSegment.getListId());
    status.setSegmentId(crawlSegment.getSegmentId());
    status.setLoadStatus(CrawlSegmentStatus.LoadStatus.LOADING);
    status.setCrawlStatus(CrawlSegmentStatus.CrawlStatus.UNKNOWN);
    status.setUrlCount(0);
    status.setUrlsComplete(0);

    status.setIsDirty(true);

    _statusMap.put(CrawlLog.makeSegmentLogId(crawlSegment.getListId(), crawlSegment.getSegmentId()), status);

    if (Environment.detailLogEnabled())
        LOG.info("loading crawl segment:" + crawlSegment.getSegmentId());

    if (!getServer().externallyManageCrawlSegments()) {

        // remove crawl segment log from crawl log data structure 
        // (we need to do this to protect the data structure from corruption, since the underlying 
        //  worker thread walks the log and reconciles it against the segment data)
        final CrawlSegmentLog segmentLogObj = (getServer().enableCrawlLog())
                ? _crawlLog.removeSegmentLog(crawlSegment.getListId(), crawlSegment.getSegmentId())
                : null;

        if (segmentLogObj == null && getServer().enableCrawlLog()) {
            _activeLoadCount--;
            throw new RuntimeException(
                    "Expected Non-NULL CrawlSegmentLog for Segment:" + crawlSegment.getSegmentId());
        }

        getServer().getDefaultThreadPool()
                .execute(new ConcurrentTask<CrawlSegmentStatus>(getServer().getEventLoop(),

                        new Callable<CrawlSegmentStatus>() {

                            public CrawlSegmentStatus call() throws Exception {

                                try {

                                    LOG.info("### SYNC:Loading SegmentFPInfo for List:"
                                            + crawlSegment.getListId() + " Segment:"
                                            + crawlSegment.getSegmentId());
                                    // load work unit fingerprint detail  ...
                                    final CrawlSegmentFPMap urlFPMap = SegmentLoader.loadCrawlSegmentFPInfo(
                                            crawlSegment.getListId(), crawlSegment.getSegmentId(),
                                            CrawlerEngine.this.getServer().getHostName(),
                                            new SegmentLoader.CancelOperationCallback() {

                                                @Override
                                                public boolean cancelOperation() {
                                                    return _shutdownFlag;
                                                }
                                            });

                                    if (_shutdownFlag) {
                                        LOG.info("### SYNC:EXITING LOAD OF List:" + crawlSegment.getListId()
                                                + " Segment:" + crawlSegment.getSegmentId());
                                        return new CrawlSegmentStatus();
                                    }

                                    if (getServer().enableCrawlLog()) {
                                        LOG.info("### SYNC: Syncing Log to SegmentFPInfo for List:"
                                                + crawlSegment.getListId() + " Segment:"
                                                + crawlSegment.getSegmentId());
                                        // re-sync log to segment ... 
                                        segmentLogObj.syncToLog(urlFPMap);
                                    }

                                    LOG.info("### SYNC: Sync for List:" + crawlSegment.getListId() + " Segment:"
                                            + crawlSegment.getSegmentId() + " Returned:" + urlFPMap._urlCount
                                            + " Total URLS and " + urlFPMap._urlsComplete + " CompleteURLS");

                                    if (!_shutdownFlag) {
                                        // now activate the segment log ... 
                                        final Semaphore segActiveSemaphore = new Semaphore(0);

                                        // check for completion here ... 
                                        if (urlFPMap._urlCount == urlFPMap._urlsComplete && !_shutdownFlag) {
                                            LOG.info("### SYNC: For List:" + crawlSegment.getListId()
                                                    + " Segment:" + crawlSegment.getSegmentId()
                                                    + " indicates Completed Segment.");

                                            _server.getEventLoop()
                                                    .setTimer(new Timer(1, false, new Timer.Callback() {

                                                        public void timerFired(Timer timer) {
                                                            LOG.info("### SYNC: For List:"
                                                                    + crawlSegment.getListId() + " Segment:"
                                                                    + crawlSegment.getSegmentId()
                                                                    + " setting Status to CompletedCompleted Segment.");

                                                            if (!_shutdownFlag) {
                                                                // update segment status ... 
                                                                status.setUrlCount(urlFPMap._urlCount);
                                                                status.setUrlsComplete(urlFPMap._urlCount);
                                                                // update crawl status  
                                                                status.setCrawlStatus(
                                                                        CrawlSegmentStatus.CrawlStatus.CRAWL_COMPLETE);
                                                                status.setIsComplete(true);
                                                                // set dirty flag for segment 
                                                                status.setIsDirty(true);
                                                            }
                                                            // and release semaphore ... 
                                                            segActiveSemaphore.release();

                                                        }

                                                    }));
                                        } else {

                                            _server.getEventLoop()
                                                    .setTimer(new Timer(1, false, new Timer.Callback() {

                                                        public void timerFired(Timer timer) {
                                                            if (!_shutdownFlag) {
                                                                if (getServer().enableCrawlLog()) {
                                                                    //back in primary thread context, so go ahead and SAFELY re-activate the segment log ... 
                                                                    activateSegmentLog(segmentLogObj);
                                                                }
                                                            }
                                                            // and release semaphore ... 
                                                            segActiveSemaphore.release();
                                                        }

                                                    }));
                                        }
                                        // wait for segment activation ... 
                                        segActiveSemaphore.acquireUninterruptibly();
                                    }

                                    // now if complete return immediately 
                                    if (urlFPMap._urlCount != urlFPMap._urlsComplete && !_shutdownFlag) {

                                        LOG.info("### LOADER Loading CrawlSegment Detail for Segment:"
                                                + crawlSegment.getSegmentId());

                                        SegmentLoader.loadCrawlSegment(crawlSegment.getListId(),
                                                crawlSegment.getSegmentId(),
                                                CrawlerEngine.this.getServer().getHostName(), urlFPMap, null,
                                                createLoadProgressCallback(status),
                                                new SegmentLoader.CancelOperationCallback() {

                                                    @Override
                                                    public boolean cancelOperation() {
                                                        return _shutdownFlag;
                                                    }
                                                });
                                    }

                                } catch (Exception e) {
                                    LOG.error(StringUtils.stringifyException(e));
                                    throw e;
                                }

                                return status;
                            }
                        }, createCompletionCallback(crawlSegment, status)));
    } else {
        getServer().loadExternalCrawlSegment(crawlSegment, createLoadProgressCallback(status),
                createCompletionCallback(crawlSegment, status), status);
    }
    return status;
}

From source file:org.jahia.services.render.filter.cache.LegacyAclCacheKeyPartGenerator.java

@SuppressWarnings("unchecked")
private Map<String, Set<String>> getPrincipalAcl(final String aclKey, final String siteKey)
        throws RepositoryException {

    final String cacheKey = siteKey != null ? aclKey + ":" + siteKey : aclKey;
    Element element = cache.get(cacheKey);
    if (element == null) {
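        // A single-permit semaphore per cache key throttles concurrent ACL computation;
        // tryAcquire below waits up to 500 ms for another thread to populate the cache entry.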
        Semaphore semaphore = processings.get(cacheKey);
        if (semaphore == null) {
            semaphore = new Semaphore(1);
            processings.putIfAbsent(cacheKey, semaphore);
        }
        try {
            semaphore.tryAcquire(500, TimeUnit.MILLISECONDS);
            element = cache.get(cacheKey);
            if (element != null) {
                return (Map<String, Set<String>>) element.getObjectValue();
            }

            logger.debug("Getting ACL for {}", cacheKey);
            long l = System.currentTimeMillis();

            Map<String, Set<String>> map = template.doExecuteWithSystemSessionAsUser(null,
                    Constants.LIVE_WORKSPACE, null, new JCRCallback<Map<String, Set<String>>>() {

                        @Override
                        public Map<String, Set<String>> doInJCR(JCRSessionWrapper session)
                                throws RepositoryException {
                            Query query = session.getWorkspace().getQueryManager()
                                    .createQuery("select * from [jnt:ace] as ace where ace.[j:principal] = '"
                                            + JCRContentUtils.sqlEncode(aclKey) + "'", Query.JCR_SQL2);
                            QueryResult queryResult = query.execute();
                            NodeIterator rowIterator = queryResult.getNodes();

                            Map<String, Set<String>> mapGranted = new ConcurrentHashMap<String, Set<String>>();
                            Map<String, Set<String>> mapDenied = new LinkedHashMap<String, Set<String>>();

                            while (rowIterator.hasNext()) {
                                JCRNodeWrapper node = (JCRNodeWrapper) rowIterator.next();
                                if (siteKey != null && !node.getResolveSite().getName().equals(siteKey)) {
                                    continue;
                                }
                                String path = node.getParent().getParent().getPath();
                                Set<String> foundRoles = new HashSet<String>();
                                boolean granted = node.getProperty("j:aceType").getString().equals("GRANT");
                                Value[] roles = node.getProperty(Constants.J_ROLES).getValues();
                                for (Value r : roles) {
                                    String role = r.getString();
                                    if (!foundRoles.contains(role)) {
                                        foundRoles.add(role);
                                    }
                                }
                                if (path.equals("/")) {
                                    path = "";
                                }
                                if (granted) {
                                    mapGranted.put(path, foundRoles);
                                } else {
                                    mapDenied.put(path, foundRoles);
                                }
                            }
                            for (String deniedPath : mapDenied.keySet()) {
                                String grantedPath = deniedPath;
                                while (grantedPath.length() > 0) {
                                    grantedPath = StringUtils.substringBeforeLast(grantedPath, "/");
                                    if (mapGranted.containsKey(grantedPath)) {
                                        Collection<String> intersection = CollectionUtils.intersection(
                                                mapGranted.get(grantedPath), mapDenied.get(deniedPath));
                                        for (String s : intersection) {
                                            mapGranted.get(grantedPath).add(s + " -> " + deniedPath);
                                        }
                                    }
                                }
                            }
                            return mapGranted;
                        }
                    });
            element = new Element(cacheKey, map);
            element.setEternal(true);
            cache.put(element);
            logger.debug("Getting ACL for {} took {} ms", cacheKey, System.currentTimeMillis() - l);
        } catch (InterruptedException e) {
            logger.debug(e.getMessage(), e);
        } finally {
            semaphore.release();
        }
    }
    return (Map<String, Set<String>>) element.getObjectValue();
}

From source file:com.impetus.ankush2.utils.LogsManager.java

public String downloadLogsOnServer() throws AnkushException {
    try {
        String clusterResourcesLogsDir = AppStoreWrapper.getClusterResourcesPath() + "logs/";

        String clusterLogsDirName = "Logs_" + this.clusterConfig.getName() + "_" + System.currentTimeMillis();

        String clusterLogsArchiveName = clusterLogsDirName + ".zip";

        final String cmpLogsDirPathOnServer = clusterResourcesLogsDir + clusterLogsDirName + "/" + component
                + "/";

        if (!FileUtils.ensureFolder(cmpLogsDirPathOnServer)) {
            throw (new AnkushException("Could not create log directory for " + this.component + " on server."));
        }

        final Semaphore semaphore = new Semaphore(nodes.size());
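        // One permit per node: a permit is acquired before each download task is submitted and released
        // in the task's finally block; acquire(nodes.size()) below then waits for every task to finish.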

        try {
            for (final String host : nodes) {
                semaphore.acquire();
                AppStoreWrapper.getExecutor().execute(new Runnable() {
                    @Override
                    public void run() {
                        NodeConfig nodeConfig = clusterConfig.getNodes().get(host);

                        SSHExec connection = SSHUtils.connectToNode(host, clusterConfig.getAuthConf());
                        if (connection == null) {
                            // TODO: handle Error
                            LOG.error("Could not fetch log files - Connection not initialized", component,
                                    host);
                        }
                        Serviceable serviceableObj = null;
                        try {
                            serviceableObj = ObjectFactory.getServiceObject(component);

                            for (String role : roles) {
                                if (nodeConfig.getRoles().get(component).contains(role)) {

                                    String tmpLogsDirOnServer = cmpLogsDirPathOnServer + "/" + role + "/" + host
                                            + "/";
                                    if (!FileUtils.ensureFolder(tmpLogsDirOnServer)) {
                                        // TODO: handle Error
                                        // Log error in operation table and
                                        // skip
                                        // this role
                                        continue;
                                    }

                                    String nodeLogsDirPath = FileUtils.getSeparatorTerminatedPathEntry(
                                            serviceableObj.getLogDirPath(clusterConfig, host, role));
                                    String logFilesRegex = serviceableObj.getLogFilesRegex(clusterConfig, host,
                                            role, null);
                                    String outputTarArchiveName = role + "_" + System.currentTimeMillis()
                                            + ".tar.gz";
                                    try {
                                        List<String> logsFilesList = AnkushUtils.listFilesInDir(connection,
                                                host, nodeLogsDirPath, logFilesRegex);

                                        AnkushTask ankushTask = new CreateTarArchive(nodeLogsDirPath,
                                                nodeLogsDirPath + outputTarArchiveName, logsFilesList);
                                        if (connection.exec(ankushTask).rc != 0) {
                                            // TODO: handle Error
                                            // Log error in operation table
                                            // and
                                            // skip this
                                            // role
                                            continue;
                                        }
                                        connection.downloadFile(nodeLogsDirPath + outputTarArchiveName,
                                                tmpLogsDirOnServer + outputTarArchiveName);
                                        ankushTask = new Remove(nodeLogsDirPath + outputTarArchiveName);
                                        connection.exec(ankushTask);
                                        System.out.println("tmpLogsDirOnServer + outputTarArchiveName : "
                                                + tmpLogsDirOnServer + outputTarArchiveName);
                                        ankushTask = new UnTarArchive(tmpLogsDirOnServer + outputTarArchiveName,
                                                tmpLogsDirOnServer);
                                        System.out.println(
                                                "ankushTask.getCommand() : " + ankushTask.getCommand());
                                        Runtime.getRuntime().exec(ankushTask.getCommand()).waitFor();
                                        ankushTask = new Remove(tmpLogsDirOnServer + outputTarArchiveName);
                                        Runtime.getRuntime().exec(ankushTask.getCommand()).waitFor();
                                    } catch (Exception e) {
                                        e.printStackTrace();
                                        // TODO: handle exception
                                        // Log error in operation table and
                                        // skip
                                        // this role
                                        continue;
                                    }
                                }
                            }
                        } catch (Exception e) {
                            // TODO: handle exception
                            return;
                        } finally {
                            if (semaphore != null) {
                                semaphore.release();
                            }
                            if (connection != null) {
                                connection.disconnect();
                            }
                        }
                    }
                });
            }
            semaphore.acquire(nodes.size());
        } catch (Exception e) {
            // Most likely an InterruptedException from semaphore.acquire(); log it and fall
            // through so whatever was collected is still archived below.
            LOG.error(e.getMessage(), component, e);
        }

        ZipUtil.pack(new File(clusterResourcesLogsDir + clusterLogsDirName),
                new File(clusterResourcesLogsDir + clusterLogsArchiveName), true);

        org.apache.commons.io.FileUtils.deleteDirectory(new File(clusterResourcesLogsDir + clusterLogsDirName));

        // result.put(com.impetus.ankush2.constant.Constant.Keys.DOWNLOADPATH,
        // clusterResourcesLogsDir + clusterLogsArchiveName);
    } catch (Exception e) {
        // this.addAndLogError("Could not download logs for " + component +
        // ".");
        LOG.error(e.getMessage(), component, e);
    }
    return null;
}
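
In the method above, the Semaphore is sized to the number of nodes, every per-host Runnable returns its permit with release() in a finally block, and the submitting thread then blocks on acquire(nodes.size()) so it only continues once every task has signalled completion. Below is a minimal, self-contained sketch of that same pattern using only the JDK; the class name, the host list and the printed "work" are placeholders, not part of the code above.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;

public class PerHostCompletionSketch {

    public static void main(String[] args) throws InterruptedException {
        final List<String> hosts = Arrays.asList("node1", "node2", "node3");
        // One permit per host: here the semaphore acts as a completion latch,
        // not as a concurrency limiter.
        final Semaphore semaphore = new Semaphore(hosts.size());
        ExecutorService executor = Executors.newFixedThreadPool(hosts.size());

        for (final String host : hosts) {
            semaphore.acquire(); // take a permit before handing work to the pool
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        System.out.println("collecting logs from " + host); // placeholder work
                    } finally {
                        semaphore.release(); // always return the permit, even on failure
                    }
                }
            });
        }

        // Blocks until every task has called release(), i.e. all hosts are done.
        semaphore.acquire(hosts.size());
        executor.shutdown();
        System.out.println("all hosts processed");
    }
}

Because the semaphore starts with as many permits as there are hosts, the submission loop never blocks; the permits only exist so that the final acquire(hosts.size()) waits for the slowest task.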

From source file:com.impetus.ankush2.hadoop.monitor.HadoopComponentMonitor.java

/**
 * Editparams.
 */
private void editparams() {

    this.hadoopConfig = HadoopUtils.getHadoopConfig(this.clusterConf);
    String errMsg = "Unable to process request to edit Hadoop configuration files.";

    if (!HadoopUtils.isManagedByAnkush(this.hadoopConfig)) {
        this.addAndLogError(errMsg + " " + Constant.Registration.ErrorMsg.NOT_MANAGED_MODE);
        return;
    }

    try {
        this.clusterConf.incrementOperation();
        boolean isAgentDown = AnkushUtils.isAnyAgentDown(this.hadoopConfig.getNodes().keySet());
        if (isAgentDown) {
            throw new AnkushException(
                    "Could not process edit parameters request: AnkushAgent is down on a few nodes.");
        }

        final Map<String, Object> confParams = (Map<String, Object>) parameterMap.get("params");

        final String loggedUser = (String) parameterMap.get("loggedUser");

        AppStoreWrapper.getExecutor().execute(new Runnable() {
            @Override
            public void run() {
                final Semaphore semaphore = new Semaphore(hadoopConfig.getNodes().size());
                try {
                    // connect with all the component nodes
                    AnkushUtils.connectNodesString(clusterConf, hadoopConfig.getNodes().keySet());

                    for (final String host : hadoopConfig.getNodes().keySet()) {

                        semaphore.acquire();
                        AppStoreWrapper.getExecutor().execute(new Runnable() {
                            @Override
                            public void run() {
                                try {
                                    for (Map.Entry<String, Object> entry : confParams.entrySet()) {

                                        // get fileName
                                        String fileName = (String) entry.getKey();
                                        // get config params list
                                        List<Map> params = (List<Map>) entry.getValue();

                                        for (Map param : params) {
                                            final Parameter parameter = JsonMapperUtil.objectFromMap(param,
                                                    Parameter.class);

                                            String status = parameter.getStatus();

                                            Result res = null;

                                            ConfigurationManager confManager = new ConfigurationManager();

                                            // get component
                                            // homepath
                                            String confDir = HadoopUtils.getHadoopConfDir(hadoopConfig);

                                            // get server.properties
                                            // file path
                                            String propertyFilePath = confDir + fileName;

                                            // if connection is
                                            // established.

                                            switch (Constant.ParameterActionType
                                                    .valueOf(status.toUpperCase())) {
                                            case ADD:
                                                if (addParam(clusterConf.getNodes().get(host),
                                                        Constant.Component.Name.HADOOP, parameter.getName(),
                                                        parameter.getValue(), propertyFilePath,
                                                        Constant.File_Extension.XML)) {
                                                    confManager.saveConfiguration(clusterConf.getClusterId(),
                                                            loggedUser, fileName, host, parameter.getName(),
                                                            parameter.getValue());
                                                }
                                                break;
                                            case EDIT:
                                                if (editParam(clusterConf.getNodes().get(host),
                                                        Constant.Component.Name.HADOOP, parameter.getName(),
                                                        parameter.getValue(), propertyFilePath,
                                                        Constant.File_Extension.XML)) {
                                                    confManager.saveConfiguration(clusterConf.getClusterId(),
                                                            loggedUser, fileName, host, parameter.getName(),
                                                            parameter.getValue());
                                                }
                                                break;
                                            case DELETE:
                                                if (deleteParam(clusterConf.getNodes().get(host),
                                                        Constant.Component.Name.HADOOP, parameter.getName(),
                                                        propertyFilePath, Constant.File_Extension.XML)) {
                                                    confManager.removeOldConfiguration(
                                                            clusterConf.getClusterId(), host, fileName,
                                                            parameter.getName());
                                                }
                                                break;
                                            }
                                        }
                                    }
                                } catch (Exception e) {
                                    // To be Handled : Exception for
                                    // Edit Parameter call
                                } finally {
                                    if (semaphore != null) {
                                        semaphore.release();
                                    }
                                }
                            }
                        });
                    }
                    semaphore.acquire(hadoopConfig.getNodes().size());
                    // disconnect with all the component nodes
                    AnkushUtils.disconnectCompNodes(clusterConf, hadoopConfig.getNodes().keySet());
                } catch (Exception e) {
                    // To be Handled : Exception for Edit Parameter call
                }
            }

        });
        result.put("message", "Parameters update request placed successfully.");
    } catch (AnkushException e) {
        this.addErrorAndLogException(e.getMessage(), e);
    } catch (Exception e) {
        this.addErrorAndLogException(errMsg, e);
    }
}
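
editparams() relies on the same release()-in-finally discipline: if a worker fails while editing a configuration file, its permit is still returned, so the coordinating Runnable's acquire(hadoopConfig.getNodes().size()) cannot block forever on that worker. The sketch below shows the idea with a Semaphore that starts at zero and is used purely as a completion counter; the worker count and the simulated failure are illustrative, not taken from the code above.

import java.util.concurrent.Semaphore;

public class ReleaseInFinallySketch {

    public static void main(String[] args) throws InterruptedException {
        final int workers = 3;
        final Semaphore done = new Semaphore(0); // no permits up front: pure completion counter

        for (int i = 0; i < workers; i++) {
            final int id = i;
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        if (id == 1) {
                            throw new IllegalStateException("simulated failure");
                        }
                        System.out.println("worker " + id + " finished");
                    } catch (Exception e) {
                        System.out.println("worker " + id + " failed: " + e.getMessage());
                    } finally {
                        done.release(); // signal completion whether the work succeeded or not
                    }
                }
            }).start();
        }

        // One permit arrives per worker, so this returns only after all of them,
        // including the failed one, have reported back.
        done.acquire(workers);
        System.out.println("all workers accounted for");
    }
}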

From source file:com.impetus.ankush2.framework.monitor.AbstractMonitor.java

public void downloadlogs() {
    final String component = (String) parameterMap.get(com.impetus.ankush2.constant.Constant.Keys.COMPONENT);
    if (component == null || component.isEmpty()) {
        this.addAndLogError("Invalid Log request: Please specify a component.");
        return;
    }
    try {
        ArrayList<String> nodes = (ArrayList) parameterMap.get(Constant.JsonKeys.Logs.NODES);
        if (nodes == null || nodes.isEmpty()) {
            nodes = new ArrayList<String>(this.clusterConf.getComponents().get(component).getNodes().keySet());
        }

        ArrayList<String> roles = (ArrayList) parameterMap.get(Constant.JsonKeys.Logs.ROLES);

        Serviceable serviceableObj = ObjectFactory.getServiceObject(component);

        if (roles == null || roles.isEmpty()) {
            roles = new ArrayList<String>(serviceableObj.getServiceList(this.clusterConf));
        }

        String clusterResourcesLogsDir = AppStoreWrapper.getClusterResourcesPath() + "logs/";

        String clusterLogsDirName = "Logs_" + this.clusterConf.getName() + "_" + System.currentTimeMillis();

        String clusterLogsArchiveName = clusterLogsDirName + ".zip";

        final String cmpLogsDirPathOnServer = clusterResourcesLogsDir + clusterLogsDirName + "/" + component
                + "/";

        if (!FileUtils.ensureFolder(cmpLogsDirPathOnServer)) {
            this.addAndLogError("Could not create log directory for " + component + " on server.");
            return;
        }

        final Semaphore semaphore = new Semaphore(nodes.size());
        final ArrayList<String> rolesObj = new ArrayList<String>(roles);
        try {
            for (final String host : nodes) {
                semaphore.acquire();
                AppStoreWrapper.getExecutor().execute(new Runnable() {
                    @Override
                    public void run() {
                        NodeConfig nodeConfig = clusterConf.getNodes().get(host);

                        SSHExec connection = SSHUtils.connectToNode(host, clusterConf.getAuthConf());
                        if (connection == null) {
                            // Could not establish an SSH connection: release the permit and skip this host.
                            logger.error("Could not fetch log files - Connection not initialized", component,
                                    host);
                            semaphore.release();
                            return;
                        }
                        Serviceable serviceableObj = null;
                        try {
                            serviceableObj = ObjectFactory.getServiceObject(component);

                            for (String role : rolesObj) {
                                if (nodeConfig.getRoles().get(component).contains(role)) {

                                    String tmpLogsDirOnServer = cmpLogsDirPathOnServer + "/" + role + "/" + host
                                            + "/";
                                    if (!FileUtils.ensureFolder(tmpLogsDirOnServer)) {
                                        // TODO: handle Error
                                        // Log error in operation table and
                                        // skip
                                        // this role
                                        continue;
                                    }

                                    String nodeLogsDirPath = FileUtils.getSeparatorTerminatedPathEntry(
                                            serviceableObj.getLogDirPath(clusterConf, host, role));
                                    String logFilesRegex = serviceableObj.getLogFilesRegex(clusterConf, host,
                                            role, null);
                                    String outputTarArchiveName = role + "_" + System.currentTimeMillis()
                                            + ".tar.gz";
                                    try {
                                        List<String> logsFilesList = AnkushUtils.listFilesInDir(connection,
                                                host, nodeLogsDirPath, logFilesRegex);
                                        AnkushTask ankushTask = new CreateTarArchive(nodeLogsDirPath,
                                                nodeLogsDirPath + outputTarArchiveName, logsFilesList);
                                        if (connection.exec(ankushTask).rc != 0) {
                                            // TODO: handle Error
                                            // Log error in operation table
                                            // and
                                            // skip this
                                            // role
                                            continue;
                                        }
                                        connection.downloadFile(nodeLogsDirPath + outputTarArchiveName,
                                                tmpLogsDirOnServer + outputTarArchiveName);
                                        ankushTask = new Remove(nodeLogsDirPath + outputTarArchiveName);
                                        connection.exec(ankushTask);

                                        ankushTask = new UnTarArchive(tmpLogsDirOnServer + outputTarArchiveName,
                                                tmpLogsDirOnServer);

                                        Runtime.getRuntime().exec(ankushTask.getCommand()).waitFor();
                                        ankushTask = new Remove(tmpLogsDirOnServer + outputTarArchiveName);
                                        Runtime.getRuntime().exec(ankushTask.getCommand()).waitFor();
                                    } catch (Exception e) {
                                        e.printStackTrace();
                                        // TODO: handle exception
                                        // Log error in operation table and
                                        // skip
                                        // this role
                                        continue;
                                    }
                                }
                            }
                        } catch (Exception e) {
                            // TODO: handle exception
                            return;
                        } finally {
                            if (semaphore != null) {
                                semaphore.release();
                            }
                            if (connection != null) {
                                connection.disconnect();
                            }
                        }
                    }
                });
            }
            semaphore.acquire(nodes.size());
        } catch (Exception e) {
            // Most likely an InterruptedException from semaphore.acquire(); log it and fall
            // through so whatever was collected is still archived below.
            logger.error(e.getMessage(), component, e);
        }

        ZipUtil.pack(new File(clusterResourcesLogsDir + clusterLogsDirName),
                new File(clusterResourcesLogsDir + clusterLogsArchiveName), true);

        org.apache.commons.io.FileUtils.deleteDirectory(new File(clusterResourcesLogsDir + clusterLogsDirName));

        result.put(com.impetus.ankush2.constant.Constant.Keys.DOWNLOADPATH,
                clusterResourcesLogsDir + clusterLogsArchiveName);
    } catch (Exception e) {
        this.addAndLogError("Could not download logs for " + component + ".");
        logger.error(e.getMessage(), component, e);
    }
}
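
downloadlogs() waits for its per-host tasks with semaphore.acquire(nodes.size()), so a single hung node would keep the request thread blocked until it is interrupted. One way to bound that wait, sketched here with assumed names, is tryAcquire(permits, timeout, unit), which returns false instead of blocking indefinitely; the caller can then log the hosts that never called release() and still archive whatever was collected.

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

public class BoundedWaitSketch {

    // Wait for `workers` completion signals, but no longer than the given timeout.
    static boolean awaitAll(Semaphore done, int workers, long timeout, TimeUnit unit)
            throws InterruptedException {
        return done.tryAcquire(workers, timeout, unit);
    }

    public static void main(String[] args) throws InterruptedException {
        final Semaphore done = new Semaphore(0);

        // Simulate a batch of two workers of which only one ever reports back.
        new Thread(new Runnable() {
            @Override
            public void run() {
                done.release(); // the only completion signal that will arrive
            }
        }).start();

        // Expects two releases but receives one, so this returns false after two seconds.
        boolean allDone = awaitAll(done, 2, 2, TimeUnit.SECONDS);
        System.out.println("all workers finished in time: " + allDone);
    }
}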