Example usage for java.util HashMap HashMap

List of usage examples for java.util HashMap HashMap

Introduction

On this page you can find example usage for the java.util.HashMap no-argument constructor, HashMap().

Prototype

public HashMap() 

Document

Constructs an empty HashMap with the default initial capacity (16) and the default load factor (0.75).
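
Before the full examples below, here is a minimal, self-contained sketch of this constructor in typical use. The class name and map contents are illustrative only and are not taken from any of the usage examples that follow.

import java.util.HashMap;
import java.util.Map;

public class HashMapConstructorDemo {
    public static void main(String[] args) {
        // Constructs an empty map with the default initial capacity (16) and load factor (0.75)
        Map<String, Integer> counts = new HashMap<>();
        counts.put("alpha", 1);
        counts.put("beta", 2);
        System.out.println(counts.get("alpha")); // prints 1
        System.out.println(counts.size());       // prints 2
    }
}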

Usage

From source file:com.da.daum.DaumCafeOneLineParser.java

public static void main(String[] args) throws IOException {
    DaumCafeOneLineParser parser = new DaumCafeOneLineParser();
    String listBody = "(?)^*~.txt";
    listBody = listBody.replaceAll("\\*", "");
    System.out.println(listBody);
    // FileUtils.writeStringToFile(new File(file), listBody, "utf-8");
    // File file = new File("C:\\TEMP\\daum\\user\\Lak_view_.txt");
    File file = new File("C:\\TEMP\\daum\\user\\Lak_list_1.txt");
    listBody = FileUtils.readFileToString(file, "utf-8");
    Map pageMap = new HashMap();
    parser.setDaumListVoList(listBody, pageMap);
    // parser.setDaumView(listBody);

}

From source file:com.thed.zapi.cloud.sample.FetchExecuteUpdate.java

public static void main(String[] args) throws JSONException, URISyntaxException, ParseException, IOException {
    final String issueSearchUri = API_SEARCH_ISSUES.replace("{SERVER}", jiraBaseURL);

    /**
     * Get Test Issues by JQL
     */

    //Json object for JQL search
    JSONObject jqlJsonObj = new JSONObject();
    jqlJsonObj.put("jql", "project = SUP"); // Replace the value with Valid JQL
    jqlJsonObj.put("startAt", 0);
    jqlJsonObj.put("maxResults", 21); // maxResults to be returned by search
    jqlJsonObj.put("fieldsByKeys", false);

    String[] IssueIds = getIssuesByJQL(issueSearchUri, userName, password, jqlJsonObj);
    // System.out.println(ArrayUtils.toString(IssueIds));

    /**
     * Add tests to Cycle
     * 
     */

    final String addTestsUri = API_ADD_TESTS.replace("{SERVER}", zephyrBaseUrl) + cycleId;

    /** Create JSON object by providing input values */
    JSONObject addTestsObj = new JSONObject();
    addTestsObj.put("issues", IssueIds);
    addTestsObj.put("method", "1");
    addTestsObj.put("projectId", projectId);
    addTestsObj.put("versionId", versionId);

    StringEntity addTestsJSON = null;
    try {
        addTestsJSON = new StringEntity(addTestsObj.toString());
    } catch (UnsupportedEncodingException e1) {
        e1.printStackTrace();
    }
    addTestsToCycle(addTestsUri, client, accessKey, addTestsJSON);

    /**
     * Get Execution Id's by CycleId and Add them to Ad Hoc cycle of
     * UnScheduled Version
     * 
     */
    Map<String, String> executionIds = new HashMap<String, String>();
    final String getExecutionsUri = API_GET_EXECUTIONS.replace("{SERVER}", zephyrBaseUrl) + cycleId
            + "?projectId=" + projectId + "&versionId=" + versionId;

    executionIds = getExecutionsByCycleId(getExecutionsUri, client, accessKey);

    /**
     * Bulk Update Executions with Status by Execution Id
     * 
     */

    JSONObject statusObj = new JSONObject();
    statusObj.put("id", "1");

    JSONObject executeTestsObj = new JSONObject();
    executeTestsObj.put("status", statusObj);
    executeTestsObj.put("cycleId", cycleId);
    executeTestsObj.put("projectId", projectId);
    executeTestsObj.put("versionId", versionId);
    executeTestsObj.put("comment", "Executed by ZAPI Cloud");

    for (String key : executionIds.keySet()) {
        final String updateExecutionUri = API_UPDATE_EXECUTION.replace("{SERVER}", zephyrBaseUrl) + key;
        // System.out.println(updateExecutionUri);
        // System.out.println(executionIds.get(key));
        executeTestsObj.put("issueId", executionIds.get(key));
        // System.out.println(executeTestsObj.toString());
        StringEntity executeTestsJSON = null;
        try {
            executeTestsJSON = new StringEntity(executeTestsObj.toString());
        } catch (UnsupportedEncodingException e1) {
            e1.printStackTrace();
        }
        updateExecutions(updateExecutionUri, client, accessKey, executeTestsJSON);
    }

}

From source file:ReadTemp.java

/**
 * Method main
 *
 *
 * @param args
 *
 * @throws OneWireException
 * @throws OneWireIOException
 *
 */
public static void main(String[] args) throws OneWireIOException, OneWireException {
    boolean usedefault = false;
    DSPortAdapter access = null;
    String adapter_name = null;
    String port_name = null;

    variableNames = new HashMap();

    if ((args == null) || (args.length < 1)) {
        try {
            access = OneWireAccessProvider.getDefaultAdapter();

            if (access == null)
                throw new Exception();
        } catch (Exception e) {
            System.out.println("Couldn't get default adapter!");
            printUsageString();

            return;
        }

        usedefault = true;
    }

    if (!usedefault) {
        StringTokenizer st = new StringTokenizer(args[0], "_");

        if (st.countTokens() != 2) {
            printUsageString();

            return;
        }

        adapter_name = st.nextToken();
        port_name = st.nextToken();

        System.out.println("Adapter Name: " + adapter_name);
        System.out.println("Port Name: " + port_name);
    }

    if (access == null) {
        try {
            access = OneWireAccessProvider.getAdapter(adapter_name, port_name);
        } catch (Exception e) {
            System.out.println("That is not a valid adapter/port combination.");

            Enumeration en = OneWireAccessProvider.enumerateAllAdapters();

            while (en.hasMoreElements()) {
                DSPortAdapter temp = (DSPortAdapter) en.nextElement();

                System.out.println("Adapter: " + temp.getAdapterName());

                Enumeration f = temp.getPortNames();

                while (f.hasMoreElements()) {
                    System.out.println("   Port name : " + ((String) f.nextElement()));
                }
            }

            return;
        }
    }

    boolean scan = false;

    if (args.length > 1) {
        if (args[1].compareTo("--scan") == 0)
            scan = true;
    } else {
        populateVariableNames();
    }

    while (true) {
        access.adapterDetected();
        access.targetAllFamilies();
        access.beginExclusive(true);
        access.reset();
        access.setSearchAllDevices();

        boolean next = access.findFirstDevice();

        if (!next) {
            System.out.println("Could not find any iButtons!");

            return;
        }

        while (next) {
            OneWireContainer owc = access.getDeviceContainer();

            boolean isTempContainer = false;
            TemperatureContainer tc = null;

            try {
                tc = (TemperatureContainer) owc;
                isTempContainer = true;
            } catch (Exception e) {
                tc = null;
                isTempContainer = false; //just to reiterate
            }

            if (isTempContainer) {
                String id = owc.getAddressAsString();
                if (scan) {
                    System.out.println("= Temperature Sensor Found: " + id);
                } else {

                    double high = 0.0;
                    double low = 0.0;
                    byte[] state = tc.readDevice();

                    boolean selectable = tc.hasSelectableTemperatureResolution();

                    if (selectable)
                        try {
                            tc.setTemperatureResolution(0.0625, state);
                        } catch (Exception e) {
                            System.out.println("= Could not set resolution for " + id + ": " + e.toString());
                        }

                    try {
                        tc.writeDevice(state);
                    } catch (Exception e) {
                        System.out.println("= Could not write device state, all changes lost.");
                        System.out.println("= Exception occurred for " + id + ": " + e.toString());
                    }

                    boolean conversion = false;
                    try {
                        tc.doTemperatureConvert(state);
                        conversion = true;
                    } catch (Exception e) {
                        System.out.println("= Could not complete temperature conversion...");
                        System.out.println("= Exception occurred for " + id + ": " + e.toString());
                    }

                    if (conversion) {
                        state = tc.readDevice();

                        double temp = tc.getTemperature(state);
                        if (temp < 84) {
                            double temp_f = (9.0 / 5.0) * temp + 32;
                            setVariable(id, Double.toString(temp_f));
                        }
                        System.out.println("= Reported temperature from " + (String) variableNames.get(id) + "("
                                + id + "):" + temp);
                    }
                }
            }
            next = access.findNextDevice();
        }
        if (!scan) {
            try {
                Thread.sleep(60 * 5 * 1000);
            } catch (Exception e) {
            }
        } else {
            System.exit(0);
        }
    }
}

From source file:DIA_Umpire_Quant.DIA_Umpire_Quant.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    System.out.println(
            "=================================================================================================");
    System.out.println("DIA-Umpire quantitation with targeted re-extraction analysis (version: "
            + UmpireInfo.GetInstance().Version + ")");
    if (args.length != 1) {
        System.out.println(
                "command format error, it should be like: java -jar -Xmx10G DIA_Umpire_Quant.jar diaumpire_quant.params");
        return;
    }
    try {
        ConsoleLogger.SetConsoleLogger(Level.INFO);
        ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_quant.log");
    } catch (Exception e) {
    }

    try {

        Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
        Logger.getRootLogger().info("Parameter file:" + args[0]);

        BufferedReader reader = new BufferedReader(new FileReader(args[0]));
        String line = "";
        String WorkFolder = "";
        int NoCPUs = 2;

        String UserMod = "";
        String Combined_Prot = "";
        String InternalLibID = "";
        String ExternalLibPath = "";
        String ExternalLibDecoyTag = "DECOY";
        boolean DefaultProtFiltering = true;
        boolean DataSetLevelPepFDR = false;
        float ProbThreshold = 0.99f;
        float ExtProbThreshold = 0.99f;
        float Freq = 0f;
        int TopNPep = 6;
        int TopNFrag = 6;
        float MinFragMz = 200f;
        String FilterWeight = "GW";
        float MinWeight = 0.9f;
        float RTWindow_Int = -1f;
        float RTWindow_Ext = -1f;

        TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600);
        HashMap<String, File> AssignFiles = new HashMap<>();
        boolean InternalLibSearch = false;
        boolean ExternalLibSearch = false;

        boolean ExportSaint = false;
        boolean SAINT_MS1 = false;
        boolean SAINT_MS2 = true;

        HashMap<String, String[]> BaitList = new HashMap<>();
        HashMap<String, String> BaitName = new HashMap<>();
        HashMap<String, String[]> ControlList = new HashMap<>();
        HashMap<String, String> ControlName = new HashMap<>();

        //<editor-fold defaultstate="collapsed" desc="Reading parameter file">
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            Logger.getRootLogger().info(line);
            if (!"".equals(line) && !line.startsWith("#")) {
                //System.out.println(line);
                if (line.equals("==File list begin")) {
                    do {
                        line = reader.readLine();
                        line = line.trim();
                        if (line.equals("==File list end")) {
                            continue;
                        } else if (!"".equals(line)) {
                            File newfile = new File(line);
                            if (newfile.exists()) {
                                AssignFiles.put(newfile.getAbsolutePath(), newfile);
                            } else {
                                Logger.getRootLogger().info("File: " + newfile + " does not exist.");
                            }
                        }
                    } while (!line.equals("==File list end"));
                }
                if (line.split("=").length < 2) {
                    continue;
                }
                String type = line.split("=")[0].trim();
                String value = line.split("=")[1].trim();
                switch (type) {
                case "TargetedExtraction": {
                    InternalLibSearch = Boolean.parseBoolean(value);
                    break;
                }
                case "InternalLibSearch": {
                    InternalLibSearch = Boolean.parseBoolean(value);
                    break;
                }
                case "ExternalLibSearch": {
                    ExternalLibSearch = Boolean.parseBoolean(value);
                    break;
                }

                case "Path": {
                    WorkFolder = value;
                    break;
                }
                case "path": {
                    WorkFolder = value;
                    break;
                }
                case "Thread": {
                    NoCPUs = Integer.parseInt(value);
                    break;
                }
                case "Fasta": {
                    tandemPara.FastaPath = value;
                    break;
                }
                case "Combined_Prot": {
                    Combined_Prot = value;
                    break;
                }
                case "DefaultProtFiltering": {
                    DefaultProtFiltering = Boolean.parseBoolean(value);
                    break;
                }
                case "DecoyPrefix": {
                    if (!"".equals(value)) {
                        tandemPara.DecoyPrefix = value;
                    }
                    break;
                }
                case "UserMod": {
                    UserMod = value;
                    break;
                }
                case "ProteinFDR": {
                    tandemPara.ProtFDR = Float.parseFloat(value);
                    break;
                }
                case "PeptideFDR": {
                    tandemPara.PepFDR = Float.parseFloat(value);
                    break;
                }
                case "DataSetLevelPepFDR": {
                    DataSetLevelPepFDR = Boolean.parseBoolean(value);
                    break;
                }
                case "InternalLibID": {
                    InternalLibID = value;
                    break;
                }
                case "ExternalLibPath": {
                    ExternalLibPath = value;
                    break;
                }
                case "ExtProbThreshold": {
                    ExtProbThreshold = Float.parseFloat(value);
                    break;
                }
                case "RTWindow_Int": {
                    RTWindow_Int = Float.parseFloat(value);
                    break;
                }
                case "RTWindow_Ext": {
                    RTWindow_Ext = Float.parseFloat(value);
                    break;
                }
                case "ExternalLibDecoyTag": {
                    ExternalLibDecoyTag = value;
                    if (ExternalLibDecoyTag.endsWith("_")) {
                        ExternalLibDecoyTag = ExternalLibDecoyTag.substring(0,
                                ExternalLibDecoyTag.length() - 1);
                    }
                    break;
                }
                case "ProbThreshold": {
                    ProbThreshold = Float.parseFloat(value);
                    break;
                }
                case "ReSearchProb": {
                    //ReSearchProb = Float.parseFloat(value);
                    break;
                }
                case "FilterWeight": {
                    FilterWeight = value;
                    break;
                }
                case "MinWeight": {
                    MinWeight = Float.parseFloat(value);
                    break;
                }
                case "TopNFrag": {
                    TopNFrag = Integer.parseInt(value);
                    break;
                }
                case "TopNPep": {
                    TopNPep = Integer.parseInt(value);
                    break;
                }
                case "Freq": {
                    Freq = Float.parseFloat(value);
                    break;
                }
                case "MinFragMz": {
                    MinFragMz = Float.parseFloat(value);
                    break;
                }

                //<editor-fold defaultstate="collapsed" desc="SaintOutput">
                case "ExportSaintInput": {
                    ExportSaint = Boolean.parseBoolean(value);
                    break;
                }
                case "QuantitationType": {
                    switch (value) {
                    case "MS1": {
                        SAINT_MS1 = true;
                        SAINT_MS2 = false;
                        break;
                    }
                    case "MS2": {
                        SAINT_MS1 = false;
                        SAINT_MS2 = true;
                        break;
                    }
                    case "BOTH": {
                        SAINT_MS1 = true;
                        SAINT_MS2 = true;
                        break;
                    }
                    }
                    break;
                }
                //                    case "BaitInputFile": {
                //                        SaintBaitFile = value;
                //                        break;
                //                    }
                //                    case "PreyInputFile": {
                //                        SaintPreyFile = value;
                //                        break;
                //                    }
                //                    case "InterationInputFile": {
                //                        SaintInteractionFile = value;
                //                        break;
                //                    }
                default: {
                    if (type.startsWith("BaitName_")) {
                        BaitName.put(type.substring(9), value);
                    }
                    if (type.startsWith("BaitFile_")) {
                        BaitList.put(type.substring(9), value.split("\t"));
                    }
                    if (type.startsWith("ControlName_")) {
                        ControlName.put(type.substring(12), value);
                    }
                    if (type.startsWith("ControlFile_")) {
                        ControlList.put(type.substring(12), value.split("\t"));
                    }
                    break;
                }
                //</editor-fold>                    
                }
            }
        }
        //</editor-fold>

        //Initialize PTM manager using compomics library
        PTMManager.GetInstance();
        if (!UserMod.equals("")) {
            PTMManager.GetInstance().ImportUserMod(UserMod);
        }

        //Check if the fasta file can be found
        if (!new File(tandemPara.FastaPath).exists()) {
            Logger.getRootLogger().info("Fasta file :" + tandemPara.FastaPath
                    + " cannot be found, the process will be terminated, please check.");
            System.exit(1);
        }

        //Check if the prot.xml file can be found
        if (!new File(Combined_Prot).exists()) {
            Logger.getRootLogger().info("ProtXML file: " + Combined_Prot
                    + " cannot be found, the export protein summary table will be empty.");
        }

        LCMSID protID = null;

        //Parse prot.xml and generate protein master list given an FDR 
        if (Combined_Prot != null && !Combined_Prot.equals("")) {
            protID = LCMSID.ReadLCMSIDSerialization(Combined_Prot);
            if (!"".equals(Combined_Prot) && protID == null) {
                protID = new LCMSID(Combined_Prot, tandemPara.DecoyPrefix, tandemPara.FastaPath);
                ProtXMLParser protxmlparser = new ProtXMLParser(protID, Combined_Prot, 0f);
                //Use DIA-Umpire default protein FDR calculation
                if (DefaultProtFiltering) {
                    protID.RemoveLowLocalPWProtein(0.8f);
                    protID.RemoveLowMaxIniProbProtein(0.9f);
                    protID.FilterByProteinDecoyFDRUsingMaxIniProb(tandemPara.DecoyPrefix, tandemPara.ProtFDR);
                } //Get protein FDR calculation without other filtering
                else {
                    protID.FilterByProteinDecoyFDRUsingLocalPW(tandemPara.DecoyPrefix, tandemPara.ProtFDR);
                }
                protID.LoadSequence();
                protID.WriteLCMSIDSerialization(Combined_Prot);
            }
            Logger.getRootLogger().info("Protein No.:" + protID.ProteinList.size());
        }
        HashMap<String, HashMap<String, FragmentPeak>> IDSummaryFragments = new HashMap<>();

        //Generate DIA file list
        ArrayList<DIAPack> FileList = new ArrayList<>();

        File folder = new File(WorkFolder);
        if (!folder.exists()) {
            Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found.");
            System.exit(1);
        }
        for (final File fileEntry : folder.listFiles()) {
            if (fileEntry.isFile()
                    && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                            | fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry);
            }
            if (fileEntry.isDirectory()) {
                for (final File fileEntry2 : fileEntry.listFiles()) {
                    if (fileEntry2.isFile()
                            && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                                    | fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                        AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2);
                    }
                }
            }
        }

        Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size());
        for (File fileEntry : AssignFiles.values()) {
            Logger.getRootLogger().info(fileEntry.getAbsolutePath());
            String mzXMLFile = fileEntry.getAbsolutePath();
            if (mzXMLFile.toLowerCase().endsWith(".mzxml") | mzXMLFile.toLowerCase().endsWith(".mzml")) {
                DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs);
                FileList.add(DiaFile);
                HashMap<String, FragmentPeak> FragMap = new HashMap<>();
                IDSummaryFragments.put(FilenameUtils.getBaseName(mzXMLFile), FragMap);
                Logger.getRootLogger().info(
                        "=================================================================================================");
                Logger.getRootLogger().info("Processing " + mzXMLFile);
                if (!DiaFile.LoadDIASetting()) {
                    Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete");
                    System.exit(1);
                }
                if (!DiaFile.LoadParams()) {
                    Logger.getRootLogger().info("Loading parameters failed, job is incomplete");
                    System.exit(1);
                }
            }
        }

        LCMSID combinePepID = null;
        if (DataSetLevelPepFDR) {
            combinePepID = LCMSID.ReadLCMSIDSerialization(WorkFolder + "combinePepID.SerFS");
            if (combinePepID == null) {
                FDR_DataSetLevel fdr = new FDR_DataSetLevel();
                fdr.GeneratePepIonList(FileList, tandemPara, WorkFolder + "combinePepID.SerFS");
                combinePepID = fdr.combineID;
                combinePepID.WriteLCMSIDSerialization(WorkFolder + "combinePepID.SerFS");
            }
        }

        //process each DIA file for quantification based on untargeted identifications
        for (DIAPack DiaFile : FileList) {
            long time = System.currentTimeMillis();
            Logger.getRootLogger().info("Loading identification results " + DiaFile.Filename + "....");

            //If the LCMSID serialization is found
            if (!DiaFile.ReadSerializedLCMSID()) {
                DiaFile.ParsePepXML(tandemPara, combinePepID);
                DiaFile.BuildStructure();
                if (!DiaFile.MS1FeatureMap.ReadPeakCluster()) {
                    Logger.getRootLogger().info("Loading peak and structure failed, job is incomplete");
                    System.exit(1);
                }
                DiaFile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster();
                //Generate mapping between index of precursor feature and pseudo MS/MS scan index 
                DiaFile.GenerateClusterScanNomapping();
                //Doing quantification
                DiaFile.AssignQuant();
                DiaFile.ClearStructure();
            }
            DiaFile.IDsummary.ReduceMemoryUsage();
            time = System.currentTimeMillis() - time;
            Logger.getRootLogger().info(DiaFile.Filename + " processed time:"
                    + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time),
                            TimeUnit.MILLISECONDS.toMinutes(time)
                                    - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)),
                            TimeUnit.MILLISECONDS.toSeconds(time)
                                    - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time))));
        }

        //<editor-fold defaultstate="collapsed" desc="Targete re-extraction using internal library">            
        Logger.getRootLogger().info(
                "=================================================================================================");
        if (InternalLibSearch && FileList.size() > 1) {
            Logger.getRootLogger().info("Module C: Targeted extraction using internal library");

            FragmentLibManager libManager = FragmentLibManager.ReadFragmentLibSerialization(WorkFolder,
                    InternalLibID);
            if (libManager == null) {
                Logger.getRootLogger().info("Building internal spectral library");
                libManager = new FragmentLibManager(InternalLibID);
                ArrayList<LCMSID> LCMSIDList = new ArrayList<>();
                for (DIAPack dia : FileList) {
                    LCMSIDList.add(dia.IDsummary);
                }
                libManager.ImportFragLibTopFrag(LCMSIDList, Freq, TopNFrag);
                libManager.WriteFragmentLibSerialization(WorkFolder);
            }
            libManager.ReduceMemoryUsage();

            Logger.getRootLogger()
                    .info("Building retention time prediction model and generate candidate peptide list");
            for (int i = 0; i < FileList.size(); i++) {
                FileList.get(i).IDsummary.ClearMappedPep();
            }
            for (int i = 0; i < FileList.size(); i++) {
                for (int j = i + 1; j < FileList.size(); j++) {
                    RTAlignedPepIonMapping alignment = new RTAlignedPepIonMapping(WorkFolder,
                            FileList.get(i).GetParameter(), FileList.get(i).IDsummary,
                            FileList.get(j).IDsummary);
                    alignment.GenerateModel();
                    alignment.GenerateMappedPepIon();
                }
                FileList.get(i).ExportID();
                FileList.get(i).IDsummary = null;
            }

            Logger.getRootLogger().info("Targeted matching........");
            for (DIAPack diafile : FileList) {
                if (diafile.IDsummary == null) {
                    diafile.ReadSerializedLCMSID();
                }
                if (!diafile.IDsummary.GetMappedPepIonList().isEmpty()) {
                    diafile.UseMappedIon = true;
                    diafile.FilterMappedIonByProb = false;
                    diafile.BuildStructure();
                    diafile.MS1FeatureMap.ReadPeakCluster();
                    diafile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster();
                    diafile.GenerateMassCalibrationRTMap();
                    diafile.TargetedExtractionQuant(false, libManager, 1.1f, RTWindow_Int);
                    diafile.MS1FeatureMap.ClearAllPeaks();
                    diafile.IDsummary.ReduceMemoryUsage();
                    diafile.IDsummary.RemoveLowProbMappedIon(ProbThreshold);
                    diafile.ExportID();
                    Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size()
                            + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size());
                    diafile.ClearStructure();
                }
                diafile.IDsummary = null;
                System.gc();
            }
            Logger.getRootLogger().info(
                    "=================================================================================================");
        }
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Targeted re-extraction using external library">
        //External library search
        if (ExternalLibSearch) {
            Logger.getRootLogger().info("Module C: Targeted extraction using external library");

            //Read external library
            FragmentLibManager ExlibManager = FragmentLibManager.ReadFragmentLibSerialization(WorkFolder,
                    FilenameUtils.getBaseName(ExternalLibPath));
            if (ExlibManager == null) {
                ExlibManager = new FragmentLibManager(FilenameUtils.getBaseName(ExternalLibPath));

                //Import traML file
                ExlibManager.ImportFragLibByTraML(ExternalLibPath, ExternalLibDecoyTag);
                //Check if there are decoy spectra
                ExlibManager.CheckDecoys();
                //ExlibManager.ImportFragLibBySPTXT(ExternalLibPath);
                ExlibManager.WriteFragmentLibSerialization(WorkFolder);
            }
            Logger.getRootLogger()
                    .info("No. of peptide ions in external lib:" + ExlibManager.PeptideFragmentLib.size());
            for (DIAPack diafile : FileList) {
                if (diafile.IDsummary == null) {
                    diafile.ReadSerializedLCMSID();
                }
                //Generate RT mapping
                RTMappingExtLib RTmap = new RTMappingExtLib(diafile.IDsummary, ExlibManager,
                        diafile.GetParameter());
                RTmap.GenerateModel();
                RTmap.GenerateMappedPepIon();

                diafile.BuildStructure();
                diafile.MS1FeatureMap.ReadPeakCluster();
                diafile.GenerateMassCalibrationRTMap();
                //Perform targeted re-extraction
                diafile.TargetedExtractionQuant(false, ExlibManager, ProbThreshold, RTWindow_Ext);
                diafile.MS1FeatureMap.ClearAllPeaks();
                diafile.IDsummary.ReduceMemoryUsage();
                //Remove target IDs below the defined probability threshold
                diafile.IDsummary.RemoveLowProbMappedIon(ExtProbThreshold);
                diafile.ExportID();
                diafile.ClearStructure();
                Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size()
                        + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size());
            }
        }
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Peptide and fragment selection">
        Logger.getRootLogger().info("Peptide and fragment selection across the whole dataset");
        ArrayList<LCMSID> SummaryList = new ArrayList<>();
        for (DIAPack diafile : FileList) {
            if (diafile.IDsummary == null) {
                diafile.ReadSerializedLCMSID();
                diafile.IDsummary.ClearAssignPeakCluster();
                //diafile.IDsummary.ClearPSMs();                    
            }
            if (protID != null) {
                //Generate protein list according to mapping of peptide ions for each DIA file to the master protein list
                diafile.IDsummary.GenerateProteinByRefIDByPepSeq(protID, true);
                diafile.IDsummary.ReMapProPep();
            }
            if ("GW".equals(FilterWeight)) {
                diafile.IDsummary.SetFilterByGroupWeight();
            } else if ("PepW".equals(FilterWeight)) {
                diafile.IDsummary.SetFilterByWeight();
            }
            SummaryList.add(diafile.IDsummary);
        }
        FragmentSelection fragselection = new FragmentSelection(SummaryList);
        fragselection.freqPercent = Freq;
        fragselection.MinFragMZ = MinFragMz;
        fragselection.GeneratePepFragScoreMap();
        fragselection.GenerateTopFragMap(TopNFrag);
        fragselection.GenerateProtPepScoreMap(MinWeight);
        fragselection.GenerateTopPepMap(TopNPep);
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Writing general reports">                 
        ExportTable export = new ExportTable(WorkFolder, SummaryList, IDSummaryFragments, protID,
                fragselection);
        export.Export(TopNPep, TopNFrag, Freq);
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="//<editor-fold defaultstate="collapsed" desc="Generate SAINT input files">
        if (ExportSaint && protID != null) {
            HashMap<String, DIAPack> Filemap = new HashMap<>();
            for (DIAPack DIAfile : FileList) {
                Filemap.put(DIAfile.GetBaseName(), DIAfile);
            }

            FileWriter baitfile = new FileWriter(WorkFolder + "SAINT_Bait_" + DateTimeTag.GetTag() + ".txt");
            FileWriter preyfile = new FileWriter(WorkFolder + "SAINT_Prey_" + DateTimeTag.GetTag() + ".txt");
            FileWriter interactionfileMS1 = null;
            FileWriter interactionfileMS2 = null;
            if (SAINT_MS1) {
                interactionfileMS1 = new FileWriter(
                        WorkFolder + "SAINT_Interaction_MS1_" + DateTimeTag.GetTag() + ".txt");
            }
            if (SAINT_MS2) {
                interactionfileMS2 = new FileWriter(
                        WorkFolder + "SAINT_Interaction_MS2_" + DateTimeTag.GetTag() + ".txt");
            }
            HashMap<String, String> PreyID = new HashMap<>();

            for (String samplekey : ControlName.keySet()) {
                String name = ControlName.get(samplekey);
                for (String file : ControlList.get(samplekey)) {
                    baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "C\n");
                    LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary;
                    if (SAINT_MS1) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID,
                                1);
                    }
                    if (SAINT_MS2) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID,
                                2);
                    }
                }
            }
            for (String samplekey : BaitName.keySet()) {
                String name = BaitName.get(samplekey);
                for (String file : BaitList.get(samplekey)) {
                    baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "T\n");
                    LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary;
                    if (SAINT_MS1) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID,
                                1);
                    }
                    if (SAINT_MS2) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID,
                                2);
                    }
                }
            }
            baitfile.close();
            if (SAINT_MS1) {
                interactionfileMS1.close();
            }
            if (SAINT_MS2) {
                interactionfileMS2.close();
            }
            for (String AccNo : PreyID.keySet()) {
                preyfile.write(AccNo + "\t" + PreyID.get(AccNo) + "\n");
            }
            preyfile.close();
        }

        //</editor-fold>

        Logger.getRootLogger().info("Job done");
        Logger.getRootLogger().info(
                "=================================================================================================");

    } catch (Exception e) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
        throw e;
    }
}

From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step5GoldLabelEstimator.java

@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
    String inputDir = args[0];
    File outputDir = new File(args[1]);

    if (!outputDir.exists()) {
        outputDir.mkdirs();
    }

    // we will process only a subset first
    List<AnnotatedArgumentPair> allArgumentPairs = new ArrayList<>();

    Collection<File> files = IOHelper.listXmlFiles(new File(inputDir));

    for (File file : files) {
        allArgumentPairs.addAll((List<AnnotatedArgumentPair>) XStreamTools.getXStream().fromXML(file));
    }

    // collect turkers and csv
    List<String> turkerIDs = extractAndSortTurkerIDs(allArgumentPairs);
    String preparedCSV = prepareCSV(allArgumentPairs, turkerIDs);

    // save CSV and run MACE
    Path tmpDir = Files.createTempDirectory("mace");
    File maceInputFile = new File(tmpDir.toFile(), "input.csv");
    FileUtils.writeStringToFile(maceInputFile, preparedCSV, "utf-8");

    File outputPredictions = new File(tmpDir.toFile(), "predictions.txt");
    File outputCompetence = new File(tmpDir.toFile(), "competence.txt");

    // run MACE
    MACE.main(new String[] { "--iterations", "500", "--threshold", String.valueOf(MACE_THRESHOLD), "--restarts",
            "50", "--outputPredictions", outputPredictions.getAbsolutePath(), "--outputCompetence",
            outputCompetence.getAbsolutePath(), maceInputFile.getAbsolutePath() });

    // read back the predictions and competence
    List<String> predictions = FileUtils.readLines(outputPredictions, "utf-8");

    // check the output
    if (predictions.size() != allArgumentPairs.size()) {
        throw new IllegalStateException("Wrong size of the predicted file; expected " + allArgumentPairs.size()
                + " lines but was " + predictions.size());
    }

    String competenceRaw = FileUtils.readFileToString(outputCompetence, "utf-8");
    String[] competence = competenceRaw.split("\t");
    if (competence.length != turkerIDs.size()) {
        throw new IllegalStateException(
                "Expected " + turkerIDs.size() + " competence number, got " + competence.length);
    }

    // rank turkers by competence
    Map<String, Double> turkerIDCompetenceMap = new TreeMap<>();
    for (int i = 0; i < turkerIDs.size(); i++) {
        turkerIDCompetenceMap.put(turkerIDs.get(i), Double.valueOf(competence[i]));
    }

    // sort by value descending
    Map<String, Double> sortedCompetences = IOHelper.sortByValue(turkerIDCompetenceMap, false);
    System.out.println("Sorted turker competences: " + sortedCompetences);

    // assign the gold label and competence

    for (int i = 0; i < allArgumentPairs.size(); i++) {
        AnnotatedArgumentPair annotatedArgumentPair = allArgumentPairs.get(i);
        String goldLabel = predictions.get(i).trim();

        // might be empty
        if (!goldLabel.isEmpty()) {
            // so far the gold label has format aXXX_aYYY_a1, aXXX_aYYY_a2, or aXXX_aYYY_equal
            // strip now only the gold label
            annotatedArgumentPair.setGoldLabel(goldLabel);
        }

        // update turker competence
        for (AnnotatedArgumentPair.MTurkAssignment assignment : annotatedArgumentPair.mTurkAssignments) {
            String turkID = assignment.getTurkID();

            int turkRank = getTurkerRank(turkID, sortedCompetences);
            assignment.setTurkRank(turkRank);

            double turkCompetence = turkerIDCompetenceMap.get(turkID);
            assignment.setTurkCompetence(turkCompetence);
        }
    }

    // now sort the data back according to their original file name
    Map<String, List<AnnotatedArgumentPair>> fileNameAnnotatedPairsMap = new HashMap<>();
    for (AnnotatedArgumentPair argumentPair : allArgumentPairs) {
        String fileName = IOHelper.createFileName(argumentPair.getDebateMetaData(),
                argumentPair.getArg1().getStance());

        if (!fileNameAnnotatedPairsMap.containsKey(fileName)) {
            fileNameAnnotatedPairsMap.put(fileName, new ArrayList<AnnotatedArgumentPair>());
        }

        fileNameAnnotatedPairsMap.get(fileName).add(argumentPair);
    }

    // and save them to the output file
    for (Map.Entry<String, List<AnnotatedArgumentPair>> entry : fileNameAnnotatedPairsMap.entrySet()) {
        String fileName = entry.getKey();
        List<AnnotatedArgumentPair> argumentPairs = entry.getValue();

        File outputFile = new File(outputDir, fileName);

        // and save all sampled pairs into a XML file
        XStreamTools.toXML(argumentPairs, outputFile);

        System.out.println("Saved " + argumentPairs.size() + " pairs to " + outputFile);
    }

}

From source file:com.yahoo.athenz.example.instance.InstanceClientRegister.java

public static void main(String[] args) throws MalformedURLException, IOException {

    // parse our command line to retrieve required input

    CommandLine cmd = parseCommandLine(args);

    String domainName = cmd.getOptionValue("domain").toLowerCase();
    String serviceName = cmd.getOptionValue("service").toLowerCase();
    String provider = cmd.getOptionValue("provider").toLowerCase();
    String instance = cmd.getOptionValue("instance");
    String dnsSuffix = cmd.getOptionValue("dnssuffix");
    String providerKeyPath = cmd.getOptionValue("providerkey");
    String providerKeyId = cmd.getOptionValue("providerkeyid");
    String instanceKeyPath = cmd.getOptionValue("instancekey");
    String ztsUrl = cmd.getOptionValue("ztsurl");

    // get our configured private key

    PrivateKey providerKey = Crypto.loadPrivateKey(new File(providerKeyPath));

    // first we are going to generate our attestation data
    // which we are going to use jwt. ZTS Server will send
    // this object to the specified provider for validation

    String compactJws = Jwts.builder().setSubject(domainName + "." + serviceName).setIssuer(provider)
            .setAudience("zts").setId(instance)
            .setExpiration(
                    new Date(System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(5, TimeUnit.MINUTES)))
            .setHeaderParam("keyId", providerKeyId).signWith(SignatureAlgorithm.RS256, providerKey).compact();

    System.out.println("JWS: \n" + compactJws + "\n");

    // now we need to generate our CSR so we can get
    // a TLS certificate for our instance

    PrivateKey instanceKey = Crypto.loadPrivateKey(new File(instanceKeyPath));
    String csr = generateCSR(domainName, serviceName, instance, dnsSuffix, instanceKey);

    if (csr == null) {
        System.err.println("Unable to generate CSR for instance");
        System.exit(1);
    }
    System.out.println("CSR: \n" + csr + "\n");

    // now let's generate our instance register object that will be sent
    // to the ZTS Server

    InstanceRegisterInformation info = new InstanceRegisterInformation().setAttestationData(compactJws)
            .setDomain(domainName).setService(serviceName).setProvider(provider).setToken(true).setCsr(csr);

    // now contact zts server to request identity for instance

    InstanceIdentity identity = null;
    Map<String, List<String>> responseHeaders = new HashMap<>();
    try (ZTSClient ztsClient = new ZTSClient(ztsUrl)) {
        identity = ztsClient.postInstanceRegisterInformation(info, responseHeaders);
    } catch (ZTSClientException ex) {
        System.out.println("Unable to register instance: " + ex.getMessage());
        System.exit(2);
    }

    System.out.println("Identity TLS Certificate: \n" + identity.getX509Certificate());
    Map<String, String> attrs = identity.getAttributes();
    if (attrs != null) {
        System.out.println("Provider Attributes:");
        for (String key : attrs.keySet()) {
            System.out.println("\t" + key + ": " + attrs.get(key));
        }
    }
}

From source file:com.searchcode.app.App.java

public static void main(String[] args) {
    injector = Guice.createInjector(new InjectorConfig());
    int server_port = Helpers.tryParseInt(
            Properties.getProperties().getProperty(Values.SERVERPORT, Values.DEFAULTSERVERPORT),
            Values.DEFAULTSERVERPORT);
    boolean onlyLocalhost = Boolean
            .parseBoolean(Properties.getProperties().getProperty("only_localhost", "false"));

    // Database migrations happen before we start
    databaseMigrations();

    LOGGER.info("Starting searchcode server on port " + server_port);
    Spark.port(server_port);

    JobService js = injector.getInstance(JobService.class);
    Repo repo = injector.getInstance(Repo.class);
    Data data = injector.getInstance(Data.class);
    Api api = injector.getInstance(Api.class);

    ApiService apiService = injector.getInstance(ApiService.class);
    StatsService statsService = new StatsService();

    scl = Singleton.getSearchcodeLib(data);
    js.initialJobs();

    Gson gson = new Gson();

    Spark.staticFileLocation("/public");

    before((request, response) -> {
        if (onlyLocalhost) {
            if (!request.ip().equals("127.0.0.1")) {
                halt(204);
            }
        }
    });

    get("/", (req, res) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q") && !req.queryParams("q").trim().equals("")) {
            String query = req.queryParams("q").trim();
            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            List<String> reposList = new ArrayList<>();
            List<String> langsList = new ArrayList<>();
            List<String> ownsList = new ArrayList<>();

            if (req.queryParams().contains("repo")) {
                String[] repos = new String[0];
                repos = req.queryParamsValues("repo");

                if (repos.length != 0) {
                    reposList = Arrays.asList(repos);
                }
            }

            if (req.queryParams().contains("lan")) {
                String[] langs = new String[0];
                langs = req.queryParamsValues("lan");

                if (langs.length != 0) {
                    langsList = Arrays.asList(langs);
                }
            }

            if (req.queryParams().contains("own")) {
                String[] owns = new String[0];
                owns = req.queryParamsValues("own");

                if (owns.length != 0) {
                    ownsList = Arrays.asList(owns);
                }
            }

            map.put("searchValue", query);
            map.put("searchResultJson",
                    gson.toJson(new CodePreload(query, page, langsList, reposList, ownsList)));

            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "search_test.ftl");
        }

        // Totally pointless vanity but let's rotate the image every week
        int photoId = getWeekOfMonth();

        if (photoId <= 0) {
            photoId = 3;
        }
        if (photoId > 4) {
            photoId = 2;
        }

        CodeSearcher cs = new CodeSearcher();

        map.put("photoId", photoId);
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    get("/html/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);
        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q")) {
            String query = req.queryParams("q").trim();
            String altquery = query.replaceAll("[^A-Za-z0-9 ]", " ").trim().replaceAll(" +", " ");
            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String[] repos = new String[0];
            String[] langs = new String[0];
            String reposFilter = "";
            String langsFilter = "";
            String reposQueryString = "";
            String langsQueryString = "";

            if (req.queryParams().contains("repo")) {
                repos = req.queryParamsValues("repo");

                if (repos.length != 0) {
                    List<String> reposList = Arrays.asList(repos).stream()
                            .map((s) -> "reponame:" + QueryParser.escape(s)).collect(Collectors.toList());

                    reposFilter = " && (" + StringUtils.join(reposList, " || ") + ")";

                    List<String> reposQueryList = Arrays.asList(repos).stream()
                            .map((s) -> "&repo=" + URLEncoder.encode(s)).collect(Collectors.toList());

                    reposQueryString = StringUtils.join(reposQueryList, "");
                }
            }

            if (req.queryParams().contains("lan")) {
                langs = req.queryParamsValues("lan");

                if (langs.length != 0) {
                    List<String> langsList = Arrays.asList(langs).stream()
                            .map((s) -> "languagename:" + QueryParser.escape(s)).collect(Collectors.toList());

                    langsFilter = " && (" + StringUtils.join(langsList, " || ") + ")";

                    List<String> langsQueryList = Arrays.asList(langs).stream()
                            .map((s) -> "&lan=" + URLEncoder.encode(s)).collect(Collectors.toList());

                    langsQueryString = StringUtils.join(langsQueryList, "");
                }
            }

            // split the query, escape it, and AND the terms together
            String cleanQueryString = scl.formatQueryString(query);

            SearchResult searchResult = cs.search(cleanQueryString + reposFilter + langsFilter, page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), query, true));

            for (CodeFacetRepo f : searchResult.getRepoFacetResults()) {
                if (Arrays.asList(repos).contains(f.getRepoName())) {
                    f.setSelected(true);
                }
            }

            for (CodeFacetLanguage f : searchResult.getLanguageFacetResults()) {
                if (Arrays.asList(langs).contains(f.getLanguageName())) {
                    f.setSelected(true);
                }
            }

            map.put("searchValue", query);
            map.put("searchResult", searchResult);
            map.put("reposQueryString", reposQueryString);
            map.put("langsQueryString", langsQueryString);

            map.put("altQuery", altquery);

            map.put("isHtml", true);
            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "searchresults.ftl");
        }

        // Totally pointless vanity but let's rotate the image every week
        int photoId = getWeekOfMonth();

        if (photoId <= 0) {
            photoId = 3;
        }
        if (photoId > 4) {
            photoId = 2;
        }

        map.put("photoId", photoId);
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    /**
     * Allows one to write literal lucene search queries against the index
     * TODO This is still very much WIP
     */
    get("/literal/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);
        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q")) {
            String query = req.queryParams("q").trim();

            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String altquery = query.replaceAll("[^A-Za-z0-9 ]", " ").trim().replaceAll(" +", " ");

            SearchResult searchResult = cs.search(query, page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), altquery, false));

            map.put("searchValue", query);
            map.put("searchResult", searchResult);
            map.put("reposQueryString", "");
            map.put("langsQueryString", "");

            map.put("altQuery", "");

            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "searchresults.ftl");
        }

        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    /**
     * This is the endpoint used by the frontend.
     */
    get("/api/codesearch/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);

        if (req.queryParams().contains("q") && req.queryParams("q").trim() != "") {
            String query = req.queryParams("q").trim();

            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String[] repos = new String[0];
            String[] langs = new String[0];
            String[] owners = new String[0];
            String reposFilter = "";
            String langsFilter = "";
            String ownersFilter = "";

            if (req.queryParams().contains("repo")) {
                repos = req.queryParamsValues("repo");

                if (repos.length != 0) {
                    List<String> reposList = Arrays.asList(repos).stream()
                            .map((s) -> "reponame:" + QueryParser.escape(s)).collect(Collectors.toList());

                    reposFilter = " && (" + StringUtils.join(reposList, " || ") + ")";
                }
            }

            if (req.queryParams().contains("lan")) {
                langs = req.queryParamsValues("lan");

                if (langs.length != 0) {
                    List<String> langsList = Arrays.asList(langs).stream()
                            .map((s) -> "languagename:" + QueryParser.escape(s)).collect(Collectors.toList());

                    langsFilter = " && (" + StringUtils.join(langsList, " || ") + ")";
                }
            }

            if (req.queryParams().contains("own")) {
                owners = req.queryParamsValues("own");

                if (owners.length != 0) {
                    List<String> ownersList = Arrays.asList(owners).stream()
                            .map((s) -> "codeowner:" + QueryParser.escape(s)).collect(Collectors.toList());

                    ownersFilter = " && (" + StringUtils.join(ownersList, " || ") + ")";
                }
            }

            // The filters need to be part of the cache key along with the query and page
            String cacheKey = query + page + reposFilter + langsFilter + ownersFilter;

            if (cache.containsKey(cacheKey)) {
                return cache.get(cacheKey);
            }

            // Split the query, escape each term, and AND the terms together
            String cleanQueryString = scl.formatQueryString(query);

            SearchResult searchResult = cs.search(cleanQueryString + reposFilter + langsFilter + ownersFilter,
                    page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), query, true));

            searchResult.setQuery(query);

            for (String altQuery : scl.generateAltQueries(query)) {
                searchResult.addAltQuery(altQuery);
            }

            // Null out the code as it isn't required and there is no point bloating our AJAX responses
            for (CodeResult codeSearchResult : searchResult.getCodeResultList()) {
                codeSearchResult.setCode(null);
            }

            cache.put(cacheKey, searchResult);
            return searchResult;
        }

        return null;
    }, new JsonTransformer());

    get("/api/repo/add/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");
        String reponames = request.queryParams("reponame");
        String repourls = request.queryParams("repourl");
        String repotype = request.queryParams("repotype");
        String repousername = request.queryParams("repousername");
        String repopassword = request.queryParams("repopassword");
        String reposource = request.queryParams("reposource");
        String repobranch = request.queryParams("repobranch");

        if (reponames == null || reponames.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "reponame is a required parameter");
        }

        if (repourls == null || repourls.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "repourl is a required parameter");
        }

        if (repotype == null) {
            return new ApiResponse(false, "repotype is a required parameter");
        }

        if (repousername == null) {
            return new ApiResponse(false, "repousername is a required parameter");
        }

        if (repopassword == null) {
            return new ApiResponse(false, "repopassword is a required parameter");
        }

        if (reposource == null) {
            return new ApiResponse(false, "reposource is a required parameter");
        }

        if (repobranch == null) {
            return new ApiResponse(false, "repobranch is a required parameter");
        }

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }

            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format(
                    "pub=%s&reponame=%s&repourl=%s&repotype=%s&repousername=%s&repopassword=%s&reposource=%s&repobranch=%s",
                    URLEncoder.encode(publicKey), URLEncoder.encode(reponames), URLEncoder.encode(repourls),
                    URLEncoder.encode(repotype), URLEncoder.encode(repousername),
                    URLEncoder.encode(repopassword), URLEncoder.encode(reposource),
                    URLEncoder.encode(repobranch));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);

            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        // Clean and normalise the remaining inputs
        if (repobranch == null || repobranch.trim().equals(Values.EMPTYSTRING)) {
            repobranch = "master";
        }

        repotype = repotype.trim().toLowerCase();
        if (!"git".equals(repotype) && !"svn".equals(repotype)) {
            repotype = "git";
        }

        RepoResult repoResult = repo.getRepoByName(reponames);

        if (repoResult != null) {
            return new ApiResponse(false, "repository name already exists");
        }

        repo.saveRepo(new RepoResult(-1, reponames, repotype, repourls, repousername, repopassword, reposource,
                repobranch));

        return new ApiResponse(true, "added repository successfully");
    }, new JsonTransformer());

    get("/api/repo/delete/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");
        String reponames = request.queryParams("reponame");

        if (reponames == null || reponames.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "reponame is a required parameter");
        }

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }

            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format("pub=%s&reponame=%s", URLEncoder.encode(publicKey),
                    URLEncoder.encode(reponames));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);

            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        RepoResult rr = repo.getRepoByName(reponames);
        if (rr == null) {
            return new ApiResponse(false, "repository already deleted");
        }

        Singleton.getUniqueDeleteRepoQueue().add(rr);

        return new ApiResponse(true, "repository queued for deletion");
    }, new JsonTransformer());

    get("/api/repo/list/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }

            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format("pub=%s", URLEncoder.encode(publicKey));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);

            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        List<RepoResult> repoResultList = repo.getAllRepo();

        return new RepoResultApiResponse(true, Values.EMPTYSTRING, repoResultList);
    }, new JsonTransformer());

    get("/admin/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        CodeSearcher cs = new CodeSearcher();

        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());

        map.put("numSearches", statsService.getSearchCount());
        map.put("uptime", statsService.getUptime());

        // Put all properties here
        map.put(Values.SQLITEFILE,
                Properties.getProperties().getProperty(Values.SQLITEFILE, Values.DEFAULTSQLITEFILE));
        map.put(Values.SERVERPORT,
                Properties.getProperties().getProperty(Values.SERVERPORT, Values.DEFAULTSERVERPORT));
        map.put(Values.REPOSITORYLOCATION, Properties.getProperties().getProperty(Values.REPOSITORYLOCATION,
                Values.DEFAULTREPOSITORYLOCATION));
        map.put(Values.INDEXLOCATION,
                Properties.getProperties().getProperty(Values.INDEXLOCATION, Values.DEFAULTINDEXLOCATION));
        map.put(Values.FACETSLOCATION,
                Properties.getProperties().getProperty(Values.FACETSLOCATION, Values.DEFAULTFACETSLOCATION));
        map.put(Values.CHECKREPOCHANGES, Properties.getProperties().getProperty(Values.CHECKREPOCHANGES,
                Values.DEFAULTCHECKREPOCHANGES));
        map.put(Values.ONLYLOCALHOST,
                Properties.getProperties().getProperty(Values.ONLYLOCALHOST, Values.DEFAULTONLYLOCALHOST));
        map.put(Values.LOWMEMORY,
                Properties.getProperties().getProperty(Values.LOWMEMORY, Values.DEFAULTLOWMEMORY));
        map.put(Values.SPELLINGCORRECTORSIZE, Properties.getProperties()
                .getProperty(Values.SPELLINGCORRECTORSIZE, Values.DEFAULTSPELLINGCORRECTORSIZE));
        map.put(Values.USESYSTEMGIT,
                Properties.getProperties().getProperty(Values.USESYSTEMGIT, Values.DEFAULTUSESYSTEMGIT));
        map.put(Values.GITBINARYPATH,
                Properties.getProperties().getProperty(Values.GITBINARYPATH, Values.DEFAULTGITBINARYPATH));
        map.put(Values.APIENABLED,
                Properties.getProperties().getProperty(Values.APIENABLED, Values.DEFAULTAPIENABLED));
        map.put(Values.APIKEYAUTH,
                Properties.getProperties().getProperty(Values.APIKEYAUTH, Values.DEFAULTAPIKEYAUTH));
        map.put(Values.SVNBINARYPATH,
                Properties.getProperties().getProperty(Values.SVNBINARYPATH, Values.DEFAULTSVNBINARYPATH));
        map.put(Values.SVNENABLED,
                Properties.getProperties().getProperty(Values.SVNENABLED, Values.DEFAULTSVNENABLED));
        map.put(Values.MAXDOCUMENTQUEUESIZE, Properties.getProperties().getProperty(Values.MAXDOCUMENTQUEUESIZE,
                Values.DEFAULTMAXDOCUMENTQUEUESIZE));
        map.put(Values.MAXDOCUMENTQUEUELINESIZE, Properties.getProperties()
                .getProperty(Values.MAXDOCUMENTQUEUELINESIZE, Values.DEFAULTMAXDOCUMENTQUEUELINESIZE));
        map.put(Values.MAXFILELINEDEPTH, Properties.getProperties().getProperty(Values.MAXFILELINEDEPTH,
                Values.DEFAULTMAXFILELINEDEPTH));

        map.put("deletionQueue", Singleton.getUniqueDeleteRepoQueue().size());

        map.put("version", VERSION);

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin.ftl");
    }, new FreeMarkerEngine());

    get("/admin/repo/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        int repoCount = repo.getRepoCount();
        String offSet = request.queryParams("offset");
        String searchQuery = request.queryParams("q");
        int indexOffset = 0;

        Map<String, Object> map = new HashMap<>();

        if (offSet != null) {
            try {
                indexOffset = Integer.parseInt(offSet);
                if (indexOffset > repoCount || indexOffset < 0) {
                    indexOffset = 0;
                }

            } catch (NumberFormatException ex) {
                indexOffset = 0;
            }
        }

        if (searchQuery != null) {
            map.put("repoResults", repo.searchRepo(searchQuery));
        } else {
            map.put("repoResults", repo.getPagedRepo(indexOffset, 100));
        }

        map.put("searchQuery", searchQuery);
        map.put("hasPrevious", indexOffset > 0);
        map.put("hasNext", (indexOffset + 100) < repoCount);
        map.put("previousOffset", "" + (indexOffset - 100));
        map.put("nextOffset", "" + (indexOffset + 100));

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_repo.ftl");
    }, new FreeMarkerEngine());

    get("/admin/bulk/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_bulk.ftl");
    }, new FreeMarkerEngine());

    get("/admin/api/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();

        map.put("apiKeys", api.getAllApi());

        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        map.put("apiAuthentication", apiEnabled && apiAuth);
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_api.ftl");
    }, new FreeMarkerEngine());

    post("/admin/api/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        apiService.createKeys();

        response.redirect("/admin/api/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    get("/admin/api/delete/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null || !request.queryParams().contains("publicKey")) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String publicKey = request.queryParams("publicKey");
        apiService.deleteKey(publicKey);

        return true;
    }, new JsonTransformer());

    get("/admin/settings/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String[] highlighters = "agate,androidstudio,arta,ascetic,atelier-cave.dark,atelier-cave.light,atelier-dune.dark,atelier-dune.light,atelier-estuary.dark,atelier-estuary.light,atelier-forest.dark,atelier-forest.light,atelier-heath.dark,atelier-heath.light,atelier-lakeside.dark,atelier-lakeside.light,atelier-plateau.dark,atelier-plateau.light,atelier-savanna.dark,atelier-savanna.light,atelier-seaside.dark,atelier-seaside.light,atelier-sulphurpool.dark,atelier-sulphurpool.light,brown_paper,codepen-embed,color-brewer,dark,darkula,default,docco,far,foundation,github-gist,github,googlecode,grayscale,hopscotch,hybrid,idea,ir_black,kimbie.dark,kimbie.light,magula,mono-blue,monokai,monokai_sublime,obsidian,paraiso.dark,paraiso.light,pojoaque,railscasts,rainbow,school_book,solarized_dark,solarized_light,sunburst,tomorrow-night-blue,tomorrow-night-bright,tomorrow-night-eighties,tomorrow-night,tomorrow,vs,xcode,zenburn"
                .split(",");

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("syntaxHighlighter", getSyntaxHighlighter());
        map.put("highlighters", highlighters);
        map.put("averageSalary", "" + (int) getAverageSalary());
        map.put("matchLines", "" + (int) getMatchLines());
        map.put("maxLineDepth", "" + (int) getMaxLineDepth());
        map.put("minifiedLength", "" + (int) getMinifiedLength());
        map.put("isCommunity", ISCOMMUNITY);

        return new ModelAndView(map, "admin_settings.ftl");
    }, new FreeMarkerEngine());

    get("/admin/reports/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);

        return new ModelAndView(map, "admin_reports.ftl");
    }, new FreeMarkerEngine());

    post("/admin/settings/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        if (ISCOMMUNITY) {
            response.redirect("/admin/settings/");
            halt();
        }

        String logo = request.queryParams("logo").trim();
        String syntaxHighlighter = request.queryParams("syntaxhighligher");

        try {
            double averageSalary = Double.parseDouble(request.queryParams("averagesalary"));
            data.saveData(Values.AVERAGESALARY, "" + (int) averageSalary);
        } catch (NumberFormatException ex) {
            data.saveData(Values.AVERAGESALARY, Values.DEFAULTAVERAGESALARY);
        }

        try {
            double averageSalary = Double.parseDouble(request.queryParams("matchlines"));
            data.saveData(Values.MATCHLINES, "" + (int) averageSalary);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MATCHLINES, Values.DEFAULTMATCHLINES);
        }

        try {
            double averageSalary = Double.parseDouble(request.queryParams("maxlinedepth"));
            data.saveData(Values.MAXLINEDEPTH, "" + (int) averageSalary);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MAXLINEDEPTH, Values.DEFAULTMAXLINEDEPTH);
        }

        try {
            double minifiedlength = Double.parseDouble(request.queryParams("minifiedlength"));
            data.saveData(Values.MINIFIEDLENGTH, "" + (int) minifiedlength);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MINIFIEDLENGTH, Values.DEFAULTMINIFIEDLENGTH);
        }

        data.saveData(Values.LOGO, logo);
        data.saveData(Values.SYNTAXHIGHLIGHTER, syntaxHighlighter);

        // Redo anything that requires updates at this point
        scl = Singleton.getSearchcodeLib(data);

        response.redirect("/admin/settings/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    post("/admin/bulk/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String repos = request.queryParams("repos");
        String[] repolines = repos.split("\\r?\\n");

        for (String line : repolines) {
            String[] repoparams = line.split(",", -1);

            if (repoparams.length == 7) {

                String branch = repoparams[6].trim();
                if (branch.equals(Values.EMPTYSTRING)) {
                    branch = "master";
                }

                String scm = repoparams[1].trim().toLowerCase();
                if (scm.equals(Values.EMPTYSTRING)) {
                    scm = "git";
                }

                RepoResult rr = repo.getRepoByName(repoparams[0]);

                if (rr == null) {
                    repo.saveRepo(new RepoResult(-1, repoparams[0], scm, repoparams[2], repoparams[3],
                            repoparams[4], repoparams[5], branch));
                }
            }
        }

        response.redirect("/admin/bulk/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    post("/admin/repo/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String[] reponames = request.queryParamsValues("reponame");
        String[] reposcms = request.queryParamsValues("reposcm");
        String[] repourls = request.queryParamsValues("repourl");
        String[] repousername = request.queryParamsValues("repousername");
        String[] repopassword = request.queryParamsValues("repopassword");
        String[] reposource = request.queryParamsValues("reposource");
        String[] repobranch = request.queryParamsValues("repobranch");

        for (int i = 0; i < reponames.length; i++) {
            if (reponames[i].trim().length() != 0) {

                String branch = repobranch[i].trim();
                if (branch.equals(Values.EMPTYSTRING)) {
                    branch = "master";
                }

                repo.saveRepo(new RepoResult(-1, reponames[i], reposcms[i], repourls[i], repousername[i],
                        repopassword[i], reposource[i], branch));
            }
        }

        response.redirect("/admin/repo/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    get("/login/", (request, response) -> {
        if (getAuthenticatedUser(request) != null) {
            response.redirect("/admin/");
            halt();
            return null;
        }
        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "login.ftl");
    }, new FreeMarkerEngine());

    post("/login/", (req, res) -> {
        if (req.queryParams().contains("password") && req.queryParams("password")
                .equals(com.searchcode.app.util.Properties.getProperties().getProperty("password"))) {
            addAuthenticatedUser(req);
            res.redirect("/admin/");
            halt();
        }
        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "login.ftl");
    }, new FreeMarkerEngine());

    get("/logout/", (req, res) -> {
        removeAuthenticatedUser(req);
        res.redirect("/");
        return null;
    });

    get("/admin/delete/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null || !request.queryParams().contains("repoName")) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String repoName = request.queryParams("repoName");
        RepoResult rr = repo.getRepoByName(repoName);

        if (rr != null) {
            Singleton.getUniqueDeleteRepoQueue().add(rr);
        }

        return true;
    }, new JsonTransformer());

    get("/admin/checkversion/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String version;
        try {
            version = IOUtils.toString(new URL("https://searchcode.com/product/version/")).replace("\"",
                    Values.EMPTYSTRING);
        } catch (IOException ex) {
            return "Unable to determine if running the latest version. Check https://searchcode.com/product/download/ for the latest release.";
        }

        if (App.VERSION.equals(version)) {
            return "Your searchcode server version " + version + " is the latest.";
        } else {
            return "Your searchcode server version " + App.VERSION
                    + " instance is out of date. The latest version is " + version + ".";
        }
    }, new JsonTransformer());

    get("/file/:codeid/:reponame/*", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        CodeSearcher cs = new CodeSearcher();
        Cocomo2 coco = new Cocomo2();

        StringBuilder code = new StringBuilder();

        String fileName = Values.EMPTYSTRING;
        if (request.splat().length != 0) {
            fileName = request.splat()[0];
        }

        CodeResult codeResult = cs.getByRepoFileName(request.params(":reponame"), fileName);

        if (codeResult == null) {
            int codeid = Integer.parseInt(request.params(":codeid"));
            codeResult = cs.getById(codeid);
        }

        if (codeResult == null) {
            response.redirect("/404/");
            halt();
        }

        List<String> codeLines = codeResult.code;
        for (int i = 0; i < codeLines.size(); i++) {
            code.append("<span id=\"" + (i + 1) + "\"></span>");
            code.append(StringEscapeUtils.escapeHtml4(codeLines.get(i)));
            code.append("\n");
        }

        boolean highlight = true;
        if (Integer.parseInt(codeResult.codeLines) > 1000) {
            highlight = false;
        }

        RepoResult repoResult = repo.getRepoByName(codeResult.repoName);

        if (repoResult != null) {
            map.put("source", repoResult.getSource());
        }

        map.put("fileName", codeResult.fileName);
        map.put("codePath", codeResult.codePath);
        map.put("codeLength", codeResult.codeLines);
        map.put("languageName", codeResult.languageName);
        map.put("md5Hash", codeResult.md5hash);
        map.put("repoName", codeResult.repoName);
        map.put("highlight", highlight);
        map.put("repoLocation", codeResult.getRepoLocation());

        map.put("codeValue", code.toString());
        map.put("highligher", getSyntaxHighlighter());
        map.put("codeOwner", codeResult.getCodeOwner());

        double estimatedEffort = coco.estimateEffort(scl.countFilteredLines(codeResult.getCode()));
        int estimatedCost = (int) coco.estimateCost(estimatedEffort, getAverageSalary());
        if (estimatedCost != 0 && !scl.languageCostIgnore(codeResult.getLanguageName())) {
            map.put("estimatedCost", estimatedCost);
        }

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "coderesult.ftl");
    }, new FreeMarkerEngine());

    /**
     * Deprecated, should not be used.
     * TODO delete this method.
     */
    get("/codesearch/view/:codeid", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        int codeid = Integer.parseInt(request.params(":codeid"));
        CodeSearcher cs = new CodeSearcher();
        Cocomo2 coco = new Cocomo2();

        StringBuilder code = new StringBuilder();

        // Escape all the lines and include a deep link anchor for each line number
        CodeResult codeResult = cs.getById(codeid);

        if (codeResult == null) {
            response.redirect("/404/");
            halt();
        }

        List<String> codeLines = codeResult.code;
        for (int i = 0; i < codeLines.size(); i++) {
            code.append("<span id=\"" + (i + 1) + "\"></span>");
            code.append(StringEscapeUtils.escapeHtml4(codeLines.get(i)));
            code.append("\n");
        }

        boolean highlight = true;
        if (Integer.parseInt(codeResult.codeLines) > 1000) {
            highlight = false;
        }

        RepoResult repoResult = repo.getRepoByName(codeResult.repoName);

        if (repoResult != null) {
            map.put("source", repoResult.getSource());
        }

        map.put("fileName", codeResult.fileName);
        map.put("codePath", codeResult.codePath);
        map.put("codeLength", codeResult.codeLines);
        map.put("languageName", codeResult.languageName);
        map.put("md5Hash", codeResult.md5hash);
        map.put("repoName", codeResult.repoName);
        map.put("highlight", highlight);
        map.put("repoLocation", codeResult.getRepoLocation());

        map.put("codeValue", code.toString());
        map.put("highligher", getSyntaxHighlighter());
        map.put("codeOwner", codeResult.getCodeOwner());

        double estimatedEffort = coco.estimateEffort(scl.countFilteredLines(codeResult.getCode()));
        int estimatedCost = (int) coco.estimateCost(estimatedEffort, getAverageSalary());
        if (estimatedCost != 0 && !scl.languageCostIgnore(codeResult.getLanguageName())) {
            map.put("estimatedCost", estimatedCost);
        }

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "coderesult.ftl");
    }, new FreeMarkerEngine());

    get("/documentation/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "documentation.ftl");
    }, new FreeMarkerEngine());

    get("/search_test/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "search_test.ftl");
    }, new FreeMarkerEngine());

    get("/404/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "404.ftl");

    }, new FreeMarkerEngine());

    /**
     * Test that was being used to display blame information
     */
    //        get("/test/:reponame/*", (request, response) -> {
    //            User user = injector.getInstance(User.class);
    //            user.Blame(request.params(":reponame"), request.splat()[0]);
    //            return "";
    //        }, new JsonTransformer());
}
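
Note: the /api/codesearch/ route above caches whole SearchResult objects in a map keyed on the query, the page, and the filter strings, via containsKey/get/put. Below is a minimal sketch of that caching pattern with a plain HashMap; the QueryCache class, the Supplier callback, and the key layout are illustrative stand-ins rather than part of the project above, and since a plain HashMap never evicts, a bounded or expiring cache would normally be preferable in a long-running server.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

public class QueryCache {
    // Cached search results keyed on query + page + filters, as in the route above.
    private final Map<String, Object> cache = new HashMap<>();

    /** Returns the cached result for the key, computing and storing it on a miss. */
    public Object getOrSearch(String query, int page, String filters, Supplier<Object> search) {
        String cacheKey = query + page + filters;
        if (cache.containsKey(cacheKey)) {
            return cache.get(cacheKey);
        }
        Object searchResult = search.get();
        cache.put(cacheKey, searchResult);
        return searchResult;
    }
}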

From source file:edu.brown.benchmark.seats.util.GenerateHistograms.java

public static void main(String[] vargs) throws Exception {
    ArgumentsParser args = ArgumentsParser.load(vargs);

    File csv_path = new File(args.getOptParam(0));
    File output_path = new File(args.getOptParam(1));

    GenerateHistograms gh = GenerateHistograms.generate(csv_path);

    Map<String, Object> m = new ListOrderedMap<String, Object>();
    m.put("Airport Codes", gh.flights_per_airport.size());
    m.put("Airlines", gh.flights_per_airline.getValueCount());
    m.put("Departure Times", gh.flights_per_time.getValueCount());
    LOG.info(StringUtil.formatMaps(m));

    System.err.println(StringUtil.join("\n", gh.flights_per_airport.keySet()));

    Map<String, Histogram<?>> histograms = new HashMap<String, Histogram<?>>();
    histograms.put(SEATSConstants.HISTOGRAM_FLIGHTS_PER_DEPART_TIMES, gh.flights_per_time);
    // histograms.put(SEATSConstants.HISTOGRAM_FLIGHTS_PER_AIRLINE, gh.flights_per_airline);
    histograms.put(SEATSConstants.HISTOGRAM_FLIGHTS_PER_AIRPORT,
            SEATSHistogramUtil.collapseAirportFlights(gh.flights_per_airport));

    for (Entry<String, Histogram<?>> e : histograms.entrySet()) {
        File output_file = new File(output_path.getAbsolutePath() + "/" + e.getKey() + ".histogram");
        LOG.info(String.format("Writing out %s data to '%s' [samples=%d, values=%d]", e.getKey(), output_file,
                e.getValue().getSampleCount(), e.getValue().getValueCount()));
        e.getValue().save(output_file);
    } // FOR
}
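
Note: GenerateHistograms above keys each histogram by a constant name in a HashMap and then iterates entrySet() to write one file per entry. Below is a minimal sketch of the same keyed-iteration idea, counting value frequencies with a plain HashMap; the airport codes are made-up sample data and the benchmark's Histogram type is not used.

import java.util.HashMap;
import java.util.Map;

public class FrequencyCount {
    public static void main(String[] args) {
        String[] airports = { "BOS", "JFK", "BOS", "LAX", "BOS" };

        // Count how often each airport code appears.
        Map<String, Integer> counts = new HashMap<>();
        for (String code : airports) {
            counts.merge(code, 1, Integer::sum);
        }

        // Iterate the entry set, as the example above does when writing one file per histogram.
        for (Map.Entry<String, Integer> e : counts.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}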

From source file:com.google.appengine.tools.pipeline.impl.util.JsonUtils.java

public static void main(String[] args) throws Exception {
    JSONObject x = new JSONObject();
    x.put("first", 5);
    x.put("second", 7);
    debugPrint("hello");
    debugPrint(7);
    debugPrint(3.14159);
    debugPrint("");
    debugPrint('x');
    debugPrint(x);
    debugPrint(null);

    Map<String, Integer> map = new HashMap<>();
    map.put("first", 5);
    map.put("second", 7);
    debugPrint(map);

    int[] array = new int[] { 5, 7 };
    debugPrint(array);

    ArrayList<Integer> arrayList = new ArrayList<>(2);
    arrayList.add(5);
    arrayList.add(7);
    debugPrint(arrayList);

    Collection<Integer> collection = new HashSet<>(2);
    collection.add(5);
    collection.add(7);
    debugPrint(collection);

    Object object = new Object();
    debugPrint(object);

    Map<String, String> map1 = new HashMap<>();
    map1.put("a", "hello");
    map1.put("b", "goodbye");

    Object[] array2 = new Object[] { 17, "yes", "no", map1 };

    Map<String, Object> map2 = new HashMap<>();
    map2.put("first", 5.4);
    map2.put("second", array2);
    map2.put("third", map1);

    debugPrint(map2);

    class MyBean {
        @SuppressWarnings("unused")
        public int getX() {
            return 11;
        }

        @SuppressWarnings("unused")
        public boolean isHot() {
            return true;
        }

        @SuppressWarnings("unused")
        public String getName() {
            return "yellow";
        }
    }
    debugPrint(new MyBean());

}
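
Note: the JsonUtils example above feeds nested HashMap structures to a debugPrint helper that is internal to that class. Below is a minimal, self-contained sketch of the same idea using org.json.JSONObject directly; it assumes only that the org.json library (already used by the example) is on the classpath.

import java.util.HashMap;
import java.util.Map;

import org.json.JSONObject;

public class NestedMapToJson {
    public static void main(String[] args) throws Exception {
        Map<String, String> inner = new HashMap<>();
        inner.put("a", "hello");
        inner.put("b", "goodbye");

        Map<String, Object> outer = new HashMap<>();
        outer.put("first", 5.4);
        outer.put("second", new Object[] { 17, "yes", "no", inner });
        outer.put("third", inner);

        // JSONObject(Map) wraps nested maps, collections and arrays recursively.
        System.out.println(new JSONObject(outer).toString(2));
    }
}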

From source file:com.twentyn.patentScorer.ScoreMerger.java

public static void main(String[] args) throws Exception {
    System.out.println("Starting up...");
    System.out.flush();
    Options opts = new Options();
    opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build());

    opts.addOption(Option.builder("r").longOpt("results").required().hasArg()
            .desc("A directory of search results to read").build());
    opts.addOption(Option.builder("s").longOpt("scores").required().hasArg()
            .desc("A directory of patent classification scores to read").build());
    opts.addOption(Option.builder("o").longOpt("output").required().hasArg()
            .desc("The output file where results will be written.").build());

    HelpFormatter helpFormatter = new HelpFormatter();
    CommandLineParser cmdLineParser = new DefaultParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = cmdLineParser.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Caught exception when parsing command line: " + e.getMessage());
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }

    if (cmdLine.hasOption("help")) {
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(0);
    }
    File scoresDirectory = new File(cmdLine.getOptionValue("scores"));
    if (cmdLine.getOptionValue("scores") == null || !scoresDirectory.isDirectory()) {
        LOGGER.error("Not a directory of score files: " + cmdLine.getOptionValue("scores"));
    }

    File resultsDirectory = new File(cmdLine.getOptionValue("results"));
    if (cmdLine.getOptionValue("results") == null || !resultsDirectory.isDirectory()) {
        LOGGER.error("Not a directory of results files: " + cmdLine.getOptionValue("results"));
    }

    FileWriter outputWriter = new FileWriter(cmdLine.getOptionValue("output"));

    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);

    FilenameFilter jsonFilter = new FilenameFilter() {
        public final Pattern JSON_PATTERN = Pattern.compile("\\.json$");

        public boolean accept(File dir, String name) {
            return JSON_PATTERN.matcher(name).find();
        }
    };

    Map<String, PatentScorer.ClassificationResult> scores = new HashMap<>();
    LOGGER.info("Reading scores from directory at " + scoresDirectory.getAbsolutePath());
    for (File scoreFile : scoresDirectory.listFiles(jsonFilter)) {
        BufferedReader reader = new BufferedReader(new FileReader(scoreFile));
        int count = 0;
        String line;
        while ((line = reader.readLine()) != null) {
            PatentScorer.ClassificationResult res = objectMapper.readValue(line,
                    PatentScorer.ClassificationResult.class);
            scores.put(res.docId, res);
            count++;
        }
        LOGGER.info("Read " + count + " scores from " + scoreFile.getAbsolutePath());
    }

    Map<String, List<DocumentSearch.SearchResult>> synonymsToResults = new HashMap<>();
    Map<String, List<DocumentSearch.SearchResult>> inchisToResults = new HashMap<>();
    LOGGER.info("Reading results from directory at " + resultsDirectory);
    // With help from http://stackoverflow.com/questions/6846244/jackson-and-generic-type-reference.
    JavaType resultsType = objectMapper.getTypeFactory().constructCollectionType(List.class,
            DocumentSearch.SearchResult.class);

    List<File> resultsFiles = Arrays.asList(resultsDirectory.listFiles(jsonFilter));
    Collections.sort(resultsFiles, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });
    for (File resultsFile : resultsFiles) {
        BufferedReader reader = new BufferedReader(new FileReader(resultsFile));
        CharBuffer buffer = CharBuffer.allocate(Long.valueOf(resultsFile.length()).intValue());
        int bytesRead = reader.read(buffer);
        LOGGER.info("Read " + bytesRead + " bytes from " + resultsFile.getName() + " (length is "
                + resultsFile.length() + ")");
        List<DocumentSearch.SearchResult> results = objectMapper.readValue(new CharArrayReader(buffer.array()),
                resultsType);

        LOGGER.info("Read " + results.size() + " results from " + resultsFile.getAbsolutePath());

        int count = 0;
        for (DocumentSearch.SearchResult sres : results) {
            for (DocumentSearch.ResultDocument resDoc : sres.getResults()) {
                String docId = resDoc.getDocId();
                PatentScorer.ClassificationResult classificationResult = scores.get(docId);
                if (classificationResult == null) {
                    LOGGER.warn("No classification result found for " + docId);
                } else {
                    resDoc.setClassifierScore(classificationResult.getScore());
                }
            }
            if (!synonymsToResults.containsKey(sres.getSynonym())) {
                synonymsToResults.put(sres.getSynonym(), new ArrayList<DocumentSearch.SearchResult>());
            }
            synonymsToResults.get(sres.getSynonym()).add(sres);
            count++;
            if (count % 1000 == 0) {
                LOGGER.info("Processed " + count + " search result documents");
            }
        }
    }

    Comparator<DocumentSearch.ResultDocument> resultDocumentComparator = new Comparator<DocumentSearch.ResultDocument>() {
        @Override
        public int compare(DocumentSearch.ResultDocument o1, DocumentSearch.ResultDocument o2) {
            int cmp = o2.getClassifierScore().compareTo(o1.getClassifierScore());
            if (cmp != 0) {
                return cmp;
            }
            cmp = o2.getScore().compareTo(o1.getScore());
            return cmp;
        }
    };

    for (Map.Entry<String, List<DocumentSearch.SearchResult>> entry : synonymsToResults.entrySet()) {
        DocumentSearch.SearchResult newSearchRes = null;
        // Merge all result documents into a single search result.
        for (DocumentSearch.SearchResult sr : entry.getValue()) {
            if (newSearchRes == null) {
                newSearchRes = sr;
            } else {
                newSearchRes.getResults().addAll(sr.getResults());
            }
        }
        if (newSearchRes == null || newSearchRes.getResults() == null) {
            LOGGER.error("Search results for " + entry.getKey() + " are null.");
            continue;
        }
        Collections.sort(newSearchRes.getResults(), resultDocumentComparator);
        if (!inchisToResults.containsKey(newSearchRes.getInchi())) {
            inchisToResults.put(newSearchRes.getInchi(), new ArrayList<DocumentSearch.SearchResult>());
        }
        inchisToResults.get(newSearchRes.getInchi()).add(newSearchRes);
    }

    List<String> sortedKeys = new ArrayList<String>(inchisToResults.keySet());
    Collections.sort(sortedKeys);
    List<GroupedInchiResults> orderedResults = new ArrayList<>(sortedKeys.size());
    Comparator<DocumentSearch.SearchResult> synonymSorter = new Comparator<DocumentSearch.SearchResult>() {
        @Override
        public int compare(DocumentSearch.SearchResult o1, DocumentSearch.SearchResult o2) {
            return o1.getSynonym().compareTo(o2.getSynonym());
        }
    };
    for (String inchi : sortedKeys) {
        List<DocumentSearch.SearchResult> res = inchisToResults.get(inchi);
        Collections.sort(res, synonymSorter);
        orderedResults.add(new GroupedInchiResults(inchi, res));
    }

    objectMapper.writerWithView(Object.class).writeValue(outputWriter, orderedResults);
    outputWriter.close();
}
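
Note: both synonymsToResults and inchisToResults above are built with the check-then-put pattern (containsKey, put a new ArrayList, then get(...).add(...)). Below is a minimal sketch of the same grouping using HashMap's computeIfAbsent instead, with plain strings standing in for the DocumentSearch.SearchResult type; the sample data is illustrative only.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupBySynonym {
    public static void main(String[] args) {
        String[][] results = { { "aspirin", "doc-1" }, { "acetylsalicylic acid", "doc-2" },
                { "aspirin", "doc-3" } };

        // Group document ids by synonym; computeIfAbsent creates the list on first use.
        Map<String, List<String>> bySynonym = new HashMap<>();
        for (String[] r : results) {
            bySynonym.computeIfAbsent(r[0], k -> new ArrayList<>()).add(r[1]);
        }

        System.out.println(bySynonym);
    }
}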