List of usage examples for java.nio.file.Path.toAbsolutePath()
Path toAbsolutePath();
From source file:Test.java
public static void main(String[] args) { Path path = FileSystems.getDefault().getPath("/home/docs/status.txt"); path = Paths.get("/home", "docs", "users.txt"); System.out.println("Absolute path: " + path.toAbsolutePath()); // path = Paths.get("home", "docs", "users.txt"); // System.out.println("Absolute path: " + path.toAbsolutePath()); }
From source file:Test.java
/**
 * Demonstrates symbolic-link checks and the normalize / absolute / real-path
 * views of a Path.
 *
 * Note: toRealPath() requires the file to actually exist and throws
 * IOException otherwise, so this demo only completes on a matching filesystem.
 *
 * @throws Exception if the path cannot be resolved to a real file
 */
public static void main(String[] args) throws Exception {
    Path path1 = Paths.get("/home/docs/users.txt");
    Path path2 = Paths.get("/home/music/users.txt");
    System.out.println(Files.isSymbolicLink(path1));
    System.out.println(Files.isSymbolicLink(path2));
    // BUG FIX: the original called Paths.get(new URI("C:/home/./music/users.txt")).
    // That URI parses with scheme "C", and Paths.get(URI) only accepts a scheme
    // of an installed FileSystemProvider (normally "file"), so the call always
    // threw FileSystemNotFoundException. Build the path from the string form.
    Path path = Paths.get("C:/home/./music/users.txt");
    System.out.println("Normalized: " + path.normalize());
    System.out.println("Absolute path: " + path.toAbsolutePath());
    System.out.println("URI: " + path.toUri());
    System.out.println("toRealPath (Do not follow links): " + path.toRealPath(LinkOption.NOFOLLOW_LINKS));
    System.out.println("toRealPath: " + path.toRealPath());
}
From source file:org.kie.workbench.common.services.backend.compiler.offprocess.generator.ClassPathMavenGenerator.java
public static void main(String[] args) throws Exception { String kieVersion = args[0];// w w w . j ava 2 s . co m String baseDir = args[1]; String content = new String(Files.readAllBytes(Paths.get(baseDir + SEP + cpathPathFile))); String replaced = content.replace(getMavenRepo(), MAVEN_REPO_PLACEHOLDER); replaced = replaceTargetInTheClassPathFile(kieVersion, replaced); Path offProcessModule = Paths.get(baseDir + SEP + "target" + SEP + "classes" + SEP + classPathFile); write(offProcessModule.toAbsolutePath().toString(), replaced); logger.info( "\n************************************\nSaving {} to {} \n************************************\n\n", classPathFile, offProcessModule.toAbsolutePath().toString()); }
From source file:hdfs.MiniHDFS.java
/**
 * Starts a single-node MiniDFSCluster for testing, optionally Kerberos-secured.
 *
 * Usage: MiniHDFS <baseDirectory> [<kerberosPrincipal> <kerberosKeytab>]
 * Supplying the two Kerberos arguments switches the cluster into secure mode.
 * On successful startup a PID file and a port file are written into
 * baseDirectory so that a harness can find and stop this process.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 1 && args.length != 3) {
        throw new IllegalArgumentException("Expected: MiniHDFS <baseDirectory> [<kerberosPrincipal> <kerberosKeytab>], "
                + "got: " + Arrays.toString(args));
    }
    // Secure mode is implied by the presence of principal + keytab.
    boolean secure = args.length == 3;

    // configure Paths
    Path baseDir = Paths.get(args[0]);
    // hadoop-home/, so logs will not complain
    if (System.getenv("HADOOP_HOME") == null) {
        Path hadoopHome = baseDir.resolve("hadoop-home");
        Files.createDirectories(hadoopHome);
        System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
    }
    // hdfs-data/, where any data is going
    Path hdfsHome = baseDir.resolve("hdfs-data");

    // configure cluster
    Configuration cfg = new Configuration();
    cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString());
    // lower default permission: TODO: needed?
    cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766");

    // optionally configure security: Kerberos auth for namenode/datanode/web,
    // plus ACLs and block access tokens.
    if (secure) {
        String kerberosPrincipal = args[1];
        String keytabFile = args[2];
        cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true");
        cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
        cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
        cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
        cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile);
        cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile);
        cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true");
        cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true");
        cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true");
    }
    UserGroupInformation.setConfiguration(cfg);

    // TODO: remove hardcoded port!  (9998 when secure, 9999 otherwise)
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg);
    if (secure) {
        builder.nameNodePort(9998);
    } else {
        builder.nameNodePort(9999);
    }
    MiniDFSCluster dfs = builder.build();

    // Configure contents of the filesystem
    org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch");
    try (FileSystem fs = dfs.getFileSystem()) {
        // Set the elasticsearch user directory up
        fs.mkdirs(esUserPath);
        if (UserGroupInformation.isSecurityEnabled()) {
            // Grant the "elasticsearch" user full access via an ACL entry.
            List<AclEntry> acls = new ArrayList<>();
            acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch")
                    .setPermission(FsAction.ALL).build());
            fs.modifyAclEntries(esUserPath, acls);
        }
        // Install a pre-existing repository into HDFS, if the archive is
        // available on the classpath.
        String directoryName = "readonly-repository";
        String archiveName = directoryName + ".tar.gz";
        URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName);
        if (readOnlyRepositoryArchiveURL != null) {
            // Unpack the archive into a temp dir, copy it into HDFS, clean up.
            Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName());
            File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile();
            FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive);
            FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile());
            fs.copyFromLocalFile(true, true,
                    new org.apache.hadoop.fs.Path(
                            tempDirectory.resolve(directoryName).toAbsolutePath().toUri()),
                    esUserPath.suffix("/existing/" + directoryName));
            FileUtils.deleteDirectory(tempDirectory.toFile());
        }
    }

    // write our PID file (temp file + ATOMIC_MOVE so readers never observe a
    // partially written file)
    Path tmp = Files.createTempFile(baseDir, null, null);
    String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
    Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8));
    Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);

    // write our port file (same temp-then-atomic-move pattern)
    tmp = Files.createTempFile(baseDir, null, null);
    Files.write(tmp, Integer.toString(dfs.getNameNodePort()).getBytes(StandardCharsets.UTF_8));
    Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);
}
From source file:es.upm.oeg.tools.quality.ldsniffer.cmd.LDSnifferApp.java
/**
 * Command-line entry point: parses the CLI options, validates the input
 * sources (a single URL or a URL-list file), picks or creates a TDB
 * directory, and runs the quality evaluation over the collected URLs.
 *
 * Exits with status 0 for help/usage output and 1 on errors.
 */
public static void main(String[] args) {
    HelpFormatter help = new HelpFormatter();
    String header = "Assess a list of Linked Data resources using Linked Data Quality Model.";
    String footer = "Please report issues at https://github.com/nandana/ld-sniffer";
    try {
        CommandLine line = parseArguments(args);
        if (line.hasOption("help")) {
            help.printHelp("LDSnifferApp", header, OPTIONS, footer, true);
            System.exit(0);
        }
        // Evaluation timeout in minutes; defaults to 10 when -t is absent.
        evaluationTimeout = Integer.parseInt(line.getOptionValue("t", "10"));
        if (line.hasOption("md")) {
            includeMetricDefinitions = true;
        }
        if (line.hasOption("rdf")) {
            rdfOutput = true;
        }
        logger.info("URL List: " + line.getOptionValue("ul"));
        logger.info("TDB Path: " + line.getOptionValue("tdb"));
        logger.info("Metrics Path: " + line.getOptionValue("ml"));
        logger.info("Include Metric definitions: " + line.getOptionValue("ml"));
        logger.info("RDF output: " + line.getOptionValue("rdf"));
        logger.info("Timeout (mins): " + evaluationTimeout);
        // Fail fast if a metrics file was specified but does not exist.
        if (line.hasOption("ml")) {
            Path path = Paths.get(line.getOptionValue("ml"));
            if (!Files.exists(path)) {
                throw new IOException(path.toAbsolutePath().toString() + " : File doesn't exit.");
            }
        }
        // Set the TDB path; fall back to a throwaway temp directory.
        String tdbDirectory;
        if (line.hasOption("tdb")) {
            tdbDirectory = line.getOptionValue("tdb");
        } else {
            Path tempPath = Files.createTempDirectory("tdb_");
            tdbDirectory = tempPath.toAbsolutePath().toString();
        }
        // Create the URL list for the evaluation; exactly one of
        // -url / -ul must be present.
        if (!line.hasOption("ul") && !line.hasOption("url")) {
            System.out.println("One of the following parameters are required: url or urlList ");
            help.printHelp("LDSnifferApp", header, OPTIONS, footer, true);
            System.exit(0);
        } else if (line.hasOption("ul") && line.hasOption("url")) {
            System.out.println("You have to specify either url or urlList, not both.");
            help.printHelp("LDSnifferApp", header, OPTIONS, footer, true);
            System.exit(0);
        }
        List<String> urlList = null;
        if (line.hasOption("ul")) {
            // URL-list file: one URL per line.
            Path path = Paths.get(line.getOptionValue("ul"));
            logger.info("Path : " + path.toAbsolutePath().toString());
            logger.info("Path exits : " + Files.exists(path));
            urlList = Files.readAllLines(path, Charset.defaultCharset());
        } else if (line.hasOption("url")) {
            // Single-URL mode.
            urlList = new ArrayList<>();
            urlList.add(line.getOptionValue("url"));
        }
        Executor executor = new Executor(tdbDirectory, urlList);
        executor.execute();
    } catch (MissingOptionException e) {
        help.printHelp("LDSnifferApp", header, OPTIONS, footer, true);
        logger.error("Missing arguments. Reason: " + e.getMessage(), e);
        System.exit(1);
    } catch (ParseException e) {
        logger.error("Parsing failed. Reason: " + e.getMessage(), e);
        System.exit(1);
    } catch (IOException e) {
        logger.error("Execution failed. Reason: " + e.getMessage(), e);
        System.exit(1);
    }
}
From source file:at.ac.tuwien.ims.latex2mobiformulaconv.app.Main.java
/** * Main application method, may exit on error. * * @param args standard posix command line arguments *///from w w w . j a v a2s . c o m public static void main(String[] args) { logger.debug("main() started with args:"); for (int i = 0; i < args.length; i++) { logger.debug("args[" + i + "]" + ": " + args[i]); } // Initialize application applicationContext = new ClassPathXmlApplicationContext("/application-context.xml"); logger.debug("Application context loaded."); setupWorkingDirectory(); logger.debug("Working directory set up to: " + workingDirectory.toAbsolutePath().toString()); initializeOptions(); // Analyse options parseCli(args); logger.debug("CLI arguments parsed."); loadConfiguration(); logger.debug("Configuration loaded."); // Start conversion Converter converter; if (replaceWithPictures) { converter = (Converter) applicationContext.getBean("image-converter"); } else { converter = (Converter) applicationContext.getBean("dom-converter"); } // Decide which HtmlToMobi Converter to use HtmlToMobiConverter htmlToMobiConverter; if (useCalibreInsteadOfKindleGen) { htmlToMobiConverter = (HtmlToMobiConverter) applicationContext.getBean(CALIBRE_HTML2MOBI_CONVERTER); } else { // default is kindlegen htmlToMobiConverter = (HtmlToMobiConverter) applicationContext.getBean(KINDLEGEN_HTML2MOBI_CONVERTER); } converter.setHtmlToMobiConverter(htmlToMobiConverter); converter.setWorkingDirectory(workingDirectory); converter.setInputPaths(inputPaths); converter.setReplaceWithPictures(replaceWithPictures); converter.setOutputPath(outputPath); converter.setFilename(filename); converter.setTitle(title); converter.setDebugMarkupOutput(debugMarkupOutput); converter.setExportMarkup(exportMarkup); converter.setNoMobiConversion(noMobiConversion); try { Path resultFile = converter.convert(); logger.info("Result : " + resultFile.toAbsolutePath().toString()); } catch (FileNotFoundException e) { logger.error(e.getMessage()); } logger.debug("main() exit."); }
From source file:Main.java
public static void main(String[] args) throws Exception { Path path = Paths.get("test.txt"); try (AsynchronousFileChannel afc = AsynchronousFileChannel.open(path, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) { ByteBuffer dataBuffer = getDataBuffer(); Future<Integer> result = afc.write(dataBuffer, 0); while (!result.isDone()) { System.out.println("Sleeping for 2 seconds..."); Thread.sleep(2000);//from w ww . j a v a 2 s .c o m } int writtenBytes = result.get(); System.out.format("%s bytes written to %s%n", writtenBytes, path.toAbsolutePath()); } catch (IOException e) { e.printStackTrace(); } }
From source file:lucene.IndexFiles.java
/** Index all text files under a directory. */ public static void main(String[] args) { String usage = "java org.apache.lucene.demo.IndexFiles" + " [-index INDEX_PATH] [-docs DOCS_PATH] [-update]\n\n" + "This indexes the documents in DOCS_PATH, creating a Lucene index" + "in INDEX_PATH that can be searched with SearchFiles"; String indexPath = "E:/index26"; // is DFR/* ww w . j av a 2 s .co m*/ //2 is normal //3 is ib with h3 //4 is ib with h2 with porter stemmer //5 is ib with h2 with s stemmer //6 is ib with h2 without stemmer //7 is without stemmer without <p //8 is basic with all tags //9 is ib with h2 and stopwords without stemmer //10 like without ib, lower tags //11 like 10 with lower tags p, br and hr //12 like 11 with tags closed //13 is closed tags with punctuation replace and whitespace tokenizer with hyphen cared for //14 std tokenizer with hyphen taken cared for with stemmer //15 like 14 without stemming //16 like 15 with LMD //17 like 11 with LMD //18 with count of lower and upper delimiters of split //19 like 18 with (?i) to ignore case in all and valipas > 9 //20 with (?i) in all //21 is fresh 19 //22 is legalspans with LMD //23 is fresh 19 without 0 pass //24 is legalspans with InB2 //25 is 23 //26 is 25 with s stemmer and 0 //27 is legalspans demo of 50 passages //28 is modified legal span and fast //29 is 28 with s-stemming //30 is 28 with porter stemming String docsPath = "E:/documents/text"; boolean create = true; for (int i = 0; i < args.length; i++) { if ("-index".equals(args[i])) { indexPath = args[i + 1]; i++; } else if ("-docs".equals(args[i])) { docsPath = args[i + 1]; i++; } else if ("-update".equals(args[i])) { create = false; } } if (docsPath == null) { System.err.println("Usage: " + usage); System.exit(1); } final Path docDir = Paths.get(docsPath); if (!Files.isReadable(docDir)) { System.out.println("Document directory dhfndk '" + docDir.toAbsolutePath() + "' does not exist or is not readable, please check the path"); System.exit(1); 
} Date start = new Date(); try { System.out.println("Indexing to directory '" + indexPath + "'..."); Directory dir = FSDirectory.open(Paths.get(indexPath)); //Analyzer analyzer = new StandardAnalyzer(); //IndexWriterConfig iwc = new IndexWriterConfig(analyzer); StandardAnalyzer analyzer = new StandardAnalyzer(); //Directory dir = new RAMDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(analyzer); /*IBSimilarity similarity = new IBSimilarity( new DistributionLL(),//1 //new DistributionSPL(),//2 new LambdaDF(),//1 //new LambdaTTF(), //2 new NormalizationH2());*/ /*DFRSimilarity similarity = new DFRSimilarity( ///////INB2 Similarity new BasicModelIn(), new AfterEffectL(), new NormalizationH1());*/ LMDirichletSimilarity similarity = new LMDirichletSimilarity();//////// LMD Model iwc.setSimilarity(similarity); IndexWriter writer = new IndexWriter(dir, iwc); if (create) { // Create a new index in the directory, removing any // previously indexed documents: iwc.setOpenMode(OpenMode.CREATE); } else { // Add new documents to an existing index: iwc.setOpenMode(OpenMode.CREATE_OR_APPEND); } System.out.println("Test 1"); // Optional: for better indexing performance, if you // are indexing many documents, increase the RAM // buffer. But if you do this, increase the max heap // size to the JVM (eg add -Xmx512m or -Xmx1g): // // iwc.setRAMBufferSizeMB(256.0); //IndexWriter writer = new IndexWriter(dir, iwc); System.out.println("Test 2"); indexDocs(writer, docDir); System.out.println("Test 3"); // NOTE: if you want to maximize search performance, // you can optionally call forceMerge here. 
This can be // a terribly costly operation, so generally it's only // worth it when your index is relatively static (ie // you're done adding documents to it): // // writer.forceMerge(1); writer.close(); Date end = new Date(); System.out.println(end.getTime() - start.getTime() + " total milliseconds"); } catch (IOException e) { System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage()); } }
From source file:com.github.houbin217jz.thumbnail.Thumbnail.java
public static void main(String[] args) { Options options = new Options(); options.addOption("s", "src", true, "????????????"); options.addOption("d", "dst", true, ""); options.addOption("r", "ratio", true, "/??, 30%???0.3????????"); options.addOption("w", "width", true, "(px)"); options.addOption("h", "height", true, "?(px)"); options.addOption("R", "recursive", false, "???????"); HelpFormatter formatter = new HelpFormatter(); String formatstr = "java -jar thumbnail.jar " + "[-s/--src <path>] " + "[-d/--dst <path>] " + "[-r/--ratio double] " + "[-w/--width integer] " + "[-h/--height integer] " + "[-R/--recursive] "; CommandLineParser parser = new PosixParser(); CommandLine cmd = null;/*from w ww .j ava2s.c om*/ try { cmd = parser.parse(options, args); } catch (ParseException e1) { formatter.printHelp(formatstr, options); return; } final Path srcDir, dstDir; final Integer width, height; final Double ratio; // if (cmd.hasOption("s")) { srcDir = Paths.get(cmd.getOptionValue("s")).toAbsolutePath(); } else { srcDir = Paths.get("").toAbsolutePath(); //?? } // if (cmd.hasOption("d")) { dstDir = Paths.get(cmd.getOptionValue("d")).toAbsolutePath(); } else { formatter.printHelp(formatstr, options); return; } if (!Files.exists(srcDir, LinkOption.NOFOLLOW_LINKS) || !Files.isDirectory(srcDir, LinkOption.NOFOLLOW_LINKS)) { System.out.println("[" + srcDir.toAbsolutePath() + "]??????"); return; } if (Files.exists(dstDir, LinkOption.NOFOLLOW_LINKS)) { if (!Files.isDirectory(dstDir, LinkOption.NOFOLLOW_LINKS)) { //???????? System.out.println("????????"); return; } } else { //???????? try { Files.createDirectories(dstDir); } catch (IOException e) { e.printStackTrace(); return; } } //?? if (cmd.hasOption("w") && cmd.hasOption("h")) { try { width = Integer.valueOf(cmd.getOptionValue("width")); height = Integer.valueOf(cmd.getOptionValue("height")); } catch (NumberFormatException e) { System.out.println("??????"); return; } } else { width = null; height = null; } //? 
if (cmd.hasOption("r")) { try { ratio = Double.valueOf(cmd.getOptionValue("r")); } catch (NumberFormatException e) { System.out.println("?????"); return; } } else { ratio = null; } if (width != null && ratio != null) { System.out.println("??????????????"); return; } if (width == null && ratio == null) { System.out.println("????????????"); return; } // int maxDepth = 1; if (cmd.hasOption("R")) { maxDepth = Integer.MAX_VALUE; } try { //Java 7 ??@see http://docs.oracle.com/javase/jp/7/api/java/nio/file/Files.html Files.walkFileTree(srcDir, EnumSet.of(FileVisitOption.FOLLOW_LINKS), maxDepth, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path path, BasicFileAttributes basicFileAttributes) throws IOException { //???&??? String filename = path.getFileName().toString().toLowerCase(); if (filename.endsWith(".jpg") || filename.endsWith(".jpeg")) { //Jpeg?? /* * relative??: * rootPath: /a/b/c/d * filePath: /a/b/c/d/e/f.jpg * rootPath.relativize(filePath) = e/f.jpg */ /* * resolve?? * rootPath: /a/b/c/output * relativePath: e/f.jpg * rootPath.resolve(relativePath) = /a/b/c/output/e/f.jpg */ Path dst = dstDir.resolve(srcDir.relativize(path)); if (!Files.exists(dst.getParent(), LinkOption.NOFOLLOW_LINKS)) { Files.createDirectories(dst.getParent()); } doResize(path.toFile(), dst.toFile(), width, height, ratio); } return FileVisitResult.CONTINUE; } }); } catch (IOException e) { e.printStackTrace(); } }
From source file:de.huberlin.wbi.cuneiform.cmdline.main.Main.java
/**
 * Command-line entry point for the Cuneiform interpreter.
 *
 * Parses the CLI options, configures the selected execution platform and
 * input format, wires up the actor pipeline (CRE actor + ticket source —
 * presumably the execution backend and its work dispatcher; confirm against
 * the actor classes), then interprets either the given input files or stdin.
 * Optionally writes a JSON summary of the run (-s).
 */
public static void main(String[] args)
        throws IOException, ParseException, InterruptedException, NotDerivableException {
    CommandLine cmd;
    Options opt;
    BaseRepl repl;
    BaseCreActor cre;
    Path sandbox;
    ExecutorService executor;
    TicketSrcActor ticketSrc;
    JsonSummary summary;
    Path summaryPath;
    Log statLog;
    int nthread;
    Path workDir;
    statLog = LogFactory.getLog("statLogger");
    executor = Executors.newCachedThreadPool();
    try {
        opt = getOptions();
        cmd = parse(args, opt);
        config(cmd);
        // -h: print the version banner and usage, then quit.
        if (cmd.hasOption('h')) {
            System.out.println("CUNEIFORM - A Functional Workflow Language\nversion " + BaseRepl.LABEL_VERSION
                    + " build " + BaseRepl.LABEL_BUILD);
            new HelpFormatter().printHelp("java -jar cuneiform.jar [OPTION]*", opt);
            return;
        }
        // Optional library paths for R (-r) and Python (-y) foreign lambdas.
        if (cmd.hasOption('r'))
            Invocation.putLibPath(ForeignLambdaExpr.LANGID_R, cmd.getOptionValue('r'));
        if (cmd.hasOption('y'))
            Invocation.putLibPath(ForeignLambdaExpr.LANGID_PYTHON, cmd.getOptionValue('y'));
        // Sandbox directory: -l overrides the default ~/.cuneiform.
        if (cmd.hasOption('l'))
            sandbox = Paths.get(cmd.getOptionValue("l"));
        else
            sandbox = Paths.get(System.getProperty("user.home")).resolve(".cuneiform");
        sandbox = sandbox.toAbsolutePath();
        // -c: delete any existing sandbox so the run starts clean.
        if (cmd.hasOption('c'))
            LocalThread.deleteIfExists(sandbox);
        // Worker threads: -t overrides the processor count.
        if (cmd.hasOption('t'))
            nthread = Integer.valueOf(cmd.getOptionValue('t'));
        else
            nthread = Runtime.getRuntime().availableProcessors();
        // Working directory: -w overrides the current directory.
        if (cmd.hasOption('w'))
            workDir = Paths.get(cmd.getOptionValue('w'));
        else
            workDir = Paths.get(System.getProperty("user.dir"));
        workDir = workDir.toAbsolutePath();
        // Instantiate the CRE actor for the configured platform.
        switch (platform) {
        case PLATFORM_LOCAL:
            if (!Files.exists(sandbox))
                Files.createDirectories(sandbox);
            cre = new LocalCreActor(sandbox, workDir, nthread);
            break;
        case PLATFORM_HTCONDOR:
            if (!Files.exists(sandbox))
                Files.createDirectories(sandbox);
            if (cmd.hasOption('m')) { // MAX_TRANSFER SIZE
                String maxTransferSize = cmd.getOptionValue('m');
                try {
                    cre = new CondorCreActor(sandbox, maxTransferSize);
                } catch (Exception e) {
                    // Invalid -m value: report it, show usage, and quit.
                    System.out.println("INVALID '-m' option value: " + maxTransferSize
                            + "\n\nCUNEIFORM - A Functional Workflow Language\nversion " + BaseRepl.LABEL_VERSION
                            + " build " + BaseRepl.LABEL_BUILD);
                    new HelpFormatter().printHelp("java -jar cuneiform.jar [OPTION]*", opt);
                    return;
                }
            } else {
                cre = new CondorCreActor(sandbox);
            }
            break;
        default:
            throw new RuntimeException("Platform not recognized.");
        }
        executor.submit(cre);
        ticketSrc = new TicketSrcActor(cre);
        executor.submit(ticketSrc);
        executor.shutdown();
        // Choose the REPL implementation for the configured input format.
        switch (format) {
        case FORMAT_CF:
            if (cmd.hasOption("i"))
                repl = new InteractiveRepl(ticketSrc, statLog);
            else
                repl = new CmdlineRepl(ticketSrc, statLog);
            break;
        case FORMAT_DAX:
            repl = new DaxRepl(ticketSrc, statLog);
            break;
        default:
            throw new RuntimeException("Format not recognized.");
        }
        if (cmd.hasOption("i")) {
            // run in interactive mode
            BaseRepl.run(repl);
            return;
        }
        // run in quiet mode: interpret the input files, or stdin if none given
        if (inputFileVector.length > 0)
            for (Path f : inputFileVector)
                repl.interpret(readFile(f));
        else
            repl.interpret(readStdIn());
        // Give the actors a head start, then wait until the REPL is idle.
        Thread.sleep(3 * Actor.DELAY);
        while (repl.isBusy())
            Thread.sleep(Actor.DELAY);
        // -s: write a JSON summary of the run to the given path.
        if (cmd.hasOption("s")) {
            summary = new JsonSummary(ticketSrc.getRunId(), sandbox, repl.getAns());
            summaryPath = Paths.get(cmd.getOptionValue("s"));
            summaryPath = summaryPath.toAbsolutePath();
            try (BufferedWriter writer = Files.newBufferedWriter(summaryPath, Charset.forName("UTF-8"))) {
                writer.write(summary.toString());
            }
        }
    } finally {
        // Always tear down the thread pool, even on early return or exception.
        executor.shutdownNow();
    }
}