Results 1 - 1 of 1 for processRobotsTxt (0.21 sec)
fess-crawler/src/main/java/org/codelibs/fess/crawler/client/http/HcHttpClient.java
 * This method fetches and parses the robots.txt file to extract disallow/allow rules
 * and sitemap information.
 *
 * @param url The URL to process robots.txt for
 */
protected void processRobotsTxt(final String url) {
    if (StringUtil.isBlank(url)) {
        throw new CrawlerSystemException("url is null or empty.");
    }
    if (robotsTxtHelper == null || !robotsTxtHelper.isEnabled()) {
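The snippet above is truncated after the guard clauses, but the Javadoc describes the overall flow: fetch the site's robots.txt and extract disallow/allow rules plus sitemap entries. Below is a minimal, self-contained sketch of that idea using only the standard Java HTTP client; it is not the fess-crawler implementation (the class name RobotsTxtSketch, the example URL, and the flat directive parsing are assumptions, and the real RobotsTxtHelper would also honor User-agent grouping, which this sketch ignores).

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.util.ArrayList;
    import java.util.List;

    public class RobotsTxtSketch {
        public static void main(String[] args) throws Exception {
            // Derive the robots.txt location from a page URL (hypothetical input).
            URI page = URI.create("https://example.com/some/page.html");
            URI robotsTxt = page.resolve("/robots.txt");

            // Fetch the file with the JDK 11+ HTTP client.
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder(robotsTxt).GET().build();
            HttpResponse<String> response =
                    client.send(request, HttpResponse.BodyHandlers.ofString());

            List<String> disallows = new ArrayList<>();
            List<String> allows = new ArrayList<>();
            List<String> sitemaps = new ArrayList<>();

            // Very small parser: collect Disallow/Allow/Sitemap directives,
            // ignoring User-agent sections and comments for brevity.
            for (String line : response.body().split("\r?\n")) {
                String trimmed = line.trim();
                if (trimmed.regionMatches(true, 0, "Disallow:", 0, 9)) {
                    disallows.add(trimmed.substring(9).trim());
                } else if (trimmed.regionMatches(true, 0, "Allow:", 0, 6)) {
                    allows.add(trimmed.substring(6).trim());
                } else if (trimmed.regionMatches(true, 0, "Sitemap:", 0, 8)) {
                    sitemaps.add(trimmed.substring(8).trim());
                }
            }

            System.out.println("Disallow rules: " + disallows);
            System.out.println("Allow rules:    " + allows);
            System.out.println("Sitemaps:       " + sitemaps);
        }
    }

In the actual crawler, the guard clauses shown in the search hit short-circuit this work when the URL is blank or when robots.txt handling is disabled, which is why they appear before any fetching logic.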