Results 81 - 90 of 433 for rootfs (0.03 sec)
src/main/java/org/codelibs/fess/opensearch/config/exentity/WebConfig.java

    final Map<String, String> clientConfigMap = getConfigParameterMap(ConfigName.CLIENT);
    if (clientConfigMap != null) {
        paramMap.putAll(clientConfigMap);
    }
    // robots txt enabled
    if (paramMap.get(Param.Client.ROBOTS_TXT_ENABLED) == null) {
        paramMap.put(Param.Client.ROBOTS_TXT_ENABLED, !fessConfig.isCrawlerIgnoreRobotsTxt());
    }

Registered: Thu Sep 04 12:52:25 UTC 2025 - Last Modified: Sat Mar 15 06:53:53 UTC 2025 - 9.9K bytes - Viewed (0)

okhttp/src/jvmTest/kotlin/okhttp3/internal/connection/RouteSelectorTest.kt

    val selection = routeSelector.next()
    dns.assertRequests(uriHost)
    val routes = selection.routes
    assertRoute(routes[0], address, Proxy.NO_PROXY, dns.lookup(uriHost, 0), uriPort)
    assertRoute(routes[1], address, Proxy.NO_PROXY, dns.lookup(uriHost, 1), uriPort)
    assertThat(selection.next()).isSameAs(routes[0])
    assertThat(selection.next()).isSameAs(routes[1])
    assertThat(selection.hasNext()).isFalse()

Registered: Fri Sep 05 11:42:10 UTC 2025 - Last Modified: Fri May 30 21:28:20 UTC 2025 - 20.7K bytes - Viewed (0)

docs/changelogs/changelog_4x.md

    automatically retry on an unshared connection.
    * Fix: Don't crash if a TLS tunnel's response body is truncated.
    * Fix: Don't track unusable routes beyond their usefulness. We had a bug where we could track certain bad routes indefinitely; now we only track the ones that could be necessary.
    * Fix: Defer proxy selection until a proxy is required. This saves calls to `ProxySelector` on

Registered: Fri Sep 05 11:42:10 UTC 2025 - Last Modified: Wed Apr 17 13:25:31 UTC 2024 - 25.2K bytes - Viewed (0)

okhttp-testing-support/src/main/kotlin/okhttp3/internal/RecordingOkAuthenticator.kt

    val responses = mutableListOf<Response>()
    val routes = mutableListOf<Route>()

    fun onlyResponse() = responses.single()

    fun onlyRoute() = routes.single()

    @Throws(IOException::class)
    override fun authenticate(
        route: Route?,
        response: Response,
    ): Request? {
        if (route == null) throw NullPointerException("route == null")
        responses += response
        routes += route

Registered: Fri Sep 05 11:42:10 UTC 2025 - Last Modified: Wed Mar 19 19:25:20 UTC 2025 - 1.7K bytes - Viewed (0)

fess-crawler/src/test/java/org/codelibs/fess/crawler/CrawlerContextTest.java

    // Add URLs to default set
    urlSet.add("http://example.com/robots.txt");
    urlSet.add("http://test.com/robots.txt");
    assertEquals(2, crawlerContext.getRobotsTxtUrlSet().size());

    // Set new set
    Set<String> newSet = new HashSet<>();
    newSet.add("http://new.com/robots.txt");
    crawlerContext.setRobotsTxtUrlSet(newSet);

Registered: Sun Sep 21 03:50:09 UTC 2025 - Last Modified: Sat Sep 06 04:15:37 UTC 2025 - 25.6K bytes - Viewed (0)

android-test/src/androidTest/java/okhttp/android/test/OkHttpTest.kt

    client
        .newBuilder()
        .eventListener(EventListener.NONE)
        .dns(dohDns)
        .build()
    dohEnabledClient.get("https://www.twitter.com/robots.txt")
    dohEnabledClient.get("https://www.facebook.com/robots.txt")
    }

    @Test
    fun testCustomTrustManager() {
        assumeNetwork()
        val trustManager = object : X509TrustManager {

Registered: Fri Sep 05 11:42:10 UTC 2025 - Last Modified: Sat Aug 02 14:12:28 UTC 2025 - 29K bytes - Viewed (0)

android-test/src/test/kotlin/okhttp/android/test/AndroidLoggingTest.kt

    val clientBuilder =
        OkHttpClient.Builder()
            .connectionSpecs(listOf(ConnectionSpec.CLEARTEXT))
            .dns { throw UnknownHostException("shortcircuit") }

    val request = Request("http://google.com/robots.txt".toHttpUrl())

    @Test
    fun testHttpLoggingInterceptor() {
        val interceptor = HttpLoggingInterceptor().apply { level = HttpLoggingInterceptor.Level.BASIC }

Registered: Fri Sep 05 11:42:10 UTC 2025 - Last Modified: Thu Aug 21 14:27:04 UTC 2025 - 3.2K bytes - Viewed (0)

docs/en/docs/advanced/openapi-callbacks.md
Registered: Sun Sep 07 07:19:17 UTC 2025 - Last Modified: Sun Aug 31 09:15:41 UTC 2025 - 7.9K bytes - Viewed (0)

docs/features/connections.md
Registered: Fri Sep 05 11:42:10 UTC 2025 - Last Modified: Mon Feb 21 03:33:59 UTC 2022 - 5.4K bytes - Viewed (0)

fess-crawler/src/main/java/org/codelibs/fess/crawler/client/http/HcHttpClient.java

            httpClientPropertyMap.put(name, value);
        }
    }

    /**
     * Processes robots.txt for the given URL.
     * This method fetches and parses the robots.txt file to extract disallow/allow rules
     * and sitemap information.
     *
     * @param url The URL to process robots.txt for
     */
    protected void processRobotsTxt(final String url) {
        if (StringUtil.isBlank(url)) {

Registered: Sun Sep 21 03:50:09 UTC 2025 - Last Modified: Thu Aug 07 02:55:08 UTC 2025 - 52.2K bytes - Viewed (0)
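
The Javadoc in this last excerpt describes the general task: fetch a site's robots.txt, extract its allow/disallow rules, and collect any Sitemap entries. As a rough, self-contained sketch of that idea only (this is not Fess's actual implementation; the SimpleRobotsTxt class and fetchAndParse method are hypothetical names, and real parsers also group rules per User-agent), it could look like this in Java:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical container for the pieces of robots.txt a crawler cares about.
    public class SimpleRobotsTxt {
        public final List<String> disallows = new ArrayList<>();
        public final List<String> allows = new ArrayList<>();
        public final List<String> sitemaps = new ArrayList<>();

        // Fetches <scheme>://<host>/robots.txt and collects Allow/Disallow/Sitemap lines.
        // Real implementations also group rules by User-agent; this sketch ignores that.
        public static SimpleRobotsTxt fetchAndParse(final String baseUrl) throws Exception {
            final URI robotsUri = URI.create(baseUrl).resolve("/robots.txt");
            final HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(HttpRequest.newBuilder(robotsUri).GET().build(),
                            HttpResponse.BodyHandlers.ofString());

            final SimpleRobotsTxt result = new SimpleRobotsTxt();
            if (response.statusCode() != 200) {
                return result; // no robots.txt means nothing is disallowed
            }
            for (final String rawLine : response.body().split("\n")) {
                final String line = rawLine.trim();
                final int colon = line.indexOf(':');
                if (line.isEmpty() || line.startsWith("#") || colon < 0) {
                    continue; // blank line, comment, or malformed record
                }
                final String field = line.substring(0, colon).trim().toLowerCase();
                final String value = line.substring(colon + 1).trim();
                switch (field) {
                case "disallow":
                    result.disallows.add(value);
                    break;
                case "allow":
                    result.allows.add(value);
                    break;
                case "sitemap":
                    result.sitemaps.add(value);
                    break;
                default:
                    break; // User-agent, Crawl-delay, etc. are ignored in this sketch
                }
            }
            return result;
        }
    }

A crawler could then call SimpleRobotsTxt.fetchAndParse("https://example.com/") once per host, skip any URL whose path starts with one of the returned disallow prefixes, and queue the sitemap URLs for discovery, which is roughly the behavior the Javadoc above attributes to processRobotsTxt.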