mirror of
https://github.com/0xMassi/webclaw.git
synced 2026-04-25 00:06:21 +02:00
perf: reduce fetch timeout to 12s and retries to 2
Stress testing showed 33% of proxies are dead, causing 30s+ timeouts per request with 3 retries (worst case 94s). Reducing the timeout from 30s to 12s and retries from 3 to 2 brings the worst case down to 25s. Combined with disabling the 509 dead proxies in the pool, this should significantly improve response times under load.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
5ea646a332
commit
954aabe3e8
3 changed files with 11 additions and 4 deletions
|
|
@ -3,6 +3,14 @@
|
||||||
All notable changes to webclaw are documented here.
|
All notable changes to webclaw are documented here.
|
||||||
Format follows [Keep a Changelog](https://keepachangelog.com/).
|
Format follows [Keep a Changelog](https://keepachangelog.com/).
|
||||||
|
|
||||||
|
## [0.3.10] — 2026-04-10
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
- **Fetch timeout reduced from 30s to 12s**: prevents cascading slowdowns when proxies are unresponsive. Worst-case per-URL drops from ~94s to ~25s.
|
||||||
|
- **Retry attempts reduced from 3 to 2**: combined with shorter timeout, total worst-case is 12s + 1s delay + 12s = 25s instead of 30s + 1s + 30s + 3s + 30s = 94s.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## [0.3.9] — 2026-04-04
|
## [0.3.9] — 2026-04-04
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ resolver = "2"
|
||||||
members = ["crates/*"]
|
members = ["crates/*"]
|
||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
version = "0.3.9"
|
version = "0.3.10"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
license = "AGPL-3.0"
|
license = "AGPL-3.0"
|
||||||
repository = "https://github.com/0xMassi/webclaw"
|
repository = "https://github.com/0xMassi/webclaw"
|
||||||
|
|
|
||||||
|
|
@ -44,7 +44,7 @@ impl Default for FetchConfig {
|
||||||
browser: BrowserProfile::Chrome,
|
browser: BrowserProfile::Chrome,
|
||||||
proxy: None,
|
proxy: None,
|
||||||
proxy_pool: Vec::new(),
|
proxy_pool: Vec::new(),
|
||||||
timeout: Duration::from_secs(30),
|
timeout: Duration::from_secs(12),
|
||||||
follow_redirects: true,
|
follow_redirects: true,
|
||||||
max_redirects: 10,
|
max_redirects: 10,
|
||||||
headers: HashMap::from([("Accept-Language".to_string(), "en-US,en;q=0.9".to_string())]),
|
headers: HashMap::from([("Accept-Language".to_string(), "en-US,en;q=0.9".to_string())]),
|
||||||
|
|
@ -207,13 +207,12 @@ impl FetchClient {
|
||||||
/// Fetch a URL and return the raw HTML + response metadata.
|
/// Fetch a URL and return the raw HTML + response metadata.
|
||||||
///
|
///
|
||||||
/// Automatically retries on transient failures (network errors, 5xx, 429)
|
/// Automatically retries on transient failures (network errors, 5xx, 429)
|
||||||
/// with exponential backoff: 0s, 1s, 3s (3 attempts total).
|
/// with exponential backoff: 0s, 1s (2 attempts total).
|
||||||
#[instrument(skip(self), fields(url = %url))]
|
#[instrument(skip(self), fields(url = %url))]
|
||||||
pub async fn fetch(&self, url: &str) -> Result<FetchResult, FetchError> {
|
pub async fn fetch(&self, url: &str) -> Result<FetchResult, FetchError> {
|
||||||
let delays = [
|
let delays = [
|
||||||
Duration::ZERO,
|
Duration::ZERO,
|
||||||
Duration::from_secs(1),
|
Duration::from_secs(1),
|
||||||
Duration::from_secs(3),
|
|
||||||
];
|
];
|
||||||
let mut last_err = None;
|
let mut last_err = None;
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue