From 13c41c13087f9dda54d824cba5b3da2d60e915c7 Mon Sep 17 00:00:00 2001
From: OverHash <46231745+OverHash@users.noreply.github.com>
Date: Mon, 26 Feb 2024 23:41:24 +1300
Subject: [PATCH] Reduce concurrent requests

Countdown API limits requests, so reducing this value from 6 -> 2 reduces
the chance of bad API responses.

In the future, we should implement retry, and also not crash the first time
we fail to collect a price. Our scraper should be resilient and continue to
scrape, reporting the failures at the end.
---
 src/lib.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/lib.rs b/src/lib.rs
index 4d13c14..7cb62b0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -12,4 +12,4 @@ pub const CACHE_PATH: &str = "cache.json";
 /// The amount of milliseconds to wait between performing iterations on the pages.
 pub const PAGE_ITERATION_INTERVAL: Duration = Duration::from_millis(500);
 /// The amount of requests to perform in parallel.
-pub const CONCURRENT_REQUESTS: i64 = 6;
+pub const CONCURRENT_REQUESTS: i64 = 2;
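
As a rough illustration of the retry follow-up mentioned in the commit message (not part of this patch), a std-only helper along the lines of the hypothetical `retry_with_delay` below could wrap each price request, so a transient bad API response no longer aborts the run; failed pages could then be collected and reported once the scrape finishes. The function name, parameters, and error type are illustrative assumptions, not existing code in this repository.

```rust
use std::thread::sleep;
use std::time::Duration;

/// Hypothetical retry helper: a sketch of the "implement retry" idea,
/// not part of this patch. Calls `operation` up to `max_attempts` times,
/// sleeping `delay` between attempts, and returns the first success or
/// the last error.
fn retry_with_delay<T, E>(
    max_attempts: u32,
    delay: Duration,
    mut operation: impl FnMut() -> Result<T, E>,
) -> Result<T, E> {
    let mut last_err = None;
    for attempt in 1..=max_attempts {
        match operation() {
            Ok(value) => return Ok(value),
            Err(err) => {
                last_err = Some(err);
                // Only wait if another attempt remains.
                if attempt < max_attempts {
                    sleep(delay);
                }
            }
        }
    }
    // Reached only after every attempt failed; `max_attempts` must be >= 1.
    Err(last_err.expect("max_attempts must be >= 1"))
}

fn main() {
    // Example usage with a stand-in operation that always fails.
    let result: Result<(), &str> =
        retry_with_delay(3, Duration::from_millis(500), || Err("bad API response"));
    match result {
        Ok(()) => println!("price collected"),
        Err(err) => eprintln!("failed after retries: {err}"),
    }
}
```

In the real scraper, the per-page errors returned by such a helper could be pushed into a `Vec` instead of terminating the process, and summarised after the last page, matching the "report the failures at the end" goal described above.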