diff --git a/_examples/parallel/parallel.go b/_examples/parallel/parallel.go
index 261b96c02..290f659ec 100644
--- a/_examples/parallel/parallel.go
+++ b/_examples/parallel/parallel.go
@@ -12,6 +12,7 @@ func main() {
 		// MaxDepth is 2, so only the links on the scraped page
 		// and links on those pages are visited
 		colly.MaxDepth(2),
+		colly.Async(true),
 	)
 
 	// Limit the maximum parallelism to 5
@@ -28,7 +29,7 @@ func main() {
 		// Print link
 		fmt.Println(link)
 		// Visit link found on page on a new thread
-		go e.Request.Visit(link)
+		e.Request.Visit(link)
 	})
 
 	// Start scraping on https://en.wikipedia.org
diff --git a/_examples/random_delay/random_delay.go b/_examples/random_delay/random_delay.go
index 299bdb002..d9f58a250 100644
--- a/_examples/random_delay/random_delay.go
+++ b/_examples/random_delay/random_delay.go
@@ -15,6 +15,7 @@ func main() {
 	c := colly.NewCollector(
 		// Attach a debugger to the collector
 		colly.Debugger(&debug.LogDebugger{}),
+		colly.Async(true),
 	)
 
 	// Limit the number of threads started by colly to two
@@ -27,7 +28,7 @@ func main() {
 
 	// Start scraping in four threads on https://httpbin.org/delay/2
 	for i := 0; i < 4; i++ {
-		go c.Visit(fmt.Sprintf("%s?n=%d", url, i))
+		c.Visit(fmt.Sprintf("%s?n=%d", url, i))
 	}
 	// Start scraping on https://httpbin.org/delay/2
 	c.Visit(url)
diff --git a/_examples/rate_limit/rate_limit.go b/_examples/rate_limit/rate_limit.go
index f66957965..e17f4941f 100644
--- a/_examples/rate_limit/rate_limit.go
+++ b/_examples/rate_limit/rate_limit.go
@@ -12,6 +12,8 @@ func main() {
 
 	// Instantiate default collector
 	c := colly.NewCollector(
+		// Turn on asynchronous requests
+		colly.Async(true),
 		// Attach a debugger to the collector
 		colly.Debugger(&debug.LogDebugger{}),
 	)
@@ -24,12 +26,10 @@ func main() {
 		//Delay: 5 * time.Second,
 	})
 
-	// Start scraping in four threads on https://httpbin.org/delay/2
-	for i := 0; i < 4; i++ {
-		go c.Visit(fmt.Sprintf("%s?n=%d", url, i))
+	// Start scraping in five threads on https://httpbin.org/delay/2
+	for i := 0; i < 5; i++ {
+		c.Visit(fmt.Sprintf("%s?n=%d", url, i))
 	}
-	// Start scraping on https://httpbin.org/delay/2
-	c.Visit(url)
 	// Wait until threads are finished
 	c.Wait()
 }