playwright-go
playwright-go copied to clipboard
[Question]: Is playwright-go thread-safe if multiple instances are created in separate goroutines?
I'm using this awesome library for web scraping but when running browsers in parallel I get random crashes.
So this might be because the library is not thread-safe. In the following example I create two different instances of Chrome with playwright.Run. Is this safe to do?
package main
import (
"sync"
"github.com/crawler/internal/crawl"
)
// main launches two independent browser instances in parallel goroutines
// (each one calls playwright.Run internally via crawl.LauchBrowser) and
// scrapes a fixed URL ten times per browser, to reproduce crashes seen
// when running browsers concurrently.
func main() {
	var wg sync.WaitGroup

	// scrape launches its own browser instance and scrapes url ten
	// times, closing each page's browser context when done.
	// NOTE(review): the browser itself is never closed here — if crawl
	// exposes a Close/Shutdown on the returned browser, it should be
	// deferred right after a successful launch.
	scrape := func(url string) {
		// Done is deferred first so the WaitGroup is always released,
		// matching both goroutines (the original second goroutine
		// deferred Done only after the launch error check).
		defer wg.Done()
		b, err := crawl.LauchBrowser(crawl.LaunchOptions{
			Headless: false,
		})
		if err != nil {
			panic("failed to launch browser: " + err.Error())
		}
		for i := 0; i < 10; i++ {
			p, err := crawl.NewPage(b)
			if err != nil {
				panic("failed to create new page: " + err.Error())
			}
			if _, err = crawl.ScrapeHomePage(p, url); err != nil {
				panic("failed to scrape home page: " + err.Error())
			}
			if err = p.Context().Close(); err != nil {
				panic("failed to close context: " + err.Error())
			}
		}
	}

	wg.Add(2)
	go scrape("https://google.com")
	go scrape("https://example.com")
	wg.Wait()
}