feat: implement site-based scraping with path overrides

Arik Jones (aider)
2024-09-19 16:06:55 -05:00
parent 1d38e4157c
commit 569ff9924d
3 changed files with 195 additions and 110 deletions

View File

@@ -38,41 +38,53 @@ func init() {
 }
 
 func runWeb(cmd *cobra.Command, args []string) error {
 	scraperConfig.Verbose = verbose
 
-	// Use config if available, otherwise use command-line flags
-	var urlConfigs []scraper.URLConfig
-	if len(urls) == 0 && len(cfg.Scrape.URLs) > 0 {
-		urlConfigs = make([]scraper.URLConfig, len(cfg.Scrape.URLs))
-		for i, u := range cfg.Scrape.URLs {
-			urlConfigs[i] = scraper.URLConfig{
-				URL:              u.URL,
-				CSSLocator:       u.CSSLocator,
-				ExcludeSelectors: u.ExcludeSelectors,
-				OutputAlias:      u.OutputAlias,
-			}
-		}
-	} else {
-		urlConfigs = make([]scraper.URLConfig, len(urls))
-		for i, u := range urls {
-			urlConfigs[i] = scraper.URLConfig{URL: u, CSSLocator: includeSelector}
-		}
-	}
+	var siteConfigs []scraper.SiteConfig
+	if len(cfg.Scrape.Sites) > 0 {
+		siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
+		for i, site := range cfg.Scrape.Sites {
+			siteConfigs[i] = scraper.SiteConfig{
+				BaseURL:          site.BaseURL,
+				CSSLocator:       site.CSSLocator,
+				ExcludeSelectors: site.ExcludeSelectors,
+				MaxDepth:         site.MaxDepth,
+				AllowedPaths:     site.AllowedPaths,
+				ExcludePaths:     site.ExcludePaths,
+				OutputAlias:      site.OutputAlias,
+				PathOverrides:    site.PathOverrides,
+			}
+		}
+	} else {
+		// Fallback to URL-based configuration if no sites are defined
+		siteConfigs = make([]scraper.SiteConfig, len(urls))
+		for i, u := range urls {
+			siteConfigs[i] = scraper.SiteConfig{
+				BaseURL:          u,
+				CSSLocator:       includeSelector,
+				ExcludeSelectors: excludeSelectors,
+			}
+		}
+	}
 
-	if len(urlConfigs) == 0 {
-		return fmt.Errorf("no URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.urls' in the rollup.yml file")
+	if len(siteConfigs) == 0 {
+		return fmt.Errorf("no sites or URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
 	}
 
 	scraperConfig := scraper.Config{
-		URLs:       urlConfigs,
-		OutputType: outputType,
+		Sites:      siteConfigs,
+		OutputType: cfg.Scrape.OutputType,
 		Verbose:    verbose,
+		Scrape: scraper.ScrapeConfig{
+			RequestsPerSecond: cfg.Scrape.RequestsPerSecond,
+			BurstLimit:        cfg.Scrape.BurstLimit,
+		},
 	}
 
-	scrapedContent, err := scraper.ScrapeMultipleURLs(scraperConfig)
+	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
 	if err != nil {
 		return fmt.Errorf("error scraping content: %v", err)
 	}
 
 	if outputType == "single" {
 		return writeSingleFile(scrapedContent)
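
For orientation, a minimal caller of the new API might look like the following sketch. The types and field names are taken from this commit; the URL, selector, and limit values are invented. (Note in passing that runWeb still branches on the outputType flag, while the scraper config now reads cfg.Scrape.OutputType, so the two sources can disagree if only one is set.)

func exampleScrape() (map[string]string, error) {
	// Illustrative values only; types and fields come from this commit.
	scraperConfig := scraper.Config{
		Sites: []scraper.SiteConfig{{
			BaseURL:      "https://example.com/docs",
			CSSLocator:   "main",
			MaxDepth:     10,
			AllowedPaths: []string{"/docs"},
		}},
		OutputType: "single",
		Verbose:    true,
		Scrape: scraper.ScrapeConfig{
			RequestsPerSecond: 1,
			BurstLimit:        2,
		},
	}
	return scraper.ScrapeSites(scraperConfig) // page URL -> extracted content
}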

View File

@@ -15,17 +15,27 @@ type Config struct {
 }
 
 type ScrapeConfig struct {
-	URLs              []URLConfig `yaml:"urls"`
+	Sites             []SiteConfig `yaml:"sites"`
 	OutputType        string      `yaml:"output_type"`
 	RequestsPerSecond float64     `yaml:"requests_per_second"`
 	BurstLimit        int         `yaml:"burst_limit"`
 }
 
-type URLConfig struct {
-	URL              string   `yaml:"url"`
+type SiteConfig struct {
+	BaseURL          string   `yaml:"base_url"`
 	CSSLocator       string   `yaml:"css_locator"`
 	ExcludeSelectors []string `yaml:"exclude_selectors"`
-	OutputAlias      string   `yaml:"output_alias"`
+	MaxDepth         int      `yaml:"max_depth"`
+	AllowedPaths     []string `yaml:"allowed_paths"`
+	ExcludePaths     []string `yaml:"exclude_paths"`
+	OutputAlias      string   `yaml:"output_alias"`
+	PathOverrides    []PathOverride `yaml:"path_overrides"`
+}
+
+type PathOverride struct {
+	Path             string   `yaml:"path"`
+	CSSLocator       string   `yaml:"css_locator"`
+	ExcludeSelectors []string `yaml:"exclude_selectors"`
 }
 
 func Load(configPath string) (*Config, error) {
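
For reference, a rollup.yml using this schema might look like the sketch below. The keys mirror the yaml tags above, and the top-level scrape key comes from the 'scrape.sites' hint in the first file's error message; every value is illustrative.

scrape:
  output_type: single
  requests_per_second: 1
  burst_limit: 2
  sites:
    - base_url: https://example.com/docs
      css_locator: main
      exclude_selectors: [".sidebar", "nav"]
      max_depth: 20
      allowed_paths: ["/docs"]
      exclude_paths: ["/docs/archive"]
      output_alias: example-docs
      path_overrides:
        - path: /docs/api
          css_locator: article
          exclude_selectors: [".toc"]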

View File

@@ -38,85 +38,148 @@ type ScrapeConfig struct {
 	BurstLimit int
 }
 
-// ScrapeMultipleURLs scrapes multiple URLs concurrently
-func ScrapeMultipleURLs(config Config) (map[string]string, error) {
+// ScrapeSites crawls each configured site concurrently and returns a map of
+// page URL to extracted content.
+func ScrapeSites(config Config) (map[string]string, error) {
 	results := make(chan struct {
 		url     string
 		content string
 		err     error
-	}, len(config.URLs))
+	})
 
-	// Use default values if not specified in the config
-	requestsPerSecond := 0.5 // Default to 1 request every 2 seconds
-	if config.Scrape.RequestsPerSecond > 0 {
-		requestsPerSecond = config.Scrape.RequestsPerSecond
-	}
-	burstLimit := 1 // Default to 1
-	if config.Scrape.BurstLimit > 0 {
-		burstLimit = config.Scrape.BurstLimit
-	}
-
-	// Create a rate limiter based on the configuration
-	limiter := rate.NewLimiter(rate.Limit(requestsPerSecond), burstLimit)
+	// Keep the old defaults as a guard: rate.NewLimiter(0, 0) would reject
+	// every request when the config leaves these fields unset.
+	requestsPerSecond := config.Scrape.RequestsPerSecond
+	if requestsPerSecond <= 0 {
+		requestsPerSecond = 0.5 // one request every 2 seconds
+	}
+	burstLimit := config.Scrape.BurstLimit
+	if burstLimit <= 0 {
+		burstLimit = 1
+	}
+	limiter := rate.NewLimiter(rate.Limit(requestsPerSecond), burstLimit)
 
 	var wg sync.WaitGroup
-	for _, urlConfig := range config.URLs {
+	for _, site := range config.Sites {
 		wg.Add(1)
-		go func(cfg URLConfig) {
+		go func(site SiteConfig) {
 			defer wg.Done()
-
-			// Wait for rate limiter before making the request
-			err := limiter.Wait(context.Background())
-			if err != nil {
-				results <- struct {
-					url     string
-					content string
-					err     error
-				}{cfg.URL, "", fmt.Errorf("rate limiter error: %v", err)}
-				return
-			}
-
-			content, err := scrapeURL(cfg)
-			results <- struct {
-				url     string
-				content string
-				err     error
-			}{cfg.URL, content, err}
-		}(urlConfig)
+			scrapeSite(site, config, results, limiter)
+		}(site)
 	}
 
 	go func() {
 		wg.Wait()
 		close(results)
 	}()
 
 	scrapedContent := make(map[string]string)
 	for result := range results {
 		if result.err != nil {
 			logger.Printf("Error scraping %s: %v\n", result.url, result.err)
 			continue
 		}
 		scrapedContent[result.url] = result.content
 	}
 
 	return scrapedContent, nil
 }
 
-func scrapeURL(config URLConfig) (string, error) {
-	content, err := FetchWebpageContent(config.URL)
-	if err != nil {
-		return "", err
-	}
-
-	if config.CSSLocator != "" {
-		content, err = ExtractContentWithCSS(content, config.CSSLocator, config.ExcludeSelectors)
-		if err != nil {
-			return "", err
-		}
-	}
-
-	return ProcessHTMLContent(content, Config{})
-}
+// scrapeSite does a breadth-first crawl starting at the site's base URL.
+func scrapeSite(site SiteConfig, config Config, results chan<- struct {
+	url     string
+	content string
+	err     error
+}, limiter *rate.Limiter) {
+	visited := make(map[string]bool)
+	queue := []string{site.BaseURL}
+
+	for len(queue) > 0 {
+		url := queue[0]
+		queue = queue[1:]
+
+		if visited[url] {
+			continue
+		}
+		visited[url] = true
+
+		if !isAllowedURL(url, site) {
+			continue
+		}
+
+		// Wait for the rate limiter before making the request
+		err := limiter.Wait(context.Background())
+		if err != nil {
+			results <- struct {
+				url     string
+				content string
+				err     error
+			}{url, "", fmt.Errorf("rate limiter error: %v", err)}
+			continue
+		}
+
+		cssLocator, excludeSelectors := getOverrides(url, site)
+		content, err := scrapeURL(url, cssLocator, excludeSelectors)
+		results <- struct {
+			url     string
+			content string
+			err     error
+		}{url, content, err}
+
+		// len(visited) counts every URL seen so far, so max_depth acts as a
+		// page budget for the crawl rather than a true link depth.
+		if len(visited) < site.MaxDepth {
+			links, _ := ExtractLinks(url)
+			for _, link := range links {
+				if !visited[link] && isAllowedURL(link, site) {
+					queue = append(queue, link)
+				}
+			}
+		}
+	}
+}
+
+// The parameter is named rawURL rather than url so it does not shadow the
+// net/url package, which would break the url.Parse calls below.
+func isAllowedURL(rawURL string, site SiteConfig) bool {
+	parsedURL, err := url.Parse(rawURL)
+	if err != nil {
+		return false
+	}
+
+	baseURL, _ := url.Parse(site.BaseURL)
+	if parsedURL.Host != baseURL.Host {
+		return false
+	}
+
+	path := parsedURL.Path
+
+	// With no allowed_paths configured (e.g. the --urls fallback), treat the
+	// whole host as in scope; exclude_paths still applies.
+	if len(site.AllowedPaths) == 0 {
+		for _, excludePath := range site.ExcludePaths {
+			if strings.HasPrefix(path, excludePath) {
+				return false
+			}
+		}
+		return true
+	}
+
+	for _, allowedPath := range site.AllowedPaths {
+		if strings.HasPrefix(path, allowedPath) {
+			for _, excludePath := range site.ExcludePaths {
+				if strings.HasPrefix(path, excludePath) {
+					return false
+				}
+			}
+			return true
+		}
+	}
+	return false
+}
+
+// getOverrides returns the CSS locator and exclude selectors to use for a
+// given URL, applying the first matching path override.
+func getOverrides(rawURL string, site SiteConfig) (string, []string) {
+	parsedURL, _ := url.Parse(rawURL)
+	path := parsedURL.Path
+	for _, override := range site.PathOverrides {
+		if strings.HasPrefix(path, override.Path) {
+			if override.CSSLocator != "" {
+				return override.CSSLocator, override.ExcludeSelectors
+			}
+			return site.CSSLocator, override.ExcludeSelectors
+		}
+	}
+	return site.CSSLocator, site.ExcludeSelectors
+}
+
+func scrapeURL(url, cssLocator string, excludeSelectors []string) (string, error) {
+	content, err := FetchWebpageContent(url)
+	if err != nil {
+		return "", err
+	}
+
+	if cssLocator != "" {
+		content, err = ExtractContentWithCSS(content, cssLocator, excludeSelectors)
+		if err != nil {
+			return "", err
+		}
+	}
+
+	return ProcessHTMLContent(content, Config{})
+}
 
 func getFilenameFromContent(content, url string) string {
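
To make the path rules concrete, here is a hypothetical same-package sketch of how the helpers above behave. It assumes the scraper package's override type is named PathOverride like its config counterpart, and all URLs are invented.

func examplePathRules() {
	site := SiteConfig{
		BaseURL:       "https://example.com",
		CSSLocator:    "main",
		AllowedPaths:  []string{"/docs"},
		ExcludePaths:  []string{"/docs/archive"},
		PathOverrides: []PathOverride{{Path: "/docs/api", CSSLocator: "article"}},
	}

	fmt.Println(isAllowedURL("https://example.com/docs/intro", site))       // true: same host, under /docs
	fmt.Println(isAllowedURL("https://example.com/docs/archive/old", site)) // false: exclude_paths wins
	fmt.Println(isAllowedURL("https://other.example/docs/intro", site))     // false: different host

	loc, _ := getOverrides("https://example.com/docs/api/v1", site) // "article": first matching override wins
	loc, _ = getOverrides("https://example.com/docs/intro", site)   // "main": no override, site-wide default
	_ = loc
}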