6 Commits

4 changed files with 216 additions and 222 deletions

View File

@@ -63,19 +63,19 @@ rollup [command] [flags]
 Rollup can be configured using a YAML file. By default, it looks for `rollup.yml` in the current directory. You can specify a different configuration file using the `--config` flag.
 
-Example `rollup.yml`:
+**Scrape Configuration Parameters:**
+
+- `requests_per_second`: *(float, optional)* The rate at which requests are made per second during web scraping. Default is `1.0`.
+- `burst_limit`: *(integer, optional)* The maximum number of requests that can be made in a burst. Default is `5`.
+
+These parameters help control the request rate to avoid overloading the target servers and to comply with their rate limits.
+
+**Example `rollup.yml` with Scrape Configuration:**
 
 ```yaml
-file_types:
-  - go
-  - md
-ignore:
-  - node_modules/**
-  - vendor/**
-  - .git/**
-code_generated:
-  - **/generated/**
 scrape:
+  requests_per_second: 1.0
+  burst_limit: 5
   sites:
     - base_url: https://example.com
       css_locator: .content
@@ -94,9 +94,6 @@ scrape:
       css_locator: .special-content
       exclude_selectors:
         - .special-ads
-  output_type: single
-  requests_per_second: 1.0
-  burst_limit: 3
 ```
 
 ## Examples
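
For reference, the two settings feed a token-bucket rate limiter. Below is a minimal, self-contained sketch of the resulting request pacing, assuming the limiter is `golang.org/x/time/rate`, which matches the `rate.NewLimiter` and `rate.Limit` calls in the scraper diff further down:

```go
package main

import (
    "context"
    "fmt"
    "time"

    "golang.org/x/time/rate"
)

func main() {
    // The documented defaults: requests_per_second: 1.0, burst_limit: 5.
    limiter := rate.NewLimiter(rate.Limit(1.0), 5)

    start := time.Now()
    for i := 1; i <= 8; i++ {
        // Wait blocks until a token is available: the first 5 calls drain
        // the burst immediately, the remaining ones are spaced ~1s apart.
        if err := limiter.Wait(context.Background()); err != nil {
            fmt.Println("limiter:", err)
            return
        }
        fmt.Printf("request %d at %v\n", i, time.Since(start).Round(time.Millisecond))
    }
}
```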

View File

@@ -4,9 +4,7 @@ import (
"fmt" "fmt"
"io" "io"
"log" "log"
"net/url"
"os" "os"
"regexp"
"strings" "strings"
"time" "time"
@@ -23,8 +21,6 @@ var (
 	excludeSelectors []string
 )
 
-var scraperConfig scraper.Config
-
 var webCmd = &cobra.Command{
 	Use:   "web",
 	Short: "Scrape main content from webpages and convert to Markdown",
@@ -40,94 +36,101 @@ func init() {
 	webCmd.Flags().StringSliceVar(&excludeSelectors, "exclude", []string{}, "CSS selectors to exclude from the extracted content (comma-separated)")
 }
 
+func validateScrapeConfig(scrapeConfig config.ScrapeConfig) error {
+	if scrapeConfig.RequestsPerSecond <= 0 {
+		return fmt.Errorf("requests_per_second must be greater than 0")
+	}
+	if scrapeConfig.BurstLimit <= 0 {
+		return fmt.Errorf("burst_limit must be greater than 0")
+	}
+	return nil
+}
+
 func runWeb(cmd *cobra.Command, args []string) error {
 	scraper.SetupLogger(verbose)
 	logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
 	if !verbose {
 		logger.SetOutput(io.Discard)
 	}
 	logger.Printf("Starting web scraping process with verbose mode: %v", verbose)
 
-	scraperConfig.Verbose = verbose
-
-	var siteConfigs []scraper.SiteConfig
-	if len(cfg.Scrape.Sites) > 0 {
-		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
-		siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
-		for i, site := range cfg.Scrape.Sites {
-			siteConfigs[i] = scraper.SiteConfig{
-				BaseURL:          site.BaseURL,
-				CSSLocator:       site.CSSLocator,
-				ExcludeSelectors: site.ExcludeSelectors,
-				MaxDepth:         site.MaxDepth,
-				AllowedPaths:     site.AllowedPaths,
-				ExcludePaths:     site.ExcludePaths,
-				OutputAlias:      site.OutputAlias,
-				PathOverrides:    convertPathOverrides(site.PathOverrides),
-			}
-			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
-				i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
-		}
-	} else {
-		logger.Printf("No sites defined in rollup.yml, falling back to URL-based configuration")
-		siteConfigs = make([]scraper.SiteConfig, len(urls))
-		for i, u := range urls {
-			siteConfigs[i] = scraper.SiteConfig{
-				BaseURL:          u,
-				CSSLocator:       includeSelector,
-				ExcludeSelectors: excludeSelectors,
-				MaxDepth:         depth,
-			}
-			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
-				i+1, u, includeSelector, depth)
-		}
-	}
-
-	if len(siteConfigs) == 0 {
-		logger.Println("Error: No sites or URLs provided")
-		return fmt.Errorf("no sites or URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
-	}
-
-	// Set default values for rate limiting
-	defaultRequestsPerSecond := 1.0
-	defaultBurstLimit := 3
-
-	// Use default values if not set in the configuration
-	requestsPerSecond := cfg.Scrape.RequestsPerSecond
-	if requestsPerSecond == 0 {
-		requestsPerSecond = defaultRequestsPerSecond
-	}
-	burstLimit := cfg.Scrape.BurstLimit
-	if burstLimit == 0 {
-		burstLimit = defaultBurstLimit
-	}
-
-	scraperConfig := scraper.Config{
-		Sites:      siteConfigs,
-		OutputType: outputType,
-		Verbose:    verbose,
-		Scrape: scraper.ScrapeConfig{
-			RequestsPerSecond: requestsPerSecond,
-			BurstLimit:        burstLimit,
-		},
-	}
-
-	logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
-		outputType, requestsPerSecond, burstLimit)
-
-	logger.Println("Starting scraping process")
-	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
-	if err != nil {
-		logger.Printf("Error occurred during scraping: %v", err)
-		return fmt.Errorf("error scraping content: %v", err)
-	}
-	logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))
-
-	if outputType == "single" {
-		logger.Println("Writing content to a single file")
-		return writeSingleFile(scrapedContent)
-	} else {
-		logger.Println("Writing content to multiple files")
-		return writeMultipleFiles(scrapedContent)
-	}
+	// Prepare site configurations
+	var siteConfigs []scraper.SiteConfig
+	if len(cfg.Scrape.Sites) > 0 {
+		// Use configurations from rollup.yml
+		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
+		siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
+		for i, site := range cfg.Scrape.Sites {
+			siteConfigs[i] = scraper.SiteConfig{
+				BaseURL:          site.BaseURL,
+				CSSLocator:       site.CSSLocator,
+				ExcludeSelectors: site.ExcludeSelectors,
+				MaxDepth:         site.MaxDepth,
+				AllowedPaths:     site.AllowedPaths,
+				ExcludePaths:     site.ExcludePaths,
+				OutputAlias:      site.OutputAlias,
+				PathOverrides:    convertPathOverrides(site.PathOverrides),
+			}
+			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
+				i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
+		}
+	} else {
+		// Use command-line URLs
+		if len(urls) == 0 {
+			logger.Println("Error: No URLs provided via --urls flag")
+			return fmt.Errorf("no URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
+		}
+		siteConfigs = make([]scraper.SiteConfig, len(urls))
+		for i, u := range urls {
+			siteConfigs[i] = scraper.SiteConfig{
+				BaseURL:          u,
+				CSSLocator:       includeSelector,
+				ExcludeSelectors: excludeSelectors,
+				MaxDepth:         depth,
+				AllowedPaths:     []string{"/"}, // Allow all paths by default
+			}
+			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
+				i+1, u, includeSelector, depth)
+		}
+	}
+
+	// Set up scraper configuration
+	scraperConfig := scraper.Config{
+		Sites:      siteConfigs,
+		OutputType: outputType,
+		Verbose:    verbose,
+		Scrape: scraper.ScrapeConfig{
+			RequestsPerSecond: cfg.Scrape.RequestsPerSecond,
+			BurstLimit:        cfg.Scrape.BurstLimit,
+		},
+	}
+	logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
+		outputType, scraperConfig.Scrape.RequestsPerSecond, scraperConfig.Scrape.BurstLimit)
+
+	// Validate scrape configuration
+	err := validateScrapeConfig(cfg.Scrape)
+	if err != nil {
+		logger.Printf("Invalid scrape configuration: %v", err)
+		return err
+	}
+
+	// Start scraping using scraper.ScrapeSites
+	logger.Println("Starting scraping process")
+	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
+	if err != nil {
+		logger.Printf("Error occurred during scraping: %v", err)
+		return fmt.Errorf("error scraping content: %v", err)
+	}
+	logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))
+
+	// Write output to files
+	if outputType == "single" {
+		logger.Println("Writing content to a single file")
+		return writeSingleFile(scrapedContent)
+	} else {
+		logger.Println("Writing content to multiple files")
+		return writeMultipleFiles(scrapedContent)
+	}
 }
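
Note that `runWeb` validates `cfg.Scrape` after `Load` has already replaced non-positive values with defaults (see the config diff below), so for configs loaded through `Load` the check acts as a safety net. A table-driven test sketch for the new helper, using a local stand-in struct since the real `config.ScrapeConfig` import path is not shown in this diff:

```go
package main

import (
    "fmt"
    "testing"
)

// scrapeConfig is a hypothetical stand-in for config.ScrapeConfig.
type scrapeConfig struct {
    RequestsPerSecond float64
    BurstLimit        int
}

func validateScrapeConfig(c scrapeConfig) error {
    if c.RequestsPerSecond <= 0 {
        return fmt.Errorf("requests_per_second must be greater than 0")
    }
    if c.BurstLimit <= 0 {
        return fmt.Errorf("burst_limit must be greater than 0")
    }
    return nil
}

func TestValidateScrapeConfig(t *testing.T) {
    cases := []struct {
        name    string
        cfg     scrapeConfig
        wantErr bool
    }{
        {"valid", scrapeConfig{1.0, 5}, false},
        {"zero rps", scrapeConfig{0, 5}, true},
        {"negative burst", scrapeConfig{1.0, -1}, true},
    }
    for _, tc := range cases {
        t.Run(tc.name, func(t *testing.T) {
            if err := validateScrapeConfig(tc.cfg); (err != nil) != tc.wantErr {
                t.Fatalf("err = %v, wantErr = %v", err, tc.wantErr)
            }
        })
    }
}
```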
 func writeSingleFile(content map[string]string) error {
@@ -151,11 +154,7 @@ func writeSingleFile(content map[string]string) error {
 func writeMultipleFiles(content map[string]string) error {
 	for url, c := range content {
-		filename, err := getFilenameFromContent(c, url)
-		if err != nil {
-			return fmt.Errorf("error generating filename for %s: %v", url, err)
-		}
+		filename := sanitizeFilename(url) + ".rollup.md"
 		file, err := os.Create(filename)
 		if err != nil {
 			return fmt.Errorf("error creating output file %s: %v", filename, err)
@@ -179,106 +178,14 @@ func generateDefaultFilename() string {
return fmt.Sprintf("web-%s.rollup.md", timestamp) return fmt.Sprintf("web-%s.rollup.md", timestamp)
} }
func scrapeRecursively(urlStr string, depth int) (string, error) {
visited := make(map[string]bool)
return scrapeURL(urlStr, depth, visited)
}
func scrapeURL(urlStr string, depth int, visited map[string]bool) (string, error) {
if depth < 0 || visited[urlStr] {
return "", nil
}
visited[urlStr] = true
content, err := testExtractAndConvertContent(urlStr)
if err != nil {
return "", err
}
if depth > 0 {
links, err := testExtractLinks(urlStr)
if err != nil {
return content, fmt.Errorf("error extracting links: %v", err)
}
for _, link := range links {
subContent, err := scrapeURL(link, depth-1, visited)
if err != nil {
fmt.Printf("Warning: Error scraping %s: %v\n", link, err)
continue
}
content += "\n\n---\n\n" + subContent
}
}
return content, nil
}
var (
testExtractAndConvertContent = extractAndConvertContent
testExtractLinks = scraper.ExtractLinks
)
func extractAndConvertContent(urlStr string) (string, error) {
content, err := scraper.FetchWebpageContent(urlStr)
if err != nil {
return "", fmt.Errorf("error fetching webpage content: %v", err)
}
if includeSelector != "" {
content, err = scraper.ExtractContentWithCSS(content, includeSelector, excludeSelectors)
if err != nil {
return "", fmt.Errorf("error extracting content with CSS: %v", err)
}
}
markdown, err := scraper.ProcessHTMLContent(content, scraper.Config{})
if err != nil {
return "", fmt.Errorf("error processing HTML content: %v", err)
}
parsedURL, err := url.Parse(urlStr)
if err != nil {
return "", fmt.Errorf("error parsing URL: %v", err)
}
header := fmt.Sprintf("# ::: Content from %s\n\n", parsedURL.String())
return header + markdown + "\n\n", nil
}
func getFilenameFromContent(content, urlStr string) (string, error) {
// Try to extract title from content
titleStart := strings.Index(content, "<title>")
titleEnd := strings.Index(content, "</title>")
if titleStart != -1 && titleEnd != -1 && titleEnd > titleStart {
title := strings.TrimSpace(content[titleStart+7 : titleEnd])
if title != "" {
return sanitizeFilename(title) + ".rollup.md", nil
}
}
// If no title found or title is empty, use the URL
parsedURL, err := url.Parse(urlStr)
if err != nil {
return "", fmt.Errorf("invalid URL: %v", err)
}
if parsedURL.Host == "" {
return "", fmt.Errorf("invalid URL: missing host")
}
filename := parsedURL.Host
if parsedURL.Path != "" && parsedURL.Path != "/" {
filename += strings.TrimSuffix(parsedURL.Path, "/")
}
return sanitizeFilename(filename) + ".rollup.md", nil
}
func sanitizeFilename(name string) string { func sanitizeFilename(name string) string {
// Remove any character that isn't alphanumeric, dash, or underscore // Remove any character that isn't alphanumeric, dash, or underscore
reg := regexp.MustCompile("[^a-zA-Z0-9-_]+") name = strings.Map(func(r rune) rune {
name = reg.ReplaceAllString(name, "_") if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' {
return r
}
return '_'
}, name)
// Trim any leading or trailing underscores // Trim any leading or trailing underscores
name = strings.Trim(name, "_") name = strings.Trim(name, "_")
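
The `strings.Map` rewrite drops the `regexp` dependency, but it is not byte-for-byte equivalent: the old pattern `[^a-zA-Z0-9-_]+` collapsed a whole run of disallowed characters into a single underscore, while `strings.Map` replaces each character individually. A quick comparison sketch:

```go
package main

import (
    "fmt"
    "regexp"
    "strings"
)

// sanitizeMap mirrors the new strings.Map implementation.
func sanitizeMap(name string) string {
    name = strings.Map(func(r rune) rune {
        if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' {
            return r
        }
        return '_'
    }, name)
    return strings.Trim(name, "_")
}

// sanitizeRegexp mirrors the old regexp implementation.
func sanitizeRegexp(name string) string {
    reg := regexp.MustCompile("[^a-zA-Z0-9-_]+")
    return strings.Trim(reg.ReplaceAllString(name, "_"), "_")
}

func main() {
    s := "https://example.com/docs"
    fmt.Println(sanitizeMap(s))    // https___example_com_docs
    fmt.Println(sanitizeRegexp(s)) // https_example_com_docs
}
```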

View File

@@ -30,6 +30,7 @@ type SiteConfig struct {
 	ExcludePaths  []string       `yaml:"exclude_paths"`
 	OutputAlias   string         `yaml:"output_alias"`
 	PathOverrides []PathOverride `yaml:"path_overrides"`
+	LinksContainerSelector string `yaml:"links_container_selector"`
 }
 
 type PathOverride struct {
@@ -50,6 +51,13 @@ func Load(configPath string) (*Config, error) {
 		return nil, fmt.Errorf("error parsing config file: %v", err)
 	}
 
+	// Set default values if they are zero or missing
+	if config.Scrape.RequestsPerSecond <= 0 {
+		config.Scrape.RequestsPerSecond = 1.0
+	}
+	if config.Scrape.BurstLimit <= 0 {
+		config.Scrape.BurstLimit = 5
+	}
+
 	return &config, nil
 }
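
Because the defaults kick in whenever the parsed value is `<= 0`, an explicit `burst_limit: 0` in `rollup.yml` is indistinguishable from an omitted key; both are silently promoted. A small sketch of that behavior, assuming `gopkg.in/yaml.v3` (the repo's actual YAML library is not visible in this hunk):

```go
package main

import (
    "fmt"

    "gopkg.in/yaml.v3"
)

type scrape struct {
    RequestsPerSecond float64 `yaml:"requests_per_second"`
    BurstLimit        int     `yaml:"burst_limit"`
}

func main() {
    var s scrape
    // burst_limit is explicitly 0, requests_per_second is omitted.
    if err := yaml.Unmarshal([]byte("burst_limit: 0\n"), &s); err != nil {
        panic(err)
    }

    // Mirror the defaulting in Load: both fields end up with defaults.
    if s.RequestsPerSecond <= 0 {
        s.RequestsPerSecond = 1.0
    }
    if s.BurstLimit <= 0 {
        s.BurstLimit = 5
    }
    fmt.Printf("%+v\n", s) // {RequestsPerSecond:1 BurstLimit:5}
}
```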

View File

@@ -50,6 +50,7 @@ type SiteConfig struct {
 	ExcludePaths  []string
 	OutputAlias   string
 	PathOverrides []PathOverride
+	LinksContainerSelector string
 }
 
 // PathOverride holds path-specific overrides
@@ -67,21 +68,33 @@ func ScrapeSites(config Config) (map[string]string, error) {
 		err     error
 	})
 
+	// Ensure RequestsPerSecond and BurstLimit are valid
+	if config.Scrape.RequestsPerSecond <= 0 {
+		config.Scrape.RequestsPerSecond = 1.0
+	}
+	if config.Scrape.BurstLimit <= 0 {
+		config.Scrape.BurstLimit = 5
+	}
+
 	limiter := rate.NewLimiter(rate.Limit(config.Scrape.RequestsPerSecond), config.Scrape.BurstLimit)
 	logger.Printf("Rate limiter configured with %f requests per second and burst limit of %d\n", config.Scrape.RequestsPerSecond, config.Scrape.BurstLimit)
 
 	var wg sync.WaitGroup
 	totalURLs := 0
+	var mu sync.Mutex
 	for _, site := range config.Sites {
 		logger.Printf("Processing site: %s\n", site.BaseURL)
 		wg.Add(1)
 		go func(site SiteConfig) {
 			defer wg.Done()
+			visited := make(map[string]bool)
 			for _, path := range site.AllowedPaths {
 				fullURL := site.BaseURL + path
+				mu.Lock()
 				totalURLs++
+				mu.Unlock()
 				logger.Printf("Queueing URL for scraping: %s\n", fullURL)
-				scrapeSingleURL(fullURL, site, results, limiter)
+				scrapeSingleURL(fullURL, site, results, limiter, visited, 0)
 			}
 		}(site)
 	}
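
`totalURLs` is now guarded by `mu`, but the per-site `visited` map is handed to `scrapeSingleURL`, which in a later hunk also writes to it from newly spawned goroutines; a plain `map[string]bool` is not safe for that. A minimal sketch of a mutex-guarded visited set in the same spirit as the counter fix:

```go
package main

import (
    "fmt"
    "sync"
)

// visitedSet wraps the map with a mutex so concurrent scrapers can share it.
type visitedSet struct {
    mu   sync.Mutex
    seen map[string]bool
}

func newVisitedSet() *visitedSet {
    return &visitedSet{seen: make(map[string]bool)}
}

// MarkVisited returns true on the first visit to url and false afterwards.
func (v *visitedSet) MarkVisited(url string) bool {
    v.mu.Lock()
    defer v.mu.Unlock()
    if v.seen[url] {
        return false
    }
    v.seen[url] = true
    return true
}

func main() {
    v := newVisitedSet()
    var wg sync.WaitGroup
    for i := 0; i < 4; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            // Exactly one of the four goroutines observes true.
            fmt.Println(v.MarkVisited("https://example.com/"))
        }()
    }
    wg.Wait()
}
```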
@@ -112,8 +125,16 @@ func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
 	url     string
 	content string
 	err     error
-}, limiter *rate.Limiter,
-) {
+}, limiter *rate.Limiter, visited map[string]bool, currentDepth int) {
+	if site.MaxDepth > 0 && currentDepth > site.MaxDepth {
+		return
+	}
+	if visited[url] {
+		return
+	}
+	visited[url] = true
+
 	logger.Printf("Starting to scrape URL: %s\n", url)
 
 	// Wait for rate limiter before making the request
@@ -128,13 +149,9 @@ func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
 		return
 	}
 
-	cssLocator, excludeSelectors := getOverrides(url, site)
-	logger.Printf("Using CSS locator for %s: %s\n", url, cssLocator)
-	logger.Printf("Exclude selectors for %s: %v\n", url, excludeSelectors)
-
-	content, err := scrapeURL(url, cssLocator, excludeSelectors)
+	content, err := FetchWebpageContent(url)
 	if err != nil {
-		logger.Printf("Error scraping %s: %v\n", url, err)
+		logger.Printf("Error fetching content for %s: %v\n", url, err)
 		results <- struct {
 			url     string
 			content string
@@ -143,17 +160,60 @@ func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
 		return
 	}
 
-	if content == "" {
+	doc, err := goquery.NewDocumentFromReader(strings.NewReader(content))
+	if err != nil {
+		logger.Printf("Error parsing HTML for %s: %v\n", url, err)
+		results <- struct {
+			url     string
+			content string
+			err     error
+		}{url, "", fmt.Errorf("error parsing HTML: %v", err)}
+		return
+	}
+
+	if site.LinksContainerSelector != "" {
+		logger.Printf("Processing links container for %s\n", url)
+		linkContainers := doc.Find(site.LinksContainerSelector)
+		linkContainers.Each(func(i int, container *goquery.Selection) {
+			container.Find("a[href]").Each(func(j int, link *goquery.Selection) {
+				href, exists := link.Attr("href")
+				if exists {
+					resolvedURL := resolveURL(href, url)
+					if isAllowedURL(resolvedURL, site) && !visited[resolvedURL] {
+						go scrapeSingleURL(resolvedURL, site, results, limiter, visited, currentDepth+1)
+					}
+				}
+			})
+		})
+		return
+	}
+
+	cssLocator, excludeSelectors := getOverrides(url, site)
+	logger.Printf("Using CSS locator for %s: %s\n", url, cssLocator)
+	logger.Printf("Exclude selectors for %s: %v\n", url, excludeSelectors)
+
+	extractedContent, err := ExtractContentWithCSS(content, cssLocator, excludeSelectors)
+	if err != nil {
+		logger.Printf("Error extracting content for %s: %v\n", url, err)
+		results <- struct {
+			url     string
+			content string
+			err     error
+		}{url, "", err}
+		return
+	}
+
+	if extractedContent == "" {
 		logger.Printf("Warning: Empty content scraped from %s\n", url)
 	} else {
-		logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(content))
+		logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(extractedContent))
 	}
 
 	results <- struct {
 		url     string
 		content string
 		err     error
-	}{url, content, nil}
+	}{url, extractedContent, nil}
 }
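
For readers unfamiliar with goquery, the links-container pass above boils down to: parse the page, scope the search to the configured container selector, collect `href` attributes, and resolve them against the page URL. A self-contained sketch of that flow (the HTML and the `.toc` selector are made-up examples):

```go
package main

import (
    "fmt"
    "net/url"
    "strings"

    "github.com/PuerkitoBio/goquery"
)

func main() {
    html := `<nav class="toc"><a href="/docs/a">A</a><a href="/docs/b">B</a></nav>
             <footer><a href="/legal">Legal</a></footer>`
    base, _ := url.Parse("https://example.com/docs/")

    doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
    if err != nil {
        panic(err)
    }
    // Only links inside the container are collected; the footer is skipped.
    doc.Find(".toc").Find("a[href]").Each(func(_ int, link *goquery.Selection) {
        if href, ok := link.Attr("href"); ok {
            if ref, err := url.Parse(href); err == nil {
                fmt.Println(base.ResolveReference(ref))
                // https://example.com/docs/a
                // https://example.com/docs/b
            }
        }
    })
}
```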
 func scrapeSite(site SiteConfig, results chan<- struct {
@@ -220,18 +280,29 @@ func isAllowedURL(urlStr string, site SiteConfig) bool {
 	}
 	path := parsedURL.Path
 
-	for _, allowedPath := range site.AllowedPaths {
-		if strings.HasPrefix(path, allowedPath) {
-			for _, excludePath := range site.ExcludePaths {
-				if strings.HasPrefix(path, excludePath) {
-					return false
-				}
-			}
-			return true
-		}
-	}
-	return false
+	// Check if the URL is within allowed paths
+	if len(site.AllowedPaths) > 0 {
+		allowed := false
+		for _, allowedPath := range site.AllowedPaths {
+			if strings.HasPrefix(path, allowedPath) {
+				allowed = true
+				break
+			}
+		}
+		if !allowed {
+			return false
+		}
+	}
+
+	// Check if the URL is in excluded paths
+	for _, excludePath := range site.ExcludePaths {
+		if strings.HasPrefix(path, excludePath) {
+			return false
+		}
+	}
+
+	return true
 }
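
The rework also changes semantics: previously an empty `allowed_paths` list rejected every URL, whereas now it means "allow everything that is not excluded", and exclusions always win. A standalone sketch of the new decision logic:

```go
package main

import (
    "fmt"
    "strings"
)

// allowed reproduces the reworked isAllowedURL check on a bare path.
func allowed(path string, allowedPaths, excludePaths []string) bool {
    if len(allowedPaths) > 0 {
        ok := false
        for _, p := range allowedPaths {
            if strings.HasPrefix(path, p) {
                ok = true
                break
            }
        }
        if !ok {
            return false
        }
    }
    for _, p := range excludePaths {
        if strings.HasPrefix(path, p) {
            return false
        }
    }
    return true
}

func main() {
    fmt.Println(allowed("/docs/x", []string{"/docs"}, nil))                            // true
    fmt.Println(allowed("/blog/x", []string{"/docs"}, nil))                            // false
    fmt.Println(allowed("/docs/private", []string{"/docs"}, []string{"/docs/private"})) // false: exclude wins
    fmt.Println(allowed("/anything", nil, nil))                                        // true: no allow list
}
```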
 func getOverrides(urlStr string, site SiteConfig) (string, []string) {
@@ -592,3 +663,14 @@ func ExtractContentWithCSS(content, includeSelector string, excludeSelectors []s
logger.Printf("Extracted content length: %d\n", len(selectedContent)) logger.Printf("Extracted content length: %d\n", len(selectedContent))
return selectedContent, nil return selectedContent, nil
} }
func resolveURL(href, base string) string {
parsedBase, err := url.Parse(base)
if err != nil {
return href
}
parsedHref, err := url.Parse(href)
if err != nil {
return href
}
return parsedBase.ResolveReference(parsedHref).String()
}
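
`resolveURL` delegates to `net/url`'s `ResolveReference`, which implements RFC 3986 resolution for relative references, absolute paths, and already-absolute URLs. A behavior sketch using a local copy of the function:

```go
package main

import (
    "fmt"
    "net/url"
)

func resolveURL(href, base string) string {
    parsedBase, err := url.Parse(base)
    if err != nil {
        return href
    }
    parsedHref, err := url.Parse(href)
    if err != nil {
        return href
    }
    return parsedBase.ResolveReference(parsedHref).String()
}

func main() {
    base := "https://example.com/docs/page.html"
    for _, href := range []string{"other.html", "../img/x.png", "/root", "https://other.org/"} {
        fmt.Println(resolveURL(href, base))
    }
    // https://example.com/docs/other.html
    // https://example.com/img/x.png
    // https://example.com/root
    // https://other.org/
}
```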