mirror of https://github.com/tnypxl/rollup.git
Fix logging and other issues preventing scraping
internal/config/config.go
@@ -15,15 +15,27 @@ type Config struct {
 }
 
 type ScrapeConfig struct {
 	URLs              []URLConfig  `yaml:"urls"`
-	OutputType string `yaml:"output_type"`
+	Sites             []SiteConfig `yaml:"sites"`
+	OutputType        string       `yaml:"output_type"`
+	RequestsPerSecond float64      `yaml:"requests_per_second"`
+	BurstLimit        int          `yaml:"burst_limit"`
 }
 
 type URLConfig struct {
 	URL              string   `yaml:"url"`
 	CSSLocator       string   `yaml:"css_locator"`
 	ExcludeSelectors []string `yaml:"exclude_selectors"`
 	OutputAlias      string   `yaml:"output_alias"`
 }
 
+type SiteConfig struct {
+	BaseURL          string         `yaml:"base_url"`
+	CSSLocator       string         `yaml:"css_locator"`
+	ExcludeSelectors []string       `yaml:"exclude_selectors"`
+	MaxDepth         int            `yaml:"max_depth"`
+	AllowedPaths     []string       `yaml:"allowed_paths"`
+	ExcludePaths     []string       `yaml:"exclude_paths"`
+	OutputAlias      string         `yaml:"output_alias"`
+	PathOverrides    []PathOverride `yaml:"path_overrides"`
+}
+
+type PathOverride struct {
+	Path             string   `yaml:"path"`
+	CSSLocator       string   `yaml:"css_locator"`
+	ExcludeSelectors []string `yaml:"exclude_selectors"`
+}
 
 func Load(configPath string) (*Config, error) {
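For orientation, here is a minimal sketch of how a caller might consume the new fields, assuming Load parses a rollup.yml matching the schema above. The main-package wiring and the import path are assumptions, not part of the commit:

package main

import (
	"fmt"
	"log"

	"github.com/tnypxl/rollup/internal/config" // import path assumed from the repo layout
)

func main() {
	// DefaultConfigPath() returns "rollup.yml" per the tests below.
	cfg, err := config.Load(config.DefaultConfigPath())
	if err != nil {
		log.Fatalf("loading config: %v", err)
	}
	fmt.Printf("rate: %.1f req/s, burst: %d\n", cfg.Scrape.RequestsPerSecond, cfg.Scrape.BurstLimit)
	for _, site := range cfg.Scrape.Sites {
		fmt.Println("site:", site.BaseURL, "locator:", site.CSSLocator)
	}
}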
internal/config/config_test.go (new file, 120 lines)
@@ -0,0 +1,120 @@
package config

import (
	"os"
	"reflect"
	"testing"
)

func TestLoad(t *testing.T) {
	// Create a temporary config file
	content := []byte(`
file_types:
  - go
  - md
ignore:
  - "*.tmp"
  - "**/*.log"
code_generated:
  - "generated_*.go"
scrape:
  sites:
    - base_url: "https://example.com"
      css_locator: "main"
      exclude_selectors:
        - ".ads"
      max_depth: 2
      allowed_paths:
        - "/blog"
      exclude_paths:
        - "/admin"
      output_alias: "example"
      path_overrides:
        - path: "/special"
          css_locator: ".special-content"
          exclude_selectors:
            - ".sidebar"
  output_type: "single"
  requests_per_second: 1.0
  burst_limit: 5
`)

	tmpfile, err := os.CreateTemp("", "config*.yml")
	if err != nil {
		t.Fatalf("Failed to create temp file: %v", err)
	}
	defer os.Remove(tmpfile.Name())

	if _, err := tmpfile.Write(content); err != nil {
		t.Fatalf("Failed to write to temp file: %v", err)
	}
	if err := tmpfile.Close(); err != nil {
		t.Fatalf("Failed to close temp file: %v", err)
	}

	// Test loading the config
	config, err := Load(tmpfile.Name())
	if err != nil {
		t.Fatalf("Load() failed: %v", err)
	}

	// Check if the loaded config matches the expected values
	expectedConfig := &Config{
		FileTypes:     []string{"go", "md"},
		Ignore:        []string{"*.tmp", "**/*.log"},
		CodeGenerated: []string{"generated_*.go"},
		Scrape: ScrapeConfig{
			Sites: []SiteConfig{
				{
					BaseURL:          "https://example.com",
					CSSLocator:       "main",
					ExcludeSelectors: []string{".ads"},
					MaxDepth:         2,
					AllowedPaths:     []string{"/blog"},
					ExcludePaths:     []string{"/admin"},
					OutputAlias:      "example",
					PathOverrides: []PathOverride{
						{
							Path:             "/special",
							CSSLocator:       ".special-content",
							ExcludeSelectors: []string{".sidebar"},
						},
					},
				},
			},
			OutputType:        "single",
			RequestsPerSecond: 1.0,
			BurstLimit:        5,
		},
	}

	if !reflect.DeepEqual(config, expectedConfig) {
		t.Errorf("Loaded config does not match expected config.\nGot: %+v\nWant: %+v", config, expectedConfig)
	}
}

func TestDefaultConfigPath(t *testing.T) {
	expected := "rollup.yml"
	result := DefaultConfigPath()
	if result != expected {
		t.Errorf("DefaultConfigPath() = %q, want %q", result, expected)
	}
}

func TestFileExists(t *testing.T) {
	// Test with an existing file
	tmpfile, err := os.CreateTemp("", "testfile")
	if err != nil {
		t.Fatalf("Failed to create temp file: %v", err)
	}
	defer os.Remove(tmpfile.Name())

	if !FileExists(tmpfile.Name()) {
		t.Errorf("FileExists(%q) = false, want true", tmpfile.Name())
	}

	// Test with a non-existing file
	if FileExists("non_existing_file.txt") {
		t.Errorf("FileExists(\"non_existing_file.txt\") = true, want false")
	}
}
internal/scraper/scraper.go
@@ -5,13 +5,18 @@ import (
 	"io/ioutil"
 	"log"
 	"math/rand"
 	"net/url"
 	"os"
 	"regexp"
 	"strings"
 	"time"
+	"sync"
+	"context"
 
 	"github.com/PuerkitoBio/goquery"
 	"github.com/playwright-community/playwright-go"
 	md "github.com/JohannesKaufmann/html-to-markdown"
+	"golang.org/x/time/rate"
 )
 
 var logger *log.Logger
@@ -23,57 +28,240 @@ var (
 
 // Config holds the scraper configuration
 type Config struct {
 	URLs       []URLConfig
 	Sites      []SiteConfig
 	OutputType string
 	Verbose    bool
+	Scrape     ScrapeConfig
 }
-// ScrapeMultipleURLs scrapes multiple URLs concurrently
-func ScrapeMultipleURLs(config Config) (map[string]string, error) {
-	results := make(chan struct {
-		url     string
-		content string
-		err     error
-	}, len(config.URLs))
-
-	for _, urlConfig := range config.URLs {
-		go func(cfg URLConfig) {
-			content, err := scrapeURL(cfg)
-			results <- struct {
-				url     string
-				content string
-				err     error
-			}{cfg.URL, content, err}
-		}(urlConfig)
-	}
-
-	scrapedContent := make(map[string]string)
-	for i := 0; i < len(config.URLs); i++ {
-		result := <-results
-		if result.err != nil {
-			logger.Printf("Error scraping %s: %v\n", result.url, result.err)
-			continue
-		}
-		scrapedContent[result.url] = result.content
-	}
-
-	return scrapedContent, nil
+// ScrapeConfig holds the scraping-specific configuration
+type ScrapeConfig struct {
+	RequestsPerSecond float64
+	BurstLimit        int
 }
 
-func scrapeURL(config URLConfig) (string, error) {
-	content, err := FetchWebpageContent(config.URL)
-	if err != nil {
-		return "", err
-	}
+// SiteConfig holds configuration for a single site
+type SiteConfig struct {
+	BaseURL          string
+	CSSLocator       string
+	ExcludeSelectors []string
+	MaxDepth         int
+	AllowedPaths     []string
+	ExcludePaths     []string
+	OutputAlias      string
+	PathOverrides    []PathOverride
 }
 
-	if config.CSSLocator != "" {
-		content, err = ExtractContentWithCSS(content, config.CSSLocator, config.ExcludeSelectors)
-		if err != nil {
-			return "", err
-		}
-	}
+// PathOverride holds path-specific overrides
+type PathOverride struct {
+	Path             string
+	CSSLocator       string
+	ExcludeSelectors []string
 }
 
-	return ProcessHTMLContent(content, Config{})
-}
+func ScrapeSites(config Config) (map[string]string, error) {
+	logger.Println("Starting ScrapeSites function - Verbose mode is active")
+	results := make(chan struct {
+		url     string
+		content string
+		err     error
+	})
+
+	limiter := rate.NewLimiter(rate.Limit(config.Scrape.RequestsPerSecond), config.Scrape.BurstLimit)
+	logger.Printf("Rate limiter configured with %f requests per second and burst limit of %d\n", config.Scrape.RequestsPerSecond, config.Scrape.BurstLimit)
+
+	var wg sync.WaitGroup
+	totalURLs := 0
+	for _, site := range config.Sites {
+		logger.Printf("Processing site: %s\n", site.BaseURL)
+		wg.Add(1)
+		go func(site SiteConfig) {
+			defer wg.Done()
+			for _, path := range site.AllowedPaths {
+				fullURL := site.BaseURL + path
+				totalURLs++
+				logger.Printf("Queueing URL for scraping: %s\n", fullURL)
+				scrapeSingleURL(fullURL, site, config, results, limiter)
+			}
+		}(site)
+	}
+
+	go func() {
+		wg.Wait()
+		close(results)
+		logger.Println("All goroutines completed, results channel closed")
+	}()
+
+	scrapedContent := make(map[string]string)
+	for result := range results {
+		if result.err != nil {
+			logger.Printf("Error scraping %s: %v\n", result.url, result.err)
+			continue
+		}
+		logger.Printf("Successfully scraped content from %s (length: %d)\n", result.url, len(result.content))
+		scrapedContent[result.url] = result.content
+	}
+
+	logger.Printf("Total URLs processed: %d\n", totalURLs)
+	logger.Printf("Successfully scraped content from %d URLs\n", len(scrapedContent))
+
+	return scrapedContent, nil
+}
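The throttling above uses golang.org/x/time/rate: a limiter built with rate.NewLimiter(rate.Limit(r), b) permits bursts of up to b requests and refills tokens at r per second, and Wait blocks until a token is available. A standalone sketch of those semantics, with illustrative values not taken from the commit:

package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	// 2 requests per second with a burst of 3 (illustrative values).
	limiter := rate.NewLimiter(rate.Limit(2), 3)
	start := time.Now()
	for i := 1; i <= 6; i++ {
		// Wait blocks until the limiter allows another event.
		if err := limiter.Wait(context.Background()); err != nil {
			fmt.Println("limiter error:", err)
			return
		}
		fmt.Printf("request %d at %v\n", i, time.Since(start).Round(100*time.Millisecond))
	}
}

The first three requests pass immediately on the initial burst; the remainder arrive roughly 500 ms apart.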
+func scrapeSingleURL(url string, site SiteConfig, config Config, results chan<- struct {
+	url     string
+	content string
+	err     error
+}, limiter *rate.Limiter) {
+	logger.Printf("Starting to scrape URL: %s\n", url)
+
+	// Wait for rate limiter before making the request
+	err := limiter.Wait(context.Background())
+	if err != nil {
+		logger.Printf("Rate limiter error for %s: %v\n", url, err)
+		results <- struct {
+			url     string
+			content string
+			err     error
+		}{url, "", fmt.Errorf("rate limiter error: %v", err)}
+		return
+	}
+
+	cssLocator, excludeSelectors := getOverrides(url, site)
+	logger.Printf("Using CSS locator for %s: %s\n", url, cssLocator)
+	logger.Printf("Exclude selectors for %s: %v\n", url, excludeSelectors)
+
+	content, err := scrapeURL(url, cssLocator, excludeSelectors)
+	if err != nil {
+		logger.Printf("Error scraping %s: %v\n", url, err)
+		results <- struct {
+			url     string
+			content string
+			err     error
+		}{url, "", err}
+		return
+	}
+
+	if content == "" {
+		logger.Printf("Warning: Empty content scraped from %s\n", url)
+	} else {
+		logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(content))
+	}
+
+	results <- struct {
+		url     string
+		content string
+		err     error
+	}{url, content, nil}
+}
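A small design note: the result payload is an anonymous struct that must be restated at every declaration and send site, as above. A named type would express the same channel protocol once; a hypothetical sketch, where the name scrapeResult is mine and not from the commit:

// Hypothetical refactor, not part of the commit.
type scrapeResult struct {
	url     string
	content string
	err     error
}

// The signature would then read:
// func scrapeSingleURL(url string, site SiteConfig, config Config, results chan<- scrapeResult, limiter *rate.Limiter)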
+func scrapeSite(site SiteConfig, config Config, results chan<- struct {
+	url     string
+	content string
+	err     error
+}, limiter *rate.Limiter) {
+	visited := make(map[string]bool)
+	queue := []string{site.BaseURL}
+
+	for len(queue) > 0 {
+		url := queue[0]
+		queue = queue[1:]
+
+		if visited[url] {
+			continue
+		}
+		visited[url] = true
+
+		if !isAllowedURL(url, site) {
+			continue
+		}
+
+		// Wait for rate limiter before making the request
+		err := limiter.Wait(context.Background())
+		if err != nil {
+			results <- struct {
+				url     string
+				content string
+				err     error
+			}{url, "", fmt.Errorf("rate limiter error: %v", err)}
+			continue
+		}
+
+		cssLocator, excludeSelectors := getOverrides(url, site)
+		content, err := scrapeURL(url, cssLocator, excludeSelectors)
+		results <- struct {
+			url     string
+			content string
+			err     error
+		}{url, content, err}
+
+		if len(visited) < site.MaxDepth {
+			links, _ := ExtractLinks(url)
+			for _, link := range links {
+				if !visited[link] && isAllowedURL(link, site) {
+					queue = append(queue, link)
+				}
+			}
+		}
+	}
+}
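Note that the crawl bound compares len(visited), the number of pages seen so far, against site.MaxDepth, so MaxDepth effectively caps the total page count rather than link depth. If per-link depth were intended, the queue would need to carry it; a hypothetical sketch, not part of the commit:

// Hypothetical queue entry if true link depth were tracked.
type queueItem struct {
	url   string
	depth int // hops from BaseURL; enqueue links only while depth < site.MaxDepth
}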
+func isAllowedURL(urlStr string, site SiteConfig) bool {
+	parsedURL, err := url.Parse(urlStr)
+	if err != nil {
+		return false
+	}
+
+	baseURL, _ := url.Parse(site.BaseURL)
+	if parsedURL.Host != baseURL.Host {
+		return false
+	}
+
+	path := parsedURL.Path
+	for _, allowedPath := range site.AllowedPaths {
+		if strings.HasPrefix(path, allowedPath) {
+			for _, excludePath := range site.ExcludePaths {
+				if strings.HasPrefix(path, excludePath) {
+					return false
+				}
+			}
+			return true
+		}
+	}
+
+	return false
+}
+func getOverrides(urlStr string, site SiteConfig) (string, []string) {
+	parsedURL, _ := url.Parse(urlStr)
+	path := parsedURL.Path
+
+	for _, override := range site.PathOverrides {
+		if strings.HasPrefix(path, override.Path) {
+			if override.CSSLocator != "" {
+				return override.CSSLocator, override.ExcludeSelectors
+			}
+			return site.CSSLocator, override.ExcludeSelectors
+		}
+	}
+
+	return site.CSSLocator, site.ExcludeSelectors
+}
+func scrapeURL(url, cssLocator string, excludeSelectors []string) (string, error) {
+	content, err := FetchWebpageContent(url)
+	if err != nil {
+		return "", err
+	}
+
+	if cssLocator != "" {
+		content, err = ExtractContentWithCSS(content, cssLocator, excludeSelectors)
+		if err != nil {
+			return "", err
+		}
+	}
+
+	return ProcessHTMLContent(content, Config{})
+}
+
 func getFilenameFromContent(content, url string) string {
@@ -106,7 +294,7 @@ type URLConfig struct {
 
 // SetupLogger initializes the logger based on the verbose flag
 func SetupLogger(verbose bool) {
 	if verbose {
-		logger = log.New(log.Writer(), "SCRAPER: ", log.LstdFlags)
+		logger = log.New(os.Stdout, "SCRAPER: ", log.LstdFlags)
 	} else {
 		logger = log.New(ioutil.Discard, "", 0)
 	}
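This one-line swap is the logging fix named in the commit title: log.Writer() returns the destination of the standard logger, which defaults to os.Stderr, so verbose scraper output previously landed on stderr; it is now pinned to stdout. A minimal sketch of the difference, assuming no prior log.SetOutput call:

package main

import (
	"log"
	"os"
)

func main() {
	// Before: log.Writer() is the standard logger's destination, os.Stderr by default.
	before := log.New(log.Writer(), "SCRAPER: ", log.LstdFlags)
	// After: verbose output goes to stdout explicitly.
	after := log.New(os.Stdout, "SCRAPER: ", log.LstdFlags)
	before.Println("written to stderr (default)")
	after.Println("written to stdout")
}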
internal/scraper/scraper_test.go (new file, 169 lines)
@@ -0,0 +1,169 @@
package scraper

import (
	"net/http"
	"net/http/httptest"
	"reflect"
	"strings"
	"testing"
)
func TestIsAllowedURL(t *testing.T) {
	site := SiteConfig{
		BaseURL:      "https://example.com",
		AllowedPaths: []string{"/blog", "/products"},
		ExcludePaths: []string{"/admin", "/private"},
	}

	tests := []struct {
		url      string
		expected bool
	}{
		{"https://example.com/blog/post1", true},
		{"https://example.com/products/item1", true},
		{"https://example.com/admin/dashboard", false},
		{"https://example.com/private/data", false},
		{"https://example.com/other/page", false},
		{"https://othersite.com/blog/post1", false},
	}

	for _, test := range tests {
		result := isAllowedURL(test.url, site)
		if result != test.expected {
			t.Errorf("isAllowedURL(%q) = %v, want %v", test.url, result, test.expected)
		}
	}
}

func TestGetOverrides(t *testing.T) {
	site := SiteConfig{
		CSSLocator:       "main",
		ExcludeSelectors: []string{".ads"},
		PathOverrides: []PathOverride{
			{
				Path:             "/special",
				CSSLocator:       ".special-content",
				ExcludeSelectors: []string{".sidebar"},
			},
		},
	}

	tests := []struct {
		url              string
		expectedLocator  string
		expectedExcludes []string
	}{
		{"https://example.com/normal", "main", []string{".ads"}},
		{"https://example.com/special", ".special-content", []string{".sidebar"}},
		{"https://example.com/special/page", ".special-content", []string{".sidebar"}},
	}

	for _, test := range tests {
		locator, excludes := getOverrides(test.url, site)
		if locator != test.expectedLocator {
			t.Errorf("getOverrides(%q) locator = %q, want %q", test.url, locator, test.expectedLocator)
		}
		if !reflect.DeepEqual(excludes, test.expectedExcludes) {
			t.Errorf("getOverrides(%q) excludes = %v, want %v", test.url, excludes, test.expectedExcludes)
		}
	}
}

func TestExtractContentWithCSS(t *testing.T) {
	html := `
<html>
<body>
<main>
<h1>Main Content</h1>
<p>This is the main content.</p>
<div class="ads">Advertisement</div>
</main>
<aside>Sidebar content</aside>
</body>
</html>
`

	tests := []struct {
		includeSelector  string
		excludeSelectors []string
		expected         string
	}{
		{"main", nil, "<h1>Main Content</h1>\n<p>This is the main content.</p>\n<div class=\"ads\">Advertisement</div>"},
		{"main", []string{".ads"}, "<h1>Main Content</h1>\n<p>This is the main content.</p>"},
		{"aside", nil, "Sidebar content"},
	}

	for _, test := range tests {
		result, err := ExtractContentWithCSS(html, test.includeSelector, test.excludeSelectors)
		if err != nil {
			t.Errorf("ExtractContentWithCSS() returned error: %v", err)
			continue
		}
		if strings.TrimSpace(result) != strings.TrimSpace(test.expected) {
			t.Errorf("ExtractContentWithCSS() = %q, want %q", result, test.expected)
		}
	}
}

func TestProcessHTMLContent(t *testing.T) {
	html := `
<html>
<body>
<h1>Test Heading</h1>
<p>This is a <strong>test</strong> paragraph.</p>
<ul>
<li>Item 1</li>
<li>Item 2</li>
</ul>
</body>
</html>
`

	expected := strings.TrimSpace(`
# Test Heading

This is a **test** paragraph.

- Item 1
- Item 2
`)

	result, err := ProcessHTMLContent(html, Config{})
	if err != nil {
		t.Fatalf("ProcessHTMLContent() returned error: %v", err)
	}

	if strings.TrimSpace(result) != expected {
		t.Errorf("ProcessHTMLContent() = %q, want %q", result, expected)
	}
}

func TestExtractLinks(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html")
		w.Write([]byte(`
<html>
<body>
<a href="https://example.com/page1">Page 1</a>
<a href="https://example.com/page2">Page 2</a>
<a href="https://othersite.com">Other Site</a>
</body>
</html>
`))
	}))
	defer server.Close()

	links, err := ExtractLinks(server.URL)
	if err != nil {
		t.Fatalf("ExtractLinks() returned error: %v", err)
	}

	expectedLinks := []string{
		"https://example.com/page1",
		"https://example.com/page2",
		"https://othersite.com",
	}

	if !reflect.DeepEqual(links, expectedLinks) {
		t.Errorf("ExtractLinks() = %v, want %v", links, expectedLinks)
	}
}