Mirror of https://github.com/tnypxl/rollup.git (synced 2025-12-15 15:03:17 +00:00)

Compare commits: link-navig... -> v0.1.0 (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 02e39baf38 | |

README.md (78 changed lines)
@@ -15,7 +15,7 @@ Rollup aggregates the contents of text-based files and webpages into a markdown

- Flexible configuration file support (YAML)
- Automatic generation of default configuration file
- Custom output file naming
- Concurrent processing for improved performance
- Rate limiting for web scraping to respect server resources

## Installation

@@ -63,37 +63,39 @@ rollup [command] [flags]

Rollup can be configured using a YAML file. By default, it looks for `rollup.yml` in the current directory. You can specify a different configuration file using the `--config` flag.

**Scrape Configuration Parameters:**

- `requests_per_second`: *(float, optional)* The rate at which requests are made per second during web scraping. Default is `1.0`.
- `burst_limit`: *(integer, optional)* The maximum number of requests that can be made in a burst. Default is `5`.

These parameters help control the request rate so the scraper does not overload target servers and stays within their rate limits (see the Go sketch after the example below).

**Example `rollup.yml` with Scrape Configuration:**

Example `rollup.yml`:

```yaml
scrape:
  requests_per_second: 1.0
  burst_limit: 5
  sites:
    - base_url: https://example.com
      css_locator: .content
      exclude_selectors:
        - .ads
        - .navigation
      max_depth: 2
      allowed_paths:
        - /blog
        - /docs
      exclude_paths:
        - /admin
      output_alias: example
      path_overrides:
        - path: /special-page
          css_locator: .special-content
          exclude_selectors:
            - .special-ads
file_extensions:
  - go
  - md
ignore_paths:
  - node_modules/**
  - vendor/**
  - .git/**
code_generated_paths:
  - "**/generated/**"
sites:
  - base_url: https://example.com
    css_locator: .content
    exclude_selectors:
      - .ads
      - .navigation
    max_depth: 2
    allowed_paths:
      - /blog
      - /docs
    exclude_paths:
      - /admin
    output_alias: example
    path_overrides:
      - path: /special-page
        css_locator: .special-content
        exclude_selectors:
          - .special-ads
output_type: single
requests_per_second: 1.0
burst_limit: 3
```
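For intuition, here is a minimal, standalone sketch (not code from this repo) of how these two settings map onto the token-bucket limiter in `golang.org/x/time/rate`, which is the package the scraper uses internally:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	// requests_per_second is the steady refill rate of the token bucket;
	// burst_limit is the bucket capacity (how many requests may fire at once).
	limiter := rate.NewLimiter(rate.Limit(1.0), 5)

	for i := 1; i <= 3; i++ {
		// Wait blocks until a token is available (or the context is cancelled).
		if err := limiter.Wait(context.Background()); err != nil {
			fmt.Println("rate limiter error:", err)
			return
		}
		fmt.Printf("request %d at %s\n", i, time.Now().Format("15:04:05.000"))
	}
}
```

With `burst_limit: 5`, the first five requests pass immediately; beyond that, requests are spaced roughly one second apart at `requests_per_second: 1.0`.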
## Examples

@@ -104,10 +106,10 @@ scrape:

rollup files
```

2. Web scraping with multiple URLs and increased concurrency:
2. Web scraping with multiple URLs:

```bash
rollup web --urls=https://example.com,https://another-example.com --concurrent=8
rollup web --urls=https://example.com,https://another-example.com
```

3. Generate a default configuration file:

@@ -116,20 +118,22 @@ scrape:

rollup generate
```

4. Use a custom configuration file and specify output:
4. Use a custom configuration file:

```bash
rollup files --config=my-config.yml --output=project_summary.md
rollup files --config=my-config.yml
```

5. Web scraping with separate output files and custom timeout:
5. Web scraping with separate output files:

```bash
rollup web --urls=https://example.com,https://another-example.com --output=separate --timeout=60
rollup web --urls=https://example.com,https://another-example.com --output=separate
```

6. Rollup files with specific types and ignore patterns:

```bash
rollup files --types=.go,.md --ignore=vendor/**,*_test.go
rollup files --types=go,md --ignore=vendor/**,*_test.go
```

7. Web scraping with depth and CSS selector:

cmd/files.go (64 changed lines)
@@ -108,19 +108,20 @@ func isIgnored(filePath string, patterns []string) bool {

func runRollup(cfg *config.Config) error {
	// Use config if available, otherwise use command-line flags
	var types, codeGenList, ignoreList []string
	if cfg != nil && len(cfg.FileTypes) > 0 {
		types = cfg.FileTypes
	var types []string
	var codeGenList, ignoreList []string
	if cfg != nil && len(cfg.FileExtensions) > 0 {
		types = cfg.FileExtensions
	} else {
		types = strings.Split(fileTypes, ",")
	}
	if cfg != nil && len(cfg.CodeGenerated) > 0 {
		codeGenList = cfg.CodeGenerated
	if cfg != nil && len(cfg.CodeGeneratedPaths) > 0 {
		codeGenList = cfg.CodeGeneratedPaths
	} else {
		codeGenList = strings.Split(codeGenPatterns, ",")
	}
	if cfg != nil && cfg.Ignore != nil && len(cfg.Ignore) > 0 {
		ignoreList = cfg.Ignore
	if cfg != nil && len(cfg.IgnorePaths) > 0 {
		ignoreList = cfg.IgnorePaths
	} else {
		ignoreList = strings.Split(ignorePatterns, ",")
	}

@@ -145,6 +146,11 @@ func runRollup(cfg *config.Config) error {
	}
	defer outputFile.Close()

	startTime := time.Now()
	showProgress := false
	progressTicker := time.NewTicker(500 * time.Millisecond)
	defer progressTicker.Stop()

	// Walk through the directory
	err = filepath.Walk(absPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {

@@ -160,16 +166,25 @@ func runRollup(cfg *config.Config) error {

		// Check if the file should be ignored
		if isIgnored(relPath, ignoreList) {
			if verbose {
				fmt.Printf("Ignoring file: %s\n", relPath)
			}
			return nil
		}

		ext := filepath.Ext(path)
		for _, t := range types {
			if ext == "."+t {
				// Verbose logging for processed file
				if verbose {
					size := humanReadableSize(info.Size())
					fmt.Printf("Processing file: %s (%s)\n", relPath, size)
				}

				// Read file contents
				content, err := os.ReadFile(path)
				if err != nil {
					fmt.Printf("Error reading file %s: %v", path, err)
					fmt.Printf("Error reading file %s: %v\n", path, err)
					return nil
				}

@@ -185,12 +200,43 @@ func runRollup(cfg *config.Config) error {
				break
			}
		}

		if !showProgress && time.Since(startTime) > 5*time.Second {
			showProgress = true
			fmt.Print("This is taking a while (hold tight) ")
		}

		select {
		case <-progressTicker.C:
			if showProgress {
				fmt.Print(".")
			}
		default:
		}

		return nil
	})
	if err != nil {
		return fmt.Errorf("error walking through directory: %v", err)
	}

	fmt.Printf("Rollup complete. Output file: %s", outputFileName)
	if showProgress {
		fmt.Println() // Print a newline after the progress dots
	}

	fmt.Printf("Rollup complete. Output file: %s\n", outputFileName)
	return nil
}

func humanReadableSize(size int64) string {
	const unit = 1024
	if size < unit {
		return fmt.Sprintf("%d B", size)
	}
	div, exp := int64(unit), 0
	for n := size / unit; n >= unit; n /= unit {
		div *= unit
		exp++
	}
	return fmt.Sprintf("%.1f %cB", float64(size)/float64(div), "KMGTPE"[exp])
}
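For reference, a few sample values this helper produces (computed from the function above):

```go
humanReadableSize(512)      // "512 B"
humanReadableSize(1536)     // "1.5 KB"
humanReadableSize(10 << 20) // "10.0 MB"
```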
@@ -112,9 +112,9 @@ func TestRunRollup(t *testing.T) {

	// Set up test configuration
	cfg = &config.Config{
		FileTypes:     []string{"go", "txt", "md"},
		Ignore:        []string{"*.json", ".git/**", "vendor/**"},
		CodeGenerated: []string{"generated_*.go"},
		FileExtensions:     []string{"go", "txt", "md"},
		IgnorePaths:        []string{"*.json", ".git/**", "vendor/**"},
		CodeGeneratedPaths: []string{"generated_*.go"},
	}

	// Change working directory to the temp directory

@@ -38,23 +38,23 @@ func runGenerate(cmd *cobra.Command, args []string) error {
	}

	cfg := config.Config{
		FileTypes: make([]string, 0, len(fileTypes)),
		Ignore:    []string{"node_modules/**", "vendor/**", ".git/**"},
		FileExtensions: make([]string, 0, len(fileTypes)),
		IgnorePaths:    []string{"node_modules/**", "vendor/**", ".git/**"},
	}

	for ext := range fileTypes {
		cfg.FileTypes = append(cfg.FileTypes, ext)
		cfg.FileExtensions = append(cfg.FileExtensions, ext)
	}

	// Sort file types for consistency
	sort.Strings(cfg.FileTypes)
	sort.Strings(cfg.FileExtensions)

	yamlData, err := yaml.Marshal(&cfg)
	if err != nil {
		return fmt.Errorf("error marshaling config: %v", err)
	}

	outputPath := config.DefaultConfigPath()
	outputPath := "rollup.yml"
	err = os.WriteFile(outputPath, yamlData, 0644)
	if err != nil {
		return fmt.Errorf("error writing config file: %v", err)
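For context, the `rollup.yml` this writes would look roughly like the following for a project containing Go and Markdown files, assuming yaml.v2's default rendering of the new field tags (pointer fields with `omitempty` are skipped when unset):

```yaml
file_extensions:
- go
- md
ignore_paths:
- node_modules/**
- vendor/**
- .git/**
code_generated_paths: []
sites: []
output_type: ""
```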
cmd/web.go (282 changed lines)
@@ -4,7 +4,9 @@ import (
	"fmt"
	"io"
	"log"
	"net/url"
	"os"
	"regexp"
	"strings"
	"time"

@@ -16,11 +18,12 @@ import (
var (
	urls             []string
	outputType       string
	depth            int
	includeSelector  string
	excludeSelectors []string
)

var scraperConfig scraper.Config

var webCmd = &cobra.Command{
	Use:   "web",
	Short: "Scrape main content from webpages and convert to Markdown",

@@ -31,106 +34,121 @@ var webCmd = &cobra.Command{
func init() {
	webCmd.Flags().StringSliceVarP(&urls, "urls", "u", []string{}, "URLs of the webpages to scrape (comma-separated)")
	webCmd.Flags().StringVarP(&outputType, "output", "o", "single", "Output type: 'single' for one file, 'separate' for multiple files")
	webCmd.Flags().IntVarP(&depth, "depth", "d", 0, "Depth of link traversal (default: 0, only scrape the given URLs)")
	webCmd.Flags().StringVar(&includeSelector, "css", "", "CSS selector to extract specific content")
	webCmd.Flags().StringSliceVar(&excludeSelectors, "exclude", []string{}, "CSS selectors to exclude from the extracted content (comma-separated)")
}

func validateScrapeConfig(scrapeConfig config.ScrapeConfig) error {
	if scrapeConfig.RequestsPerSecond <= 0 {
		return fmt.Errorf("requests_per_second must be greater than 0")
	}
	if scrapeConfig.BurstLimit <= 0 {
		return fmt.Errorf("burst_limit must be greater than 0")
	}
	return nil
}

func runWeb(cmd *cobra.Command, args []string) error {
	scraper.SetupLogger(verbose)
	logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
	if !verbose {
		logger.SetOutput(io.Discard)
	}
	logger.Printf("Starting web scraping process with verbose mode: %v", verbose)
	scraper.SetupLogger(verbose)
	logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
	if !verbose {
		logger.SetOutput(io.Discard)
	}
	logger.Printf("Starting web scraping process with verbose mode: %v", verbose)
	scraperConfig.Verbose = verbose

	// Prepare site configurations
	var siteConfigs []scraper.SiteConfig
	if len(cfg.Scrape.Sites) > 0 {
		// Use configurations from rollup.yml
		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
		siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
		for i, site := range cfg.Scrape.Sites {
			siteConfigs[i] = scraper.SiteConfig{
				BaseURL:          site.BaseURL,
				CSSLocator:       site.CSSLocator,
				ExcludeSelectors: site.ExcludeSelectors,
				MaxDepth:         site.MaxDepth,
				AllowedPaths:     site.AllowedPaths,
				ExcludePaths:     site.ExcludePaths,
				OutputAlias:      site.OutputAlias,
				PathOverrides:    convertPathOverrides(site.PathOverrides),
			}
			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
				i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
		}
	} else {
		// Use command-line URLs
		if len(urls) == 0 {
			logger.Println("Error: No URLs provided via --urls flag")
			return fmt.Errorf("no URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
		}
		siteConfigs = make([]scraper.SiteConfig, len(urls))
		for i, u := range urls {
			siteConfigs[i] = scraper.SiteConfig{
				BaseURL:          u,
				CSSLocator:       includeSelector,
				ExcludeSelectors: excludeSelectors,
				MaxDepth:         depth,
				AllowedPaths:     []string{"/"}, // Allow all paths by default
			}
			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
				i+1, u, includeSelector, depth)
		}
	}
	var siteConfigs []scraper.SiteConfig
	if len(cfg.Sites) > 0 {
		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Sites))
		siteConfigs = make([]scraper.SiteConfig, len(cfg.Sites))
		for i, site := range cfg.Sites {
			siteConfigs[i] = scraper.SiteConfig{
				BaseURL:          site.BaseURL,
				CSSLocator:       site.CSSLocator,
				ExcludeSelectors: site.ExcludeSelectors,
				AllowedPaths:     site.AllowedPaths,
				ExcludePaths:     site.ExcludePaths,
				OutputAlias:      site.OutputAlias,
				PathOverrides:    convertPathOverrides(site.PathOverrides),
			}
			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, AllowedPaths=%v",
				i+1, site.BaseURL, site.CSSLocator, site.AllowedPaths)
		}
	} else {
		logger.Printf("No sites defined in rollup.yml, falling back to URL-based configuration")
		siteConfigs = make([]scraper.SiteConfig, len(urls))
		for i, u := range urls {
			siteConfigs[i] = scraper.SiteConfig{
				BaseURL:          u,
				CSSLocator:       includeSelector,
				ExcludeSelectors: excludeSelectors,
			}
			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s",
				i+1, u, includeSelector)
		}
	}

	// Set up scraper configuration
	scraperConfig := scraper.Config{
		Sites:      siteConfigs,
		OutputType: outputType,
		Verbose:    verbose,
		Scrape: scraper.ScrapeConfig{
			RequestsPerSecond: cfg.Scrape.RequestsPerSecond,
			BurstLimit:        cfg.Scrape.BurstLimit,
		},
	}
	logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
		outputType, scraperConfig.Scrape.RequestsPerSecond, scraperConfig.Scrape.BurstLimit)
	if len(siteConfigs) == 0 {
		logger.Println("Error: No sites or URLs provided")
		return fmt.Errorf("no sites or URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
	}

	// Validate scrape configuration
	err := validateScrapeConfig(cfg.Scrape)
	if err != nil {
		logger.Printf("Invalid scrape configuration: %v", err)
		return err
	}
	// Set default values for rate limiting
	defaultRequestsPerSecond := 1.0
	defaultBurstLimit := 3

	// Start scraping using scraper.ScrapeSites
	logger.Println("Starting scraping process")
	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
	if err != nil {
		logger.Printf("Error occurred during scraping: %v", err)
		return fmt.Errorf("error scraping content: %v", err)
	}
	logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))
	// Use default values if not set in the configuration
	requestsPerSecond := defaultRequestsPerSecond
	if cfg.RequestsPerSecond != nil {
		requestsPerSecond = *cfg.RequestsPerSecond
	}
	burstLimit := defaultBurstLimit
	if cfg.BurstLimit != nil {
		burstLimit = *cfg.BurstLimit
	}

	// Write output to files
	if outputType == "single" {
		logger.Println("Writing content to a single file")
		return writeSingleFile(scrapedContent)
	} else {
		logger.Println("Writing content to multiple files")
		return writeMultipleFiles(scrapedContent)
	}
	scraperConfig := scraper.Config{
		Sites:      siteConfigs,
		OutputType: outputType,
		Verbose:    verbose,
		Scrape: scraper.ScrapeConfig{
			RequestsPerSecond: requestsPerSecond,
			BurstLimit:        burstLimit,
		},
	}
	logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
		outputType, requestsPerSecond, burstLimit)

	logger.Println("Starting scraping process")
	startTime := time.Now()
	progressTicker := time.NewTicker(time.Second)
	defer progressTicker.Stop()

	done := make(chan bool)
	messagePrinted := false
	go func() {
		for {
			select {
			case <-progressTicker.C:
				if time.Since(startTime) > 5*time.Second && !messagePrinted {
					fmt.Print("This is taking a while (hold tight) ")
					messagePrinted = true
				} else if messagePrinted {
					fmt.Print(".")
				}
			case <-done:
				return
			}
		}
	}()

	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
	done <- true
	fmt.Println() // New line after progress indicator

	if err != nil {
		logger.Printf("Error occurred during scraping: %v", err)
		return fmt.Errorf("error scraping content: %v", err)
	}
	logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))

	if outputType == "single" {
		logger.Println("Writing content to a single file")
		return writeSingleFile(scrapedContent)
	} else {
		logger.Println("Writing content to multiple files")
		return writeMultipleFiles(scrapedContent)
	}
}

func writeSingleFile(content map[string]string) error {

@@ -154,7 +172,11 @@ func writeSingleFile(content map[string]string) error {

func writeMultipleFiles(content map[string]string) error {
	for url, c := range content {
		filename := sanitizeFilename(url) + ".rollup.md"
		filename, err := getFilenameFromContent(c, url)
		if err != nil {
			return fmt.Errorf("error generating filename for %s: %v", url, err)
		}

		file, err := os.Create(filename)
		if err != nil {
			return fmt.Errorf("error creating output file %s: %v", filename, err)

@@ -178,14 +200,78 @@ func generateDefaultFilename() string {
	return fmt.Sprintf("web-%s.rollup.md", timestamp)
}

func scrapeURL(urlStr string) (string, error) {
	content, err := testExtractAndConvertContent(urlStr)
	if err != nil {
		return "", err
	}

	return content, nil
}

var (
	testExtractAndConvertContent = extractAndConvertContent
)

func extractAndConvertContent(urlStr string) (string, error) {
	content, err := scraper.FetchWebpageContent(urlStr)
	if err != nil {
		return "", fmt.Errorf("error fetching webpage content: %v", err)
	}

	if includeSelector != "" {
		content, err = scraper.ExtractContentWithCSS(content, includeSelector, excludeSelectors)
		if err != nil {
			return "", fmt.Errorf("error extracting content with CSS: %v", err)
		}
	}

	markdown, err := scraper.ProcessHTMLContent(content, scraper.Config{})
	if err != nil {
		return "", fmt.Errorf("error processing HTML content: %v", err)
	}

	parsedURL, err := url.Parse(urlStr)
	if err != nil {
		return "", fmt.Errorf("error parsing URL: %v", err)
	}
	header := fmt.Sprintf("# ::: Content from %s\n\n", parsedURL.String())

	return header + markdown + "\n\n", nil
}

func getFilenameFromContent(content, urlStr string) (string, error) {
	// Try to extract title from content
	titleStart := strings.Index(content, "<title>")
	titleEnd := strings.Index(content, "</title>")
	if titleStart != -1 && titleEnd != -1 && titleEnd > titleStart {
		title := strings.TrimSpace(content[titleStart+7 : titleEnd])
		if title != "" {
			return sanitizeFilename(title) + ".rollup.md", nil
		}
	}

	// If no title found or title is empty, use the URL
	parsedURL, err := url.Parse(urlStr)
	if err != nil {
		return "", fmt.Errorf("invalid URL: %v", err)
	}

	if parsedURL.Host == "" {
		return "", fmt.Errorf("invalid URL: missing host")
	}

	filename := parsedURL.Host
	if parsedURL.Path != "" && parsedURL.Path != "/" {
		filename += strings.TrimSuffix(parsedURL.Path, "/")
	}
	return sanitizeFilename(filename) + ".rollup.md", nil
}
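To make the two branches concrete, here are hypothetical inputs and the filenames they would yield:

```go
// Title present: it wins, then gets sanitized.
getFilenameFromContent("<html><title>My Page - Docs</title></html>", "https://example.com")
// -> "My_Page_-_Docs.rollup.md"

// No title: fall back to host plus trimmed path.
getFilenameFromContent("<html></html>", "https://example.com/blog/post/")
// -> "example_com_blog_post.rollup.md"
```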
func sanitizeFilename(name string) string {
	// Remove any character that isn't alphanumeric, dash, or underscore
	name = strings.Map(func(r rune) rune {
		if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' {
			return r
		}
		return '_'
	}, name)
	reg := regexp.MustCompile("[^a-zA-Z0-9-_]+")
	name = reg.ReplaceAllString(name, "_")

	// Trim any leading or trailing underscores
	name = strings.Trim(name, "_")
@@ -7,36 +7,64 @@ import (
	"gopkg.in/yaml.v2"
)

// Config represents the configuration for the rollup tool
type Config struct {
	FileTypes     []string     `yaml:"file_types"`
	Ignore        []string     `yaml:"ignore"`
	CodeGenerated []string     `yaml:"code_generated"`
	Scrape        ScrapeConfig `yaml:"scrape"`
}

type ScrapeConfig struct {
	Sites             []SiteConfig `yaml:"sites"`
	OutputType        string       `yaml:"output_type"`
	RequestsPerSecond float64      `yaml:"requests_per_second"`
	BurstLimit        int          `yaml:"burst_limit"`
	// FileExtensions is a list of file extensions to include in the rollup
	FileExtensions []string `yaml:"file_extensions"`

	// IgnorePaths is a list of glob patterns for paths to ignore
	IgnorePaths []string `yaml:"ignore_paths"`

	// CodeGeneratedPaths is a list of glob patterns for code-generated files
	CodeGeneratedPaths []string `yaml:"code_generated_paths"`

	// Sites is a list of site configurations for web scraping
	Sites []SiteConfig `yaml:"sites"`

	// OutputType specifies how the output should be generated
	OutputType string `yaml:"output_type"`

	// RequestsPerSecond limits the rate of web requests
	RequestsPerSecond *float64 `yaml:"requests_per_second,omitempty"`

	// BurstLimit sets the maximum burst size for rate limiting
	BurstLimit *int `yaml:"burst_limit,omitempty"`
}
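The switch from plain values to `*float64`/`*int` lets the loader tell "key absent" apart from "explicitly set to zero", which the old `<= 0` defaulting could not. A minimal standalone sketch of that distinction with gopkg.in/yaml.v2 (illustration only, not repo code):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type conf struct {
	RequestsPerSecond *float64 `yaml:"requests_per_second,omitempty"`
}

func main() {
	var set, zero, unset conf
	yaml.Unmarshal([]byte("requests_per_second: 2.5"), &set)
	yaml.Unmarshal([]byte("requests_per_second: 0"), &zero)
	yaml.Unmarshal([]byte("{}"), &unset)

	fmt.Println(set.RequestsPerSecond == nil)   // false: caller uses *set.RequestsPerSecond
	fmt.Println(zero.RequestsPerSecond == nil)  // false: Validate can reject the explicit 0
	fmt.Println(unset.RequestsPerSecond == nil) // true: caller falls back to its default
}
```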
// SiteConfig contains configuration for scraping a single site
type SiteConfig struct {
	BaseURL                string         `yaml:"base_url"`
	CSSLocator             string         `yaml:"css_locator"`
	ExcludeSelectors       []string       `yaml:"exclude_selectors"`
	MaxDepth               int            `yaml:"max_depth"`
	AllowedPaths           []string       `yaml:"allowed_paths"`
	ExcludePaths           []string       `yaml:"exclude_paths"`
	OutputAlias            string         `yaml:"output_alias"`
	PathOverrides          []PathOverride `yaml:"path_overrides"`
	LinksContainerSelector string         `yaml:"links_container_selector"`
	// BaseURL is the starting point for scraping this site
	BaseURL string `yaml:"base_url"`

	// CSSLocator is used to extract specific content
	CSSLocator string `yaml:"css_locator"`

	// ExcludeSelectors lists CSS selectors for content to exclude
	ExcludeSelectors []string `yaml:"exclude_selectors"`

	// AllowedPaths lists paths that are allowed to be scraped
	AllowedPaths []string `yaml:"allowed_paths"`

	// ExcludePaths lists paths that should not be scraped
	ExcludePaths []string `yaml:"exclude_paths"`

	// OutputAlias provides an alternative name for output files
	OutputAlias string `yaml:"output_alias"`

	// PathOverrides allows for path-specific configurations
	PathOverrides []PathOverride `yaml:"path_overrides"`
}

// PathOverride allows for path-specific configurations
type PathOverride struct {
	Path             string   `yaml:"path"`
	CSSLocator       string   `yaml:"css_locator"`
	ExcludeSelectors []string `yaml:"exclude_selectors"`
	// Path is the URL path this override applies to
	Path string `yaml:"path"`

	// CSSLocator overrides the site-wide CSS locator for this path
	CSSLocator string `yaml:"css_locator"`

	// ExcludeSelectors overrides the site-wide exclude selectors for this path
	ExcludeSelectors []string `yaml:"exclude_selectors"`
}

func Load(configPath string) (*Config, error) {

@@ -51,22 +79,28 @@ func Load(configPath string) (*Config, error) {
		return nil, fmt.Errorf("error parsing config file: %v", err)
	}

	// Set default values if they are zero or missing
	if config.Scrape.RequestsPerSecond <= 0 {
		config.Scrape.RequestsPerSecond = 1.0
	}
	if config.Scrape.BurstLimit <= 0 {
		config.Scrape.BurstLimit = 5
	if err := config.Validate(); err != nil {
		return nil, fmt.Errorf("invalid configuration: %v", err)
	}

	return &config, nil
}

func DefaultConfigPath() string {
	return "rollup.yml"
}

// Validate checks the configuration for any invalid values
func (c *Config) Validate() error {
	if c.RequestsPerSecond != nil && *c.RequestsPerSecond <= 0 {
		return fmt.Errorf("requests_per_second must be positive")
	}

func FileExists(filename string) bool {
	_, err := os.Stat(filename)
	return err == nil
}
	if c.BurstLimit != nil && *c.BurstLimit <= 0 {
		return fmt.Errorf("burst_limit must be positive")
	}

	for _, site := range c.Sites {
		if site.BaseURL == "" {
			return fmt.Errorf("base_url must be specified for each site")
		}
	}

	return nil
}
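Since Load now runs Validate before returning, callers only have to check one error. A minimal usage sketch (the import path is assumed from the repository name):

```go
package main

import (
	"fmt"
	"log"

	"github.com/tnypxl/rollup/config" // assumed import path
)

func main() {
	// Load reads, parses, and (after this change) validates rollup.yml in one step.
	cfg, err := config.Load("rollup.yml")
	if err != nil {
		log.Fatalf("config error: %v", err)
	}
	fmt.Println("file extensions:", cfg.FileExtensions)
}
```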
@@ -9,34 +9,33 @@ import (
func TestLoad(t *testing.T) {
	// Create a temporary config file
	content := []byte(`
file_types:
  - go
  - md
ignore:
file_extensions:
  - .go
  - .md
ignore_paths:
  - "*.tmp"
  - "**/*.log"
code_generated:
code_generated_paths:
  - "generated_*.go"
scrape:
  sites:
    - base_url: "https://example.com"
      css_locator: "main"
      exclude_selectors:
        - ".ads"
      max_depth: 2
      allowed_paths:
        - "/blog"
      exclude_paths:
        - "/admin"
      output_alias: "example"
      path_overrides:
        - path: "/special"
          css_locator: ".special-content"
          exclude_selectors:
            - ".sidebar"
  output_type: "single"
  requests_per_second: 1.0
  burst_limit: 5
sites:
  - base_url: "https://example.com"
    css_locator: "main"
    exclude_selectors:
      - ".ads"
    max_depth: 2
    allowed_paths:
      - "/blog"
    exclude_paths:
      - "/admin"
    output_alias: "example"
    path_overrides:
      - path: "/special"
        css_locator: ".special-content"
        exclude_selectors:
          - ".sidebar"
output_type: "single"
requests_per_second: 1.0
burst_limit: 5
`)

	tmpfile, err := os.CreateTemp("", "config*.yml")

@@ -59,33 +58,33 @@ scrape:
	}

	// Check if the loaded config matches the expected values
	rps := 1.0
	bl := 5
	expectedConfig := &Config{
		FileTypes:     []string{"go", "md"},
		Ignore:        []string{"*.tmp", "**/*.log"},
		CodeGenerated: []string{"generated_*.go"},
		Scrape: ScrapeConfig{
			Sites: []SiteConfig{
				{
					BaseURL:          "https://example.com",
					CSSLocator:       "main",
					ExcludeSelectors: []string{".ads"},
					MaxDepth:         2,
					AllowedPaths:     []string{"/blog"},
					ExcludePaths:     []string{"/admin"},
					OutputAlias:      "example",
					PathOverrides: []PathOverride{
						{
							Path:             "/special",
							CSSLocator:       ".special-content",
							ExcludeSelectors: []string{".sidebar"},
						},
		FileExtensions:     []string{".go", ".md"},
		IgnorePaths:        []string{"*.tmp", "**/*.log"},
		CodeGeneratedPaths: []string{"generated_*.go"},
		Sites: []SiteConfig{
			{
				BaseURL:          "https://example.com",
				CSSLocator:       "main",
				ExcludeSelectors: []string{".ads"},
				MaxDepth:         2,
				AllowedPaths:     []string{"/blog"},
				ExcludePaths:     []string{"/admin"},
				OutputAlias:      "example",
				PathOverrides: []PathOverride{
					{
						Path:             "/special",
						CSSLocator:       ".special-content",
						ExcludeSelectors: []string{".sidebar"},
					},
				},
			},
			OutputType:        "single",
			RequestsPerSecond: 1.0,
			BurstLimit:        5,
		},
		OutputType:        "single",
		RequestsPerSecond: &rps,
		BurstLimit:        &bl,
	}

	if !reflect.DeepEqual(config, expectedConfig) {

@@ -93,28 +92,67 @@ scrape:
	}
}

func TestDefaultConfigPath(t *testing.T) {
	expected := "rollup.yml"
	result := DefaultConfigPath()
	if result != expected {
		t.Errorf("DefaultConfigPath() = %q, want %q", result, expected)
	}
}

func TestFileExists(t *testing.T) {
	// Test with an existing file
	tmpfile, err := os.CreateTemp("", "testfile")
	if err != nil {
		t.Fatalf("Failed to create temp file: %v", err)
	}
	defer os.Remove(tmpfile.Name())

	if !FileExists(tmpfile.Name()) {
		t.Errorf("FileExists(%q) = false, want true", tmpfile.Name())
	}

	// Test with a non-existing file
	if FileExists("non_existing_file.txt") {
		t.Errorf("FileExists(\"non_existing_file.txt\") = true, want false")
func TestValidate(t *testing.T) {
	tests := []struct {
		name    string
		config  Config
		wantErr bool
	}{
		{
			name: "Valid config",
			config: Config{
				FileExtensions: []string{".go"},
				Sites: []SiteConfig{
					{BaseURL: "https://example.com", MaxDepth: 2},
				},
			},
			wantErr: false,
		},
		{
			name:    "No file extensions",
			config:  Config{},
			wantErr: true,
		},
		{
			name: "Invalid requests per second",
			config: Config{
				FileExtensions:    []string{".go"},
				RequestsPerSecond: func() *float64 { f := -1.0; return &f }(),
			},
			wantErr: true,
		},
		{
			name: "Invalid burst limit",
			config: Config{
				FileExtensions: []string{".go"},
				BurstLimit:     func() *int { i := -1; return &i }(),
			},
			wantErr: true,
		},
		{
			name: "Site without base URL",
			config: Config{
				FileExtensions: []string{".go"},
				Sites:          []SiteConfig{{}},
			},
			wantErr: true,
		},
		{
			name: "Negative max depth",
			config: Config{
				FileExtensions: []string{".go"},
				Sites:          []SiteConfig{{BaseURL: "https://example.com", MaxDepth: -1}},
			},
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.config.Validate()
			if (err != nil) != tt.wantErr {
				t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
@@ -45,12 +45,10 @@ type SiteConfig struct {
	BaseURL          string
	CSSLocator       string
	ExcludeSelectors []string
	MaxDepth         int
	AllowedPaths     []string
	ExcludePaths     []string
	OutputAlias      string
	PathOverrides    []PathOverride
	LinksContainerSelector string
}

// PathOverride holds path-specific overrides

@@ -68,33 +66,21 @@ func ScrapeSites(config Config) (map[string]string, error) {
		err     error
	})

	// Ensure RequestsPerSecond and BurstLimit are valid
	if config.Scrape.RequestsPerSecond <= 0 {
		config.Scrape.RequestsPerSecond = 1.0
	}
	if config.Scrape.BurstLimit <= 0 {
		config.Scrape.BurstLimit = 5
	}

	limiter := rate.NewLimiter(rate.Limit(config.Scrape.RequestsPerSecond), config.Scrape.BurstLimit)
	logger.Printf("Rate limiter configured with %f requests per second and burst limit of %d\n", config.Scrape.RequestsPerSecond, config.Scrape.BurstLimit)

	var wg sync.WaitGroup
	totalURLs := 0
	var mu sync.Mutex
	for _, site := range config.Sites {
		logger.Printf("Processing site: %s\n", site.BaseURL)
		wg.Add(1)
		go func(site SiteConfig) {
			defer wg.Done()
			visited := make(map[string]bool)
			for _, path := range site.AllowedPaths {
				fullURL := site.BaseURL + path
				mu.Lock()
				totalURLs++
				mu.Unlock()
				logger.Printf("Queueing URL for scraping: %s\n", fullURL)
				scrapeSingleURL(fullURL, site, results, limiter, visited, 0)
				scrapeSingleURL(fullURL, site, results, limiter)
			}
		}(site)
	}

@@ -125,16 +111,8 @@ func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
	url     string
	content string
	err     error
}, limiter *rate.Limiter, visited map[string]bool, currentDepth int) {
	if site.MaxDepth > 0 && currentDepth > site.MaxDepth {
		return
	}

	if visited[url] {
		return
	}
	visited[url] = true

}, limiter *rate.Limiter,
) {
	logger.Printf("Starting to scrape URL: %s\n", url)

	// Wait for rate limiter before making the request

@@ -149,52 +127,13 @@ func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
		return
	}

	content, err := FetchWebpageContent(url)
	if err != nil {
		logger.Printf("Error fetching content for %s: %v\n", url, err)
		results <- struct {
			url     string
			content string
			err     error
		}{url, "", err}
		return
	}

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(content))
	if err != nil {
		logger.Printf("Error parsing HTML for %s: %v\n", url, err)
		results <- struct {
			url     string
			content string
			err     error
		}{url, "", fmt.Errorf("error parsing HTML: %v", err)}
		return
	}

	if site.LinksContainerSelector != "" {
		logger.Printf("Processing links container for %s\n", url)
		linkContainers := doc.Find(site.LinksContainerSelector)
		linkContainers.Each(func(i int, container *goquery.Selection) {
			container.Find("a[href]").Each(func(j int, link *goquery.Selection) {
				href, exists := link.Attr("href")
				if exists {
					resolvedURL := resolveURL(href, url)
					if isAllowedURL(resolvedURL, site) && !visited[resolvedURL] {
						go scrapeSingleURL(resolvedURL, site, results, limiter, visited, currentDepth+1)
					}
				}
			})
		})
		return
	}

	cssLocator, excludeSelectors := getOverrides(url, site)
	logger.Printf("Using CSS locator for %s: %s\n", url, cssLocator)
	logger.Printf("Exclude selectors for %s: %v\n", url, excludeSelectors)

	extractedContent, err := ExtractContentWithCSS(content, cssLocator, excludeSelectors)
	content, err := scrapeURL(url, cssLocator, excludeSelectors)
	if err != nil {
		logger.Printf("Error extracting content for %s: %v\n", url, err)
		logger.Printf("Error scraping %s: %v\n", url, err)
		results <- struct {
			url     string
			content string

@@ -203,70 +142,19 @@ func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
		return
	}

	if extractedContent == "" {
	if content == "" {
		logger.Printf("Warning: Empty content scraped from %s\n", url)
	} else {
		logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(extractedContent))
		logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(content))
	}

	results <- struct {
		url     string
		content string
		err     error
	}{url, extractedContent, nil}
	}{url, content, nil}
}

func scrapeSite(site SiteConfig, results chan<- struct {
	url     string
	content string
	err     error
}, limiter *rate.Limiter,
) {
	visited := make(map[string]bool)
	queue := []string{site.BaseURL}

	for len(queue) > 0 {
		url := queue[0]
		queue = queue[1:]

		if visited[url] {
			continue
		}
		visited[url] = true

		if !isAllowedURL(url, site) {
			continue
		}

		// Wait for rate limiter before making the request
		err := limiter.Wait(context.Background())
		if err != nil {
			results <- struct {
				url     string
				content string
				err     error
			}{url, "", fmt.Errorf("rate limiter error: %v", err)}
			continue
		}

		cssLocator, excludeSelectors := getOverrides(url, site)
		content, err := scrapeURL(url, cssLocator, excludeSelectors)
		results <- struct {
			url     string
			content string
			err     error
		}{url, content, err}

		if len(visited) < site.MaxDepth {
			links, _ := ExtractLinks(url)
			for _, link := range links {
				if !visited[link] && isAllowedURL(link, site) {
					queue = append(queue, link)
				}
			}
		}
	}
}

func isAllowedURL(urlStr string, site SiteConfig) bool {
	parsedURL, err := url.Parse(urlStr)

@@ -280,29 +168,18 @@ func isAllowedURL(urlStr string, site SiteConfig) bool {
	}

	path := parsedURL.Path

	// Check if the URL is within allowed paths
	if len(site.AllowedPaths) > 0 {
		allowed := false
		for _, allowedPath := range site.AllowedPaths {
			if strings.HasPrefix(path, allowedPath) {
				allowed = true
				break
	for _, allowedPath := range site.AllowedPaths {
		if strings.HasPrefix(path, allowedPath) {
			for _, excludePath := range site.ExcludePaths {
				if strings.HasPrefix(path, excludePath) {
					return false
				}
			}
		}
		if !allowed {
			return false
			return true
		}
	}

	// Check if the URL is in excluded paths
	for _, excludePath := range site.ExcludePaths {
		if strings.HasPrefix(path, excludePath) {
			return false
		}
	}

	return true
	return false
}
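To illustrate the rewritten logic, here is how it behaves for a hypothetical site (values invented; assumes the URL parses and passes the host check elided from this hunk):

```go
site := SiteConfig{
	BaseURL:      "https://example.com",
	AllowedPaths: []string{"/blog", "/docs"},
	ExcludePaths: []string{"/blog/private"},
}

isAllowedURL("https://example.com/blog/post-1", site)    // true: under /blog, no exclude matches
isAllowedURL("https://example.com/blog/private/x", site) // false: excluded under /blog/private
isAllowedURL("https://example.com/about", site)          // false: matches no allowed path
```

Note that unlike the old version, a site with empty AllowedPaths now rejects every URL, since the final return is false.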
func getOverrides(urlStr string, site SiteConfig) (string, []string) {

@@ -581,40 +458,6 @@ func scrollPage(page playwright.Page) error {
	return nil
}

// ExtractLinks extracts all links from the given URL
func ExtractLinks(urlStr string) ([]string, error) {
	logger.Printf("Extracting links from URL: %s\n", urlStr)

	page, err := browser.NewPage()
	if err != nil {
		return nil, fmt.Errorf("could not create page: %v", err)
	}
	defer page.Close()

	if _, err = page.Goto(urlStr, playwright.PageGotoOptions{
		WaitUntil: playwright.WaitUntilStateNetworkidle,
	}); err != nil {
		return nil, fmt.Errorf("could not go to page: %v", err)
	}

	links, err := page.Evaluate(`() => {
		const anchors = document.querySelectorAll('a');
		return Array.from(anchors).map(a => a.href);
	}`)
	if err != nil {
		return nil, fmt.Errorf("could not extract links: %v", err)
	}

	var result []string
	for _, link := range links.([]interface{}) {
		// Normalize URL by removing trailing slash
		normalizedLink := strings.TrimRight(link.(string), "/")
		result = append(result, normalizedLink)
	}

	logger.Printf("Extracted %d links\n", len(result))
	return result, nil
}

// ExtractContentWithCSS extracts content from HTML using a CSS selector
func ExtractContentWithCSS(content, includeSelector string, excludeSelectors []string) (string, error) {

@@ -663,14 +506,3 @@ func ExtractContentWithCSS(content, includeSelector string, excludeSelectors []s
	logger.Printf("Extracted content length: %d\n", len(selectedContent))
	return selectedContent, nil
}
func resolveURL(href, base string) string {
	parsedBase, err := url.Parse(base)
	if err != nil {
		return href
	}
	parsedHref, err := url.Parse(href)
	if err != nil {
		return href
	}
	return parsedBase.ResolveReference(parsedHref).String()
}
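A few hypothetical inputs showing what net/url's ResolveReference does here:

```go
resolveURL("/assets/logo.png", "https://example.com/docs/intro") // "https://example.com/assets/logo.png"
resolveURL("../guide", "https://example.com/docs/intro")         // "https://example.com/guide"
resolveURL("https://other.org/x", "https://example.com/docs")    // "https://other.org/x" (absolute refs pass through)
```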