mirror of https://github.com/tnypxl/rollup.git
synced 2025-12-15 15:03:17 +00:00
Compare commits
21 Commits
| SHA1 |
|---|
| 333b9a366c |
| 1869dae89a |
| d3ff7cb862 |
| ea410e4abb |
| 7d8e25b1ad |
| 691832e282 |
| 31e0fa5ea4 |
| 71f63ddaa8 |
| 574800c241 |
| d5a94f5468 |
| 59994c085c |
| 396f092d50 |
| 274ef7ea79 |
| a55e8df02a |
| 364b185269 |
| 952c2dda02 |
| de84d68b4c |
| e5d4c514a7 |
| 6ff44f81bb |
| 2fd411ce65 |
| 73116e8d82 |
README.md (59 changes)

@@ -4,16 +4,18 @@ Rollup aggregates the contents of text-based files and webpages into a markdown
 
 ## Features
 
-- File type filtering
-- Ignore patterns for excluding files
-- Support for code-generated file detection
-- Advanced web scraping functionality
-- Verbose logging option for detailed output
-- Exclusionary CSS selectors for web scraping
-- Support for multiple URLs in web scraping
+- File type filtering for targeted content aggregation
+- Ignore patterns for excluding specific files or directories
+- Support for code-generated file detection and exclusion
+- Advanced web scraping functionality with depth control
+- Verbose logging option for detailed operation insights
+- Exclusionary CSS selectors for precise web content extraction
+- Support for multiple URLs in web scraping operations
 - Configurable output format for web scraping (single file or separate files)
-- Configuration file support (YAML)
-- Generation of default configuration file
+- Flexible configuration file support (YAML)
+- Automatic generation of default configuration file
+- Custom output file naming
+- Concurrent processing for improved performance
 
 ## Installation
 
@@ -74,14 +76,27 @@ ignore:
 code_generated:
   - **/generated/**
 scrape:
-  urls:
-    - url: https://example.com
+  sites:
+    - base_url: https://example.com
       css_locator: .content
       exclude_selectors:
         - .ads
         - .navigation
+      max_depth: 2
+      allowed_paths:
+        - /blog
+        - /docs
+      exclude_paths:
+        - /admin
       output_alias: example
+      path_overrides:
+        - path: /special-page
+          css_locator: .special-content
+          exclude_selectors:
+            - .special-ads
   output_type: single
+  requests_per_second: 1.0
+  burst_limit: 3
 ```
 
 ## Examples
@@ -92,10 +107,10 @@ scrape:
 rollup files
 ```
 
-2. Web scraping with multiple URLs:
+2. Web scraping with multiple URLs and increased concurrency:
 
 ```bash
-rollup web --urls=https://example.com,https://another-example.com
+rollup web --urls=https://example.com,https://another-example.com --concurrent=8
 ```
 
 3. Generate a default configuration file:
@@ -104,15 +119,25 @@ scrape:
 rollup generate
 ```
 
-4. Use a custom configuration file:
+4. Use a custom configuration file and specify output:
 
 ```bash
-rollup files --config=my-config.yml
+rollup files --config=my-config.yml --output=project_summary.md
 ```
 
-5. Web scraping with separate output files:
+5. Web scraping with separate output files and custom timeout:
 ```bash
-rollup web --urls=https://example.com,https://another-example.com --output=separate
+rollup web --urls=https://example.com,https://another-example.com --output=separate --timeout=60
+```
+
+6. Rollup files with specific types and ignore patterns:
+```bash
+rollup files --types=.go,.md --ignore=vendor/**,*_test.go
+```
+
+7. Web scraping with depth and CSS selector:
+```bash
+rollup web --urls=https://example.com --depth=2 --css=.main-content
 ```
 
 ## Contributing
cmd/files.go (18 changes)

@@ -8,8 +8,11 @@ import (
     "time"
 
     "github.com/spf13/cobra"
+    "github.com/tnypxl/rollup/internal/config"
 )
 
+var cfg *config.Config
+
 var (
     path      string
     fileTypes string
@@ -24,7 +27,7 @@ var filesCmd = &cobra.Command{
 in a given project, current path or a custom path, to a single timestamped markdown file
 whose name is <project-directory-name>-rollup-<timestamp>.md.`,
     RunE: func(cmd *cobra.Command, args []string) error {
-        return runRollup()
+        return runRollup(cfg)
     },
 }
 
@@ -87,16 +90,23 @@ func isIgnored(filePath string, patterns []string) bool {
                 return true
             }
         } else {
-            matched, err := filepath.Match(pattern, filepath.Base(filePath))
-            if err == nil && matched {
+            // Check if the pattern matches the full path or any part of it
+            if matched, _ := filepath.Match(pattern, filePath); matched {
                 return true
             }
+            pathParts := strings.Split(filePath, string(os.PathSeparator))
+            for i := range pathParts {
+                partialPath := filepath.Join(pathParts[:i+1]...)
+                if matched, _ := filepath.Match(pattern, partialPath); matched {
+                    return true
+                }
+            }
         }
     }
     return false
 }
 
-func runRollup() error {
+func runRollup(cfg *config.Config) error {
     // Use config if available, otherwise use command-line flags
     var types, codeGenList, ignoreList []string
     if cfg != nil && len(cfg.FileTypes) > 0 {
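The reworked `isIgnored` logic above matches each pattern against the full path and against every leading sub-path, which is what lets a directory pattern such as `vendor/**` catch files nested beneath it. A minimal standalone sketch of that matching loop (the pattern and path values here are illustrative, not taken from the repo):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// matchesAnyPrefix reports whether pattern matches the path itself or any
// leading sub-path, mirroring the partial-path loop added in cmd/files.go.
func matchesAnyPrefix(pattern, path string) bool {
	if ok, _ := filepath.Match(pattern, path); ok {
		return true
	}
	parts := strings.Split(path, string(os.PathSeparator))
	for i := range parts {
		partial := filepath.Join(parts[:i+1]...)
		if ok, _ := filepath.Match(pattern, partial); ok {
			return true
		}
	}
	return false
}

func main() {
	// "vendor/**" does not match the full three-segment path, but it does
	// match the partial path "vendor/package", so the file is ignored.
	fmt.Println(matchesAnyPrefix("vendor/**", "vendor/package/file.go"))   // true
	fmt.Println(matchesAnyPrefix("vendor/**", "internal/vendor/file.go")) // false
}
```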
cmd/files_test.go (new file, 172 lines)

@@ -0,0 +1,172 @@
+package cmd
+
+import (
+    "os"
+    "path/filepath"
+    "strings"
+    "testing"
+
+    "github.com/tnypxl/rollup/internal/config"
+)
+
+func TestMatchGlob(t *testing.T) {
+    tests := []struct {
+        pattern  string
+        path     string
+        expected bool
+    }{
+        {"*.go", "file.go", true},
+        {"*.go", "file.txt", false},
+        {"**/*.go", "dir/file.go", true},
+        {"**/*.go", "dir/subdir/file.go", true},
+        {"dir/*.go", "dir/file.go", true},
+        {"dir/*.go", "otherdir/file.go", false},
+        {"**/test_*.go", "internal/test_helper.go", true},
+        {"docs/**/*.md", "docs/api/endpoints.md", true},
+        {"docs/**/*.md", "src/docs/readme.md", false},
+    }
+
+    for _, test := range tests {
+        result := matchGlob(test.pattern, test.path)
+        if result != test.expected {
+            t.Errorf("matchGlob(%q, %q) = %v; want %v", test.pattern, test.path, result, test.expected)
+        }
+    }
+}
+
+func TestIsCodeGenerated(t *testing.T) {
+    patterns := []string{"generated_*.go", "**/auto_*.go", "**/*_gen.go"}
+    tests := []struct {
+        path     string
+        expected bool
+    }{
+        {"generated_file.go", true},
+        {"normal_file.go", false},
+        {"subdir/auto_file.go", true},
+        {"subdir/normal_file.go", false},
+        {"pkg/models_gen.go", true},
+        {"pkg/handler.go", false},
+    }
+
+    for _, test := range tests {
+        result := isCodeGenerated(test.path, patterns)
+        if result != test.expected {
+            t.Errorf("isCodeGenerated(%q, %v) = %v; want %v", test.path, patterns, result, test.expected)
+        }
+    }
+}
+
+func TestIsIgnored(t *testing.T) {
+    patterns := []string{"*.tmp", "**/*.log", ".git/**", "vendor/**"}
+    tests := []struct {
+        path     string
+        expected bool
+    }{
+        {"file.tmp", true},
+        {"file.go", false},
+        {"subdir/file.log", true},
+        {"subdir/file.txt", false},
+        {".git/config", true},
+        {"src/.git/config", false},
+        {"vendor/package/file.go", true},
+        {"internal/vendor/file.go", false},
+    }
+
+    for _, test := range tests {
+        result := isIgnored(test.path, patterns)
+        if result != test.expected {
+            t.Errorf("isIgnored(%q, %v) = %v; want %v", test.path, patterns, result, test.expected)
+        }
+    }
+}
+
+func TestRunRollup(t *testing.T) {
+    // Create a temporary directory for testing
+    tempDir, err := os.MkdirTemp("", "rollup_test")
+    if err != nil {
+        t.Fatalf("Failed to create temp dir: %v", err)
+    }
+    defer os.RemoveAll(tempDir)
+
+    // Create some test files
+    files := map[string]string{
+        "file1.go":             "package main\n\nfunc main() {}\n",
+        "file2.txt":            "This is a text file.\n",
+        "subdir/file3.go":      "package subdir\n\nfunc Func() {}\n",
+        "subdir/file4.json":    "{\"key\": \"value\"}\n",
+        "generated_model.go":   "// Code generated DO NOT EDIT.\n\npackage model\n",
+        "docs/api/readme.md":   "# API Documentation\n",
+        ".git/config":          "[core]\n\trepositoryformatversion = 0\n",
+        "vendor/lib/helper.go": "package lib\n\nfunc Helper() {}\n",
+    }
+
+    for name, content := range files {
+        path := filepath.Join(tempDir, name)
+        if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
+            t.Fatalf("Failed to create directory: %v", err)
+        }
+        if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
+            t.Fatalf("Failed to write file: %v", err)
+        }
+    }
+
+    // Set up test configuration
+    cfg = &config.Config{
+        FileTypes:     []string{"go", "txt", "md"},
+        Ignore:        []string{"*.json", ".git/**", "vendor/**"},
+        CodeGenerated: []string{"generated_*.go"},
+    }
+
+    // Change working directory to the temp directory
+    originalWd, _ := os.Getwd()
+    os.Chdir(tempDir)
+    defer os.Chdir(originalWd)
+
+    // Run the rollup
+    if err := runRollup(cfg); err != nil {
+        t.Fatalf("runRollup() failed: %v", err)
+    }
+
+    // Check if the output file was created
+    outputFiles, err := filepath.Glob("*.rollup.md")
+    if err != nil {
+        t.Fatalf("Error globbing for output file: %v", err)
+    }
+    if len(outputFiles) == 0 {
+        allFiles, _ := filepath.Glob("*")
+        t.Fatalf("No rollup.md file found. Files in directory: %v", allFiles)
+    }
+    outputFile := outputFiles[0]
+
+    // Read the content of the output file
+    content, err := os.ReadFile(outputFile)
+    if err != nil {
+        t.Fatalf("Failed to read output file: %v", err)
+    }
+
+    // Check if the content includes the expected files
+    expectedContent := []string{
+        "# File: file1.go",
+        "# File: file2.txt",
+        "# File: subdir/file3.go",
+        "# File: docs/api/readme.md",
+        "# File: generated_model.go (Code-generated, Read-only)",
+    }
+    for _, expected := range expectedContent {
+        if !strings.Contains(string(content), expected) {
+            t.Errorf("Output file does not contain expected content: %s", expected)
+        }
+    }
+
+    // Check if the ignored files are not included
+    ignoredContent := []string{
+        "file4.json",
+        ".git/config",
+        "vendor/lib/helper.go",
+    }
+    for _, ignored := range ignoredContent {
+        if strings.Contains(string(content), ignored) {
+            t.Errorf("Output file contains ignored file: %s", ignored)
+        }
+    }
+}
cmd/root.go

@@ -7,7 +7,6 @@ import (
 
 var (
     configFile string
-    cfg        *config.Config
     verbose    bool
 )
 
@@ -19,10 +18,10 @@ Use subcommands to perform specific operations.`,
 }
 
 func Execute(conf *config.Config) error {
-    cfg = conf
-    if cfg == nil {
-        cfg = &config.Config{} // Use an empty config if none is provided
+    if conf == nil {
+        conf = &config.Config{} // Use an empty config if none is provided
     }
+    cfg = conf // Set the cfg variable in cmd/files.go
     return rootCmd.Execute()
 }
 
cmd/web.go (143 changes)

@@ -2,6 +2,8 @@ package cmd
 
 import (
     "fmt"
+    "io"
+    "log"
     "net/url"
     "os"
     "regexp"
@@ -9,6 +11,7 @@ import (
     "time"
 
     "github.com/spf13/cobra"
+    "github.com/tnypxl/rollup/internal/config"
     "github.com/tnypxl/rollup/internal/scraper"
 )
 
@@ -38,45 +41,91 @@ func init() {
 }
 
 func runWeb(cmd *cobra.Command, args []string) error {
+    scraper.SetupLogger(verbose)
+    logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
+    if !verbose {
+        logger.SetOutput(io.Discard)
+    }
+    logger.Printf("Starting web scraping process with verbose mode: %v", verbose)
     scraperConfig.Verbose = verbose
 
-    // Use config if available, otherwise use command-line flags
-    var urlConfigs []scraper.URLConfig
-    if len(urls) == 0 && len(cfg.Scrape.URLs) > 0 {
-        urlConfigs = make([]scraper.URLConfig, len(cfg.Scrape.URLs))
-        for i, u := range cfg.Scrape.URLs {
-            urlConfigs[i] = scraper.URLConfig{
-                URL:              u.URL,
-                CSSLocator:       u.CSSLocator,
-                ExcludeSelectors: u.ExcludeSelectors,
-                OutputAlias:      u.OutputAlias,
+    var siteConfigs []scraper.SiteConfig
+    if len(cfg.Scrape.Sites) > 0 {
+        logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
+        siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
+        for i, site := range cfg.Scrape.Sites {
+            siteConfigs[i] = scraper.SiteConfig{
+                BaseURL:          site.BaseURL,
+                CSSLocator:       site.CSSLocator,
+                ExcludeSelectors: site.ExcludeSelectors,
+                MaxDepth:         site.MaxDepth,
+                AllowedPaths:     site.AllowedPaths,
+                ExcludePaths:     site.ExcludePaths,
+                OutputAlias:      site.OutputAlias,
+                PathOverrides:    convertPathOverrides(site.PathOverrides),
             }
+            logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
+                i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
         }
     } else {
-        urlConfigs = make([]scraper.URLConfig, len(urls))
+        logger.Printf("No sites defined in rollup.yml, falling back to URL-based configuration")
+        siteConfigs = make([]scraper.SiteConfig, len(urls))
         for i, u := range urls {
-            urlConfigs[i] = scraper.URLConfig{URL: u, CSSLocator: includeSelector}
+            siteConfigs[i] = scraper.SiteConfig{
+                BaseURL:          u,
+                CSSLocator:       includeSelector,
+                ExcludeSelectors: excludeSelectors,
+                MaxDepth:         depth,
+            }
+            logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
+                i+1, u, includeSelector, depth)
         }
     }
 
-    if len(urlConfigs) == 0 {
-        return fmt.Errorf("no URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.urls' in the rollup.yml file")
+    if len(siteConfigs) == 0 {
+        logger.Println("Error: No sites or URLs provided")
+        return fmt.Errorf("no sites or URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
+    }
+
+    // Set default values for rate limiting
+    defaultRequestsPerSecond := 1.0
+    defaultBurstLimit := 3
+
+    // Use default values if not set in the configuration
+    requestsPerSecond := cfg.Scrape.RequestsPerSecond
+    if requestsPerSecond == 0 {
+        requestsPerSecond = defaultRequestsPerSecond
+    }
+    burstLimit := cfg.Scrape.BurstLimit
+    if burstLimit == 0 {
+        burstLimit = defaultBurstLimit
     }
 
     scraperConfig := scraper.Config{
-        URLs:       urlConfigs,
+        Sites:      siteConfigs,
        OutputType: outputType,
        Verbose:    verbose,
+        Scrape: scraper.ScrapeConfig{
+            RequestsPerSecond: requestsPerSecond,
+            BurstLimit:        burstLimit,
+        },
     }
+    logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
+        outputType, requestsPerSecond, burstLimit)
 
-    scrapedContent, err := scraper.ScrapeMultipleURLs(scraperConfig)
+    logger.Println("Starting scraping process")
+    scrapedContent, err := scraper.ScrapeSites(scraperConfig)
     if err != nil {
+        logger.Printf("Error occurred during scraping: %v", err)
         return fmt.Errorf("error scraping content: %v", err)
     }
+    logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))
 
     if outputType == "single" {
+        logger.Println("Writing content to a single file")
         return writeSingleFile(scrapedContent)
     } else {
+        logger.Println("Writing content to multiple files")
         return writeMultipleFiles(scrapedContent)
     }
 }
@@ -90,7 +139,7 @@ func writeSingleFile(content map[string]string) error {
     defer file.Close()
 
     for url, c := range content {
-        _, err = fmt.Fprintf(file, "# Content from %s\n\n%s\n\n---\n\n", url, c)
+        _, err = fmt.Fprintf(file, "# ::: Content from %s\n\n%s\n\n---\n\n", url, c)
         if err != nil {
             return fmt.Errorf("error writing content to file: %v", err)
         }
@@ -102,20 +151,26 @@ func writeSingleFile(content map[string]string) error {
 
 func writeMultipleFiles(content map[string]string) error {
     for url, c := range content {
-        filename := getFilenameFromContent(c, url)
+        filename, err := getFilenameFromContent(c, url)
+        if err != nil {
+            return fmt.Errorf("error generating filename for %s: %v", url, err)
+        }
+
         file, err := os.Create(filename)
         if err != nil {
             return fmt.Errorf("error creating output file %s: %v", filename, err)
         }
 
-        _, err = fmt.Fprintf(file, "# Content from %s\n\n%s", url, c)
-        file.Close()
+        _, err = file.WriteString(fmt.Sprintf("# ::: Content from %s\n\n%s\n", url, c))
         if err != nil {
+            file.Close()
             return fmt.Errorf("error writing content to file %s: %v", filename, err)
         }
+
+        file.Close()
         fmt.Printf("Content from %s has been saved to %s\n", url, filename)
     }
 
     return nil
 }
 
@@ -136,13 +191,13 @@ func scrapeURL(urlStr string, depth int, visited map[string]bool) (string, error
 
     visited[urlStr] = true
 
-    content, err := extractAndConvertContent(urlStr)
+    content, err := testExtractAndConvertContent(urlStr)
     if err != nil {
         return "", err
     }
 
     if depth > 0 {
-        links, err := scraper.ExtractLinks(urlStr)
+        links, err := testExtractLinks(urlStr)
         if err != nil {
             return content, fmt.Errorf("error extracting links: %v", err)
         }
@@ -160,6 +215,11 @@ func scrapeURL(urlStr string, depth int, visited map[string]bool) (string, error
     return content, nil
 }
 
+var (
+    testExtractAndConvertContent = extractAndConvertContent
+    testExtractLinks             = scraper.ExtractLinks
+)
+
 func extractAndConvertContent(urlStr string) (string, error) {
     content, err := scraper.FetchWebpageContent(urlStr)
     if err != nil {
@@ -182,22 +242,37 @@ func extractAndConvertContent(urlStr string) (string, error) {
     if err != nil {
         return "", fmt.Errorf("error parsing URL: %v", err)
     }
-    header := fmt.Sprintf("# Content from %s\n\n", parsedURL.String())
+    header := fmt.Sprintf("# ::: Content from %s\n\n", parsedURL.String())
 
     return header + markdown + "\n\n", nil
 }
 
-func getFilenameFromContent(content, url string) string {
+func getFilenameFromContent(content, urlStr string) (string, error) {
     // Try to extract title from content
     titleStart := strings.Index(content, "<title>")
     titleEnd := strings.Index(content, "</title>")
     if titleStart != -1 && titleEnd != -1 && titleEnd > titleStart {
-        title := content[titleStart+7 : titleEnd]
-        return sanitizeFilename(title) + ".md"
+        title := strings.TrimSpace(content[titleStart+7 : titleEnd])
+        if title != "" {
+            return sanitizeFilename(title) + ".rollup.md", nil
+        }
     }
 
-    // If no title found, use the URL
-    return sanitizeFilename(url) + ".md"
+    // If no title found or title is empty, use the URL
+    parsedURL, err := url.Parse(urlStr)
+    if err != nil {
+        return "", fmt.Errorf("invalid URL: %v", err)
+    }
+
+    if parsedURL.Host == "" {
+        return "", fmt.Errorf("invalid URL: missing host")
+    }
+
+    filename := parsedURL.Host
+    if parsedURL.Path != "" && parsedURL.Path != "/" {
+        filename += strings.TrimSuffix(parsedURL.Path, "/")
+    }
+    return sanitizeFilename(filename) + ".rollup.md", nil
 }
 
 func sanitizeFilename(name string) string {
@@ -215,3 +290,15 @@ func sanitizeFilename(name string) string {
 
     return name
 }
+
+func convertPathOverrides(configOverrides []config.PathOverride) []scraper.PathOverride {
+    scraperOverrides := make([]scraper.PathOverride, len(configOverrides))
+    for i, override := range configOverrides {
+        scraperOverrides[i] = scraper.PathOverride{
+            Path:             override.Path,
+            CSSLocator:       override.CSSLocator,
+            ExcludeSelectors: override.ExcludeSelectors,
+        }
+    }
+    return scraperOverrides
+}
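The `testExtractAndConvertContent` and `testExtractLinks` variables introduced in this diff are a common Go seam for stubbing package-level functions: production code calls through the variable, and a test reassigns it and restores the original afterwards. A self-contained sketch of the same pattern with hypothetical names:

```go
package main

import "fmt"

// fetchPage is the real implementation.
func fetchPage(url string) (string, error) {
	return "real content from " + url, nil
}

// fetchPageFn is the seam: callers go through the variable, so tests can
// reassign it, just as cmd/web_test.go does with testExtractAndConvertContent.
var fetchPageFn = fetchPage

func scrape(url string) (string, error) {
	return fetchPageFn(url)
}

func main() {
	// Swap in a stub and restore the original when done.
	orig := fetchPageFn
	fetchPageFn = func(url string) (string, error) {
		return "Mocked content for " + url, nil
	}
	defer func() { fetchPageFn = orig }()

	out, _ := scrape("http://example.com")
	fmt.Println(out) // Mocked content for http://example.com
}
```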
cmd/web_test.go (new file, 154 lines)

@@ -0,0 +1,154 @@
+package cmd
+
+import (
+    "strings"
+    "testing"
+
+    "github.com/tnypxl/rollup/internal/config"
+)
+
+func TestConvertPathOverrides(t *testing.T) {
+    configOverrides := []config.PathOverride{
+        {
+            Path:             "/blog",
+            CSSLocator:       "article",
+            ExcludeSelectors: []string{".ads", ".comments"},
+        },
+        {
+            Path:             "/products",
+            CSSLocator:       ".product-description",
+            ExcludeSelectors: []string{".related-items"},
+        },
+    }
+
+    scraperOverrides := convertPathOverrides(configOverrides)
+
+    if len(scraperOverrides) != len(configOverrides) {
+        t.Errorf("Expected %d overrides, got %d", len(configOverrides), len(scraperOverrides))
+    }
+
+    for i, override := range scraperOverrides {
+        if override.Path != configOverrides[i].Path {
+            t.Errorf("Expected Path %s, got %s", configOverrides[i].Path, override.Path)
+        }
+        if override.CSSLocator != configOverrides[i].CSSLocator {
+            t.Errorf("Expected CSSLocator %s, got %s", configOverrides[i].CSSLocator, override.CSSLocator)
+        }
+        if len(override.ExcludeSelectors) != len(configOverrides[i].ExcludeSelectors) {
+            t.Errorf("Expected %d ExcludeSelectors, got %d", len(configOverrides[i].ExcludeSelectors), len(override.ExcludeSelectors))
+        }
+        for j, selector := range override.ExcludeSelectors {
+            if selector != configOverrides[i].ExcludeSelectors[j] {
+                t.Errorf("Expected ExcludeSelector %s, got %s", configOverrides[i].ExcludeSelectors[j], selector)
+            }
+        }
+    }
+}
+
+func TestSanitizeFilename(t *testing.T) {
+    tests := []struct {
+        input    string
+        expected string
+    }{
+        {"Hello, World!", "Hello_World"},
+        {"file/with/path", "file_with_path"},
+        {"file.with.dots", "file_with_dots"},
+        {"___leading_underscores___", "leading_underscores"},
+        {"", "untitled"},
+        {"!@#$%^&*()", "untitled"},
+    }
+
+    for _, test := range tests {
+        result := sanitizeFilename(test.input)
+        if result != test.expected {
+            t.Errorf("sanitizeFilename(%q) = %q; want %q", test.input, result, test.expected)
+        }
+    }
+}
+
+func TestGetFilenameFromContent(t *testing.T) {
+    tests := []struct {
+        content   string
+        url       string
+        expected  string
+        expectErr bool
+    }{
+        {"<title>Test Page</title>", "http://example.com", "Test_Page.rollup.md", false},
+        {"No title here", "http://example.com/page", "example_com_page.rollup.md", false},
+        {"<title> Trim Me </title>", "http://example.com", "Trim_Me.rollup.md", false},
+        {"<title></title>", "http://example.com", "example_com.rollup.md", false},
+        {"<title> </title>", "http://example.com", "example_com.rollup.md", false},
+        {"Invalid URL", "not a valid url", "", true},
+        {"No host", "http://", "", true},
+    }
+
+    for _, test := range tests {
+        result, err := getFilenameFromContent(test.content, test.url)
+        if test.expectErr {
+            if err == nil {
+                t.Errorf("getFilenameFromContent(%q, %q) expected an error, but got none", test.content, test.url)
+            }
+        } else {
+            if err != nil {
+                t.Errorf("getFilenameFromContent(%q, %q) unexpected error: %v", test.content, test.url, err)
+            }
+            if result != test.expected {
+                t.Errorf("getFilenameFromContent(%q, %q) = %q; want %q", test.content, test.url, result, test.expected)
+            }
+        }
+    }
+}
+
+// Mock functions for testing
+func mockExtractAndConvertContent(urlStr string) (string, error) {
+    return "Mocked content for " + urlStr, nil
+}
+
+func mockExtractLinks() ([]string, error) {
+    return []string{"http://example.com/link1", "http://example.com/link2"}, nil
+}
+
+func TestScrapeURL(t *testing.T) {
+    // Store the original functions
+    originalExtractAndConvertContent := testExtractAndConvertContent
+    originalExtractLinks := testExtractLinks
+
+    // Define mock functions
+    testExtractAndConvertContent = func(urlStr string) (string, error) {
+        return "Mocked content for " + urlStr, nil
+    }
+    testExtractLinks = func(urlStr string) ([]string, error) {
+        return []string{"http://example.com/link1", "http://example.com/link2"}, nil
+    }
+
+    // Defer the restoration of original functions
+    defer func() {
+        testExtractAndConvertContent = originalExtractAndConvertContent
+        testExtractLinks = originalExtractLinks
+    }()
+
+    tests := []struct {
+        url           string
+        depth         int
+        expectedCalls int
+    }{
+        {"http://example.com", 0, 1},
+        {"http://example.com", 1, 3},
+        {"http://example.com", 2, 3}, // Same as depth 1 because our mock only returns 2 links
+    }
+
+    for _, test := range tests {
+        visited := make(map[string]bool)
+        content, err := scrapeURL(test.url, test.depth, visited)
+        if err != nil {
+            t.Errorf("scrapeURL(%q, %d) returned error: %v", test.url, test.depth, err)
+            continue
+        }
+        if len(visited) != test.expectedCalls {
+            t.Errorf("scrapeURL(%q, %d) made %d calls, expected %d", test.url, test.depth, len(visited), test.expectedCalls)
+        }
+        expectedContent := "Mocked content for " + test.url
+        if !strings.Contains(content, expectedContent) {
+            t.Errorf("scrapeURL(%q, %d) content doesn't contain %q", test.url, test.depth, expectedContent)
+        }
+    }
+}
docs/CHANGELOG.md (new file, 21 lines)

@@ -0,0 +1,21 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [0.0.3] - 2024-09-22
+
+### Added
+- Implemented web scraping functionality using Playwright
+- Added support for CSS selectors to extract specific content
+- Introduced rate limiting for web requests
+- Created configuration options for scraping settings
+
+### Changed
+- Improved error handling and logging throughout the application
+- Enhanced URL parsing and validation
+
+### Fixed
+- Resolved issues with concurrent scraping operations
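The Playwright-backed scraping noted in the changelog is exposed through `scraper.FetchWebpageContent`, alongside the `InitBrowser`/`CloseBrowser` wrappers added in this change set. A minimal sketch of calling it from inside the module (the URL is illustrative and error handling is abbreviated):

```go
package main

import (
	"fmt"
	"log"

	"github.com/tnypxl/rollup/internal/scraper"
)

func main() {
	// The scraper logs through its own package-level logger.
	scraper.SetupLogger(false)

	if err := scraper.InitBrowser(); err != nil {
		log.Fatalf("starting Playwright: %v", err)
	}
	defer scraper.CloseBrowser()

	html, err := scraper.FetchWebpageContent("https://example.com")
	if err != nil {
		log.Fatalf("fetching page: %v", err)
	}
	fmt.Println(len(html), "bytes of rendered HTML")
}
```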
go.mod (1 change)

@@ -5,6 +5,7 @@ go 1.23
 
 require (
     github.com/JohannesKaufmann/html-to-markdown v1.6.0
     github.com/spf13/cobra v1.8.1
+    golang.org/x/time v0.6.0
 )
 
 require (
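The new `golang.org/x/time` requirement backs the token-bucket rate limiting used by the scraper. A minimal standalone sketch of the same `rate.NewLimiter`/`Wait` pattern, using the default values applied in `cmd/web.go` (1 request per second, burst of 3):

```go
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	// 1 request per second with a burst of 3, the defaults from cmd/web.go.
	limiter := rate.NewLimiter(rate.Limit(1.0), 3)

	start := time.Now()
	for i := 0; i < 5; i++ {
		// Wait blocks until the token bucket allows another request.
		if err := limiter.Wait(context.Background()); err != nil {
			fmt.Println("rate limiter error:", err)
			return
		}
		fmt.Printf("request %d at %v\n", i+1, time.Since(start).Round(time.Millisecond))
	}
}
```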
go.sum (2 changes)

@@ -102,6 +102,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
+golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
internal/config/config.go

@@ -15,15 +15,27 @@ type Config struct {
 }
 
 type ScrapeConfig struct {
-    URLs       []URLConfig `yaml:"urls"`
+    Sites      []SiteConfig `yaml:"sites"`
     OutputType string       `yaml:"output_type"`
+    RequestsPerSecond float64 `yaml:"requests_per_second"`
+    BurstLimit        int     `yaml:"burst_limit"`
 }
 
-type URLConfig struct {
-    URL              string   `yaml:"url"`
+type SiteConfig struct {
+    BaseURL          string   `yaml:"base_url"`
     CSSLocator       string   `yaml:"css_locator"`
     ExcludeSelectors []string `yaml:"exclude_selectors"`
-    OutputAlias      string   `yaml:"output_alias"`
+    MaxDepth         int      `yaml:"max_depth"`
+    AllowedPaths     []string `yaml:"allowed_paths"`
+    ExcludePaths     []string `yaml:"exclude_paths"`
+    OutputAlias      string   `yaml:"output_alias"`
+    PathOverrides    []PathOverride `yaml:"path_overrides"`
+}
+
+type PathOverride struct {
+    Path             string   `yaml:"path"`
+    CSSLocator       string   `yaml:"css_locator"`
+    ExcludeSelectors []string `yaml:"exclude_selectors"`
 }
 
 func Load(configPath string) (*Config, error) {
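A short sketch of consuming the new site-based configuration through the package's own helpers; `Load`, `DefaultConfigPath`, and `FileExists` all appear in this change set, while the loop body is illustrative:

```go
package main

import (
	"fmt"
	"log"

	"github.com/tnypxl/rollup/internal/config"
)

func main() {
	path := config.DefaultConfigPath() // "rollup.yml"
	if !config.FileExists(path) {
		log.Fatalf("no %s found", path)
	}

	cfg, err := config.Load(path)
	if err != nil {
		log.Fatalf("loading config: %v", err)
	}

	// Each entry under scrape.sites becomes a SiteConfig.
	for _, site := range cfg.Scrape.Sites {
		fmt.Printf("site %s: depth=%d, allowed=%v\n",
			site.BaseURL, site.MaxDepth, site.AllowedPaths)
	}
}
```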
internal/config/config_test.go (new file, 120 lines)

@@ -0,0 +1,120 @@
+package config
+
+import (
+    "os"
+    "reflect"
+    "testing"
+)
+
+func TestLoad(t *testing.T) {
+    // Create a temporary config file
+    content := []byte(`
+file_types:
+  - go
+  - md
+ignore:
+  - "*.tmp"
+  - "**/*.log"
+code_generated:
+  - "generated_*.go"
+scrape:
+  sites:
+    - base_url: "https://example.com"
+      css_locator: "main"
+      exclude_selectors:
+        - ".ads"
+      max_depth: 2
+      allowed_paths:
+        - "/blog"
+      exclude_paths:
+        - "/admin"
+      output_alias: "example"
+      path_overrides:
+        - path: "/special"
+          css_locator: ".special-content"
+          exclude_selectors:
+            - ".sidebar"
+  output_type: "single"
+  requests_per_second: 1.0
+  burst_limit: 5
+`)
+
+    tmpfile, err := os.CreateTemp("", "config*.yml")
+    if err != nil {
+        t.Fatalf("Failed to create temp file: %v", err)
+    }
+    defer os.Remove(tmpfile.Name())
+
+    if _, err = tmpfile.Write(content); err != nil {
+        t.Fatalf("Failed to write to temp file: %v", err)
+    }
+    if err = tmpfile.Close(); err != nil {
+        t.Fatalf("Failed to close temp file: %v", err)
+    }
+
+    // Test loading the config
+    config, err := Load(tmpfile.Name())
+    if err != nil {
+        t.Fatalf("Load() failed: %v", err)
+    }
+
+    // Check if the loaded config matches the expected values
+    expectedConfig := &Config{
+        FileTypes:     []string{"go", "md"},
+        Ignore:        []string{"*.tmp", "**/*.log"},
+        CodeGenerated: []string{"generated_*.go"},
+        Scrape: ScrapeConfig{
+            Sites: []SiteConfig{
+                {
+                    BaseURL:          "https://example.com",
+                    CSSLocator:       "main",
+                    ExcludeSelectors: []string{".ads"},
+                    MaxDepth:         2,
+                    AllowedPaths:     []string{"/blog"},
+                    ExcludePaths:     []string{"/admin"},
+                    OutputAlias:      "example",
+                    PathOverrides: []PathOverride{
+                        {
+                            Path:             "/special",
+                            CSSLocator:       ".special-content",
+                            ExcludeSelectors: []string{".sidebar"},
+                        },
+                    },
+                },
+            },
+            OutputType:        "single",
+            RequestsPerSecond: 1.0,
+            BurstLimit:        5,
+        },
+    }
+
+    if !reflect.DeepEqual(config, expectedConfig) {
+        t.Errorf("Loaded config does not match expected config.\nGot: %+v\nWant: %+v", config, expectedConfig)
+    }
+}
+
+func TestDefaultConfigPath(t *testing.T) {
+    expected := "rollup.yml"
+    result := DefaultConfigPath()
+    if result != expected {
+        t.Errorf("DefaultConfigPath() = %q, want %q", result, expected)
+    }
+}
+
+func TestFileExists(t *testing.T) {
+    // Test with an existing file
+    tmpfile, err := os.CreateTemp("", "testfile")
+    if err != nil {
+        t.Fatalf("Failed to create temp file: %v", err)
+    }
+    defer os.Remove(tmpfile.Name())
+
+    if !FileExists(tmpfile.Name()) {
+        t.Errorf("FileExists(%q) = false, want true", tmpfile.Name())
+    }
+
+    // Test with a non-existing file
+    if FileExists("non_existing_file.txt") {
+        t.Errorf("FileExists(\"non_existing_file.txt\") = true, want false")
+    }
+}
internal/scraper/scraper.go

@@ -1,17 +1,22 @@
 package scraper
 
 import (
+    "context"
     "fmt"
-    "io/ioutil"
+    "io"
     "log"
     "math/rand"
+    "net/url"
+    "os"
     "regexp"
     "strings"
+    "sync"
     "time"
 
+    md "github.com/JohannesKaufmann/html-to-markdown"
     "github.com/PuerkitoBio/goquery"
     "github.com/playwright-community/playwright-go"
-    md "github.com/JohannesKaufmann/html-to-markdown"
+    "golang.org/x/time/rate"
 )
 
 var logger *log.Logger
@@ -23,51 +28,236 @@ var (
 
 // Config holds the scraper configuration
 type Config struct {
-    URLs       []URLConfig
+    Sites      []SiteConfig
     OutputType string
     Verbose    bool
+    Scrape     ScrapeConfig
 }
 
-// ScrapeMultipleURLs scrapes multiple URLs concurrently
-func ScrapeMultipleURLs(config Config) (map[string]string, error) {
+// ScrapeConfig holds the scraping-specific configuration
+type ScrapeConfig struct {
+    RequestsPerSecond float64
+    BurstLimit        int
+}
+
+// SiteConfig holds configuration for a single site
+type SiteConfig struct {
+    BaseURL          string
+    CSSLocator       string
+    ExcludeSelectors []string
+    MaxDepth         int
+    AllowedPaths     []string
+    ExcludePaths     []string
+    OutputAlias      string
+    PathOverrides    []PathOverride
+}
+
+// PathOverride holds path-specific overrides
+type PathOverride struct {
+    Path             string
+    CSSLocator       string
+    ExcludeSelectors []string
+}
+
+func ScrapeSites(config Config) (map[string]string, error) {
+    logger.Println("Starting ScrapeSites function - Verbose mode is active")
     results := make(chan struct {
         url     string
         content string
         err     error
-    }, len(config.URLs))
+    })
 
-    for _, urlConfig := range config.URLs {
-        go func(cfg URLConfig) {
-            content, err := scrapeURL(cfg)
-            results <- struct {
-                url     string
-                content string
-                err     error
-            }{cfg.URL, content, err}
-        }(urlConfig)
+    limiter := rate.NewLimiter(rate.Limit(config.Scrape.RequestsPerSecond), config.Scrape.BurstLimit)
+    logger.Printf("Rate limiter configured with %f requests per second and burst limit of %d\n", config.Scrape.RequestsPerSecond, config.Scrape.BurstLimit)
+
+    var wg sync.WaitGroup
+    totalURLs := 0
+    for _, site := range config.Sites {
+        logger.Printf("Processing site: %s\n", site.BaseURL)
+        wg.Add(1)
+        go func(site SiteConfig) {
+            defer wg.Done()
+            for _, path := range site.AllowedPaths {
+                fullURL := site.BaseURL + path
+                totalURLs++
+                logger.Printf("Queueing URL for scraping: %s\n", fullURL)
+                scrapeSingleURL(fullURL, site, results, limiter)
+            }
+        }(site)
     }
 
+    go func() {
+        wg.Wait()
+        close(results)
+        logger.Println("All goroutines completed, results channel closed")
+    }()
+
     scrapedContent := make(map[string]string)
-    for i := 0; i < len(config.URLs); i++ {
-        result := <-results
+    for result := range results {
         if result.err != nil {
             logger.Printf("Error scraping %s: %v\n", result.url, result.err)
             continue
         }
+        logger.Printf("Successfully scraped content from %s (length: %d)\n", result.url, len(result.content))
         scrapedContent[result.url] = result.content
     }
 
+    logger.Printf("Total URLs processed: %d\n", totalURLs)
+    logger.Printf("Successfully scraped content from %d URLs\n", len(scrapedContent))
+
     return scrapedContent, nil
 }
 
-func scrapeURL(config URLConfig) (string, error) {
-    content, err := FetchWebpageContent(config.URL)
+func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
+    url     string
+    content string
+    err     error
+}, limiter *rate.Limiter,
+) {
+    logger.Printf("Starting to scrape URL: %s\n", url)
+
+    // Wait for rate limiter before making the request
+    err := limiter.Wait(context.Background())
+    if err != nil {
+        logger.Printf("Rate limiter error for %s: %v\n", url, err)
+        results <- struct {
+            url     string
+            content string
+            err     error
+        }{url, "", fmt.Errorf("rate limiter error: %v", err)}
+        return
+    }
+
+    cssLocator, excludeSelectors := getOverrides(url, site)
+    logger.Printf("Using CSS locator for %s: %s\n", url, cssLocator)
+    logger.Printf("Exclude selectors for %s: %v\n", url, excludeSelectors)
+
+    content, err := scrapeURL(url, cssLocator, excludeSelectors)
+    if err != nil {
+        logger.Printf("Error scraping %s: %v\n", url, err)
+        results <- struct {
+            url     string
+            content string
+            err     error
+        }{url, "", err}
+        return
+    }
+
+    if content == "" {
+        logger.Printf("Warning: Empty content scraped from %s\n", url)
+    } else {
+        logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(content))
+    }
+
+    results <- struct {
+        url     string
+        content string
+        err     error
+    }{url, content, nil}
+}
+
+func scrapeSite(site SiteConfig, results chan<- struct {
+    url     string
+    content string
+    err     error
+}, limiter *rate.Limiter,
+) {
+    visited := make(map[string]bool)
+    queue := []string{site.BaseURL}
+
+    for len(queue) > 0 {
+        url := queue[0]
+        queue = queue[1:]
+
+        if visited[url] {
+            continue
+        }
+        visited[url] = true
+
+        if !isAllowedURL(url, site) {
+            continue
+        }
+
+        // Wait for rate limiter before making the request
+        err := limiter.Wait(context.Background())
+        if err != nil {
+            results <- struct {
+                url     string
+                content string
+                err     error
+            }{url, "", fmt.Errorf("rate limiter error: %v", err)}
+            continue
+        }
+
+        cssLocator, excludeSelectors := getOverrides(url, site)
+        content, err := scrapeURL(url, cssLocator, excludeSelectors)
+        results <- struct {
+            url     string
+            content string
+            err     error
+        }{url, content, err}
+
+        if len(visited) < site.MaxDepth {
+            links, _ := ExtractLinks(url)
+            for _, link := range links {
+                if !visited[link] && isAllowedURL(link, site) {
+                    queue = append(queue, link)
+                }
+            }
+        }
+    }
+}
+
+func isAllowedURL(urlStr string, site SiteConfig) bool {
+    parsedURL, err := url.Parse(urlStr)
+    if err != nil {
+        return false
+    }
+
+    baseURL, _ := url.Parse(site.BaseURL)
+    if parsedURL.Host != baseURL.Host {
+        return false
+    }
+
+    path := parsedURL.Path
+    for _, allowedPath := range site.AllowedPaths {
+        if strings.HasPrefix(path, allowedPath) {
+            for _, excludePath := range site.ExcludePaths {
+                if strings.HasPrefix(path, excludePath) {
+                    return false
+                }
+            }
+            return true
+        }
+    }
+
+    return false
+}
+
+func getOverrides(urlStr string, site SiteConfig) (string, []string) {
+    parsedURL, _ := url.Parse(urlStr)
+    path := parsedURL.Path
+
+    for _, override := range site.PathOverrides {
+        if strings.HasPrefix(path, override.Path) {
+            if override.CSSLocator != "" {
+                return override.CSSLocator, override.ExcludeSelectors
+            }
+            return site.CSSLocator, override.ExcludeSelectors
+        }
+    }
+
+    return site.CSSLocator, site.ExcludeSelectors
+}
+
+func scrapeURL(url, cssLocator string, excludeSelectors []string) (string, error) {
+    content, err := FetchWebpageContent(url)
     if err != nil {
         return "", err
     }
 
-    if config.CSSLocator != "" {
-        content, err = ExtractContentWithCSS(content, config.CSSLocator, config.ExcludeSelectors)
+    if cssLocator != "" {
+        content, err = ExtractContentWithCSS(content, cssLocator, excludeSelectors)
         if err != nil {
             return "", err
         }
@@ -106,9 +296,9 @@ type URLConfig struct {
 // SetupLogger initializes the logger based on the verbose flag
 func SetupLogger(verbose bool) {
     if verbose {
-        logger = log.New(log.Writer(), "SCRAPER: ", log.LstdFlags)
+        logger = log.New(os.Stdout, "SCRAPER: ", log.LstdFlags)
     } else {
-        logger = log.New(ioutil.Discard, "", 0)
+        logger = log.New(io.Discard, "", 0)
     }
 }
 
@@ -151,6 +341,16 @@ func ClosePlaywright() {
     }
 }
 
+// InitBrowser initializes the browser
+func InitBrowser() error {
+    return InitPlaywright()
+}
+
+// CloseBrowser closes the browser
+func CloseBrowser() {
+    ClosePlaywright()
+}
+
 // FetchWebpageContent retrieves the content of a webpage using Playwright
 func FetchWebpageContent(urlStr string) (string, error) {
     logger.Printf("Fetching webpage content for URL: %s\n", urlStr)
@@ -189,7 +389,9 @@ func FetchWebpageContent(urlStr string) (string, error) {
     }
 
     logger.Println("Waiting for body element")
-    _, err = page.WaitForSelector("body", playwright.PageWaitForSelectorOptions{
+
+    bodyElement := page.Locator("body")
+    err = bodyElement.WaitFor(playwright.LocatorWaitForOptions{
         State: playwright.WaitForSelectorStateVisible,
     })
     if err != nil {
@@ -206,7 +408,7 @@ func FetchWebpageContent(urlStr string) (string, error) {
 
     if content == "" {
         logger.Println(" content is empty, falling back to body content")
-        content, err = page.InnerHTML("body")
+        content, err = bodyElement.InnerHTML()
         if err != nil {
             logger.Printf("Error getting body content: %v\n", err)
             return "", fmt.Errorf("could not get body content: %v", err)
@@ -259,6 +461,8 @@ func scrollPage(page playwright.Page) error {
         () => {
             window.scrollTo(0, document.body.scrollHeight);
             return document.body.scrollHeight;
+            // wait for 500 ms
+            new Promise(resolve => setTimeout(resolve, 500));
         }
     `
 
@@ -290,7 +494,9 @@ func scrollPage(page playwright.Page) error {
 
         previousHeight = currentHeight
 
-        page.WaitForTimeout(500)
+        // Wait for a while before scrolling again
+
+
     }
 
     logger.Println("Scrolling back to top")
@@ -330,7 +536,9 @@ func ExtractLinks(urlStr string) ([]string, error) {
 
     var result []string
     for _, link := range links.([]interface{}) {
-        result = append(result, link.(string))
+        // Normalize URL by removing trailing slash
+        normalizedLink := strings.TrimRight(link.(string), "/")
+        result = append(result, normalizedLink)
     }
 
     logger.Printf("Extracted %d links\n", len(result))
@@ -364,6 +572,23 @@ func ExtractContentWithCSS(content, includeSelector string, excludeSelectors []s
         return "", fmt.Errorf("error extracting content with CSS selector: %v", err)
     }
 
+    // Trim leading and trailing whitespace
+    selectedContent = strings.TrimSpace(selectedContent)
+
+    // Normalize newlines
+    selectedContent = strings.ReplaceAll(selectedContent, "\r\n", "\n")
+    selectedContent = strings.ReplaceAll(selectedContent, "\r", "\n")
+
+    // Remove indentation while preserving structure
+    lines := strings.Split(selectedContent, "\n")
+    for i, line := range lines {
+        lines[i] = strings.TrimSpace(line)
+    }
+    selectedContent = strings.Join(lines, "\n")
+
+    // Remove any leading or trailing newlines
+    selectedContent = strings.Trim(selectedContent, "\n")
+
     logger.Printf("Extracted content length: %d\n", len(selectedContent))
     return selectedContent, nil
 }
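Putting the pieces together, a hedged sketch of driving `ScrapeSites` directly with a single `SiteConfig`, mirroring how `cmd/web.go` assembles its `scraper.Config` (the concrete site values are illustrative):

```go
package main

import (
	"fmt"
	"log"

	"github.com/tnypxl/rollup/internal/scraper"
)

func main() {
	scraper.SetupLogger(true)
	if err := scraper.InitBrowser(); err != nil {
		log.Fatalf("init browser: %v", err)
	}
	defer scraper.CloseBrowser()

	cfg := scraper.Config{
		Sites: []scraper.SiteConfig{{
			BaseURL:      "https://example.com",
			CSSLocator:   "main",
			AllowedPaths: []string{"/blog", "/docs"},
			MaxDepth:     2,
		}},
		OutputType: "single",
		Verbose:    true,
		// Defaults matching cmd/web.go: 1 request/second, burst of 3.
		Scrape: scraper.ScrapeConfig{RequestsPerSecond: 1.0, BurstLimit: 3},
	}

	content, err := scraper.ScrapeSites(cfg)
	if err != nil {
		log.Fatalf("scraping: %v", err)
	}
	for url, md := range content {
		fmt.Printf("%s -> %d bytes of markdown\n", url, len(md))
	}
}
```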
internal/scraper/scraper_test.go (new file, 181 lines)

@@ -0,0 +1,181 @@
+package scraper
+
+import (
+    "io"
+    "log"
+    "net/http"
+    "net/http/httptest"
+    "reflect"
+    "strings"
+    "testing"
+)
+
+func TestIsAllowedURL(t *testing.T) {
+    site := SiteConfig{
+        BaseURL:      "https://example.com",
+        AllowedPaths: []string{"/blog", "/products"},
+        ExcludePaths: []string{"/admin", "/private"},
+    }
+
+    tests := []struct {
+        url      string
+        expected bool
+    }{
+        {"https://example.com/blog/post1", true},
+        {"https://example.com/products/item1", true},
+        {"https://example.com/admin/dashboard", false},
+        {"https://example.com/private/data", false},
+        {"https://example.com/other/page", false},
+        {"https://othersite.com/blog/post1", false},
+    }
+
+    for _, test := range tests {
+        result := isAllowedURL(test.url, site)
+        if result != test.expected {
+            t.Errorf("isAllowedURL(%q) = %v, want %v", test.url, result, test.expected)
+        }
+    }
+}
+
+func TestGetOverrides(t *testing.T) {
+    site := SiteConfig{
+        CSSLocator:       "main",
+        ExcludeSelectors: []string{".ads"},
+        PathOverrides: []PathOverride{
+            {
+                Path:             "/special",
+                CSSLocator:       ".special-content",
+                ExcludeSelectors: []string{".sidebar"},
+            },
+        },
+    }
+
+    tests := []struct {
+        url              string
+        expectedLocator  string
+        expectedExcludes []string
+    }{
+        {"https://example.com/normal", "main", []string{".ads"}},
+        {"https://example.com/special", ".special-content", []string{".sidebar"}},
+        {"https://example.com/special/page", ".special-content", []string{".sidebar"}},
+    }
+
+    for _, test := range tests {
+        locator, excludes := getOverrides(test.url, site)
+        if locator != test.expectedLocator {
+            t.Errorf("getOverrides(%q) locator = %q, want %q", test.url, locator, test.expectedLocator)
+        }
+        if !reflect.DeepEqual(excludes, test.expectedExcludes) {
+            t.Errorf("getOverrides(%q) excludes = %v, want %v", test.url, excludes, test.expectedExcludes)
+        }
+    }
+}
+
+func TestExtractContentWithCSS(t *testing.T) {
+    // Initialize logger for testing
+    logger = log.New(io.Discard, "", 0)
+
+    html := `
+<html>
+<body>
+<main>
+<h1>Main Content</h1>
+<p>This is the main content.</p>
+<div class="ads">Advertisement</div>
+</main>
+<aside>Sidebar content</aside>
+</body>
+</html>
+`
+
+    tests := []struct {
+        includeSelector  string
+        excludeSelectors []string
+        expected         string
+    }{
+        {"main", nil, "<h1>Main Content</h1>\n<p>This is the main content.</p>\n<div class=\"ads\">Advertisement</div>"},
+        {"main", []string{".ads"}, "<h1>Main Content</h1>\n<p>This is the main content.</p>"},
+        {"aside", nil, "Sidebar content"},
+    }
+
+    for _, test := range tests {
+        result, err := ExtractContentWithCSS(html, test.includeSelector, test.excludeSelectors)
+        if err != nil {
+            t.Errorf("ExtractContentWithCSS() returned error: %v", err)
+            continue
+        }
+        if strings.TrimSpace(result) != strings.TrimSpace(test.expected) {
+            t.Errorf("ExtractContentWithCSS() = %q, want %q", result, test.expected)
+        }
+    }
+}
+
+func TestProcessHTMLContent(t *testing.T) {
+    html := `
+<html>
+<body>
+<h1>Test Heading</h1>
+<p>This is a <strong>test</strong> paragraph.</p>
+<ul>
+<li>Item 1</li>
+<li>Item 2</li>
+</ul>
+</body>
+</html>
+`
+
+    expected := strings.TrimSpace(`
+# Test Heading
+
+This is a **test** paragraph.
+
+- Item 1
+- Item 2
+`)
+
+    result, err := ProcessHTMLContent(html, Config{})
+    if err != nil {
+        t.Fatalf("ProcessHTMLContent() returned error: %v", err)
+    }
+
+    if strings.TrimSpace(result) != expected {
+        t.Errorf("ProcessHTMLContent() = %q, want %q", result, expected)
+    }
+}
+
+func TestExtractLinks(t *testing.T) {
+    // Initialize Playwright before running the test
+    if err := InitPlaywright(); err != nil {
+        t.Fatalf("Failed to initialize Playwright: %v", err)
+    }
+    defer ClosePlaywright()
+
+    server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+        w.Header().Set("Content-Type", "text/html")
+        w.Write([]byte(`
+<html>
+<body>
+<a href="https://example.com/page1">Page 1</a>
+<a href="https://example.com/page2">Page 2</a>
+<a href="https://othersite.com">Other Site</a>
+</body>
+</html>
+`))
+    }))
+    defer server.Close()
+
+    links, err := ExtractLinks(server.URL)
+    if err != nil {
+        t.Fatalf("ExtractLinks() returned error: %v", err)
+    }
+
+    expectedLinks := []string{
+        "https://example.com/page1",
+        "https://example.com/page2",
+        "https://othersite.com",
+    }
+
+    if !reflect.DeepEqual(links, expectedLinks) {
+        t.Errorf("ExtractLinks() = %v, want %v", links, expectedLinks)
+    }
+}