mirror of https://github.com/tnypxl/rollup.git
synced 2025-12-15 15:03:17 +00:00

Compare commits
30 Commits: fix-loggin ... link-navig
| SHA1 |
|---|
| e42ad24999 |
| 01465a08b7 |
| e3355269b8 |
| 54c3776baf |
| ee1561c502 |
| 5e8a257ff8 |
| 333b9a366c |
| 1869dae89a |
| d3ff7cb862 |
| ea410e4abb |
| 7d8e25b1ad |
| 691832e282 |
| 31e0fa5ea4 |
| 71f63ddaa8 |
| 574800c241 |
| d5a94f5468 |
| 59994c085c |
| 396f092d50 |
| 274ef7ea79 |
| a55e8df02a |
| 364b185269 |
| 952c2dda02 |
| de84d68b4c |
| e5d4c514a7 |
| 6ff44f81bb |
| 2fd411ce65 |
| 73116e8d82 |
| 5482621d99 |
| 3788a08b00 |
| 8ba54001ce |
78  README.md
@@ -4,16 +4,18 @@ Rollup aggregates the contents of text-based files and webpages into a markdown
## Features

- File type filtering
- Ignore patterns for excluding files
- Support for code-generated file detection
- Advanced web scraping functionality
- Verbose logging option for detailed output
- Exclusionary CSS selectors for web scraping
- Support for multiple URLs in web scraping
- File type filtering for targeted content aggregation
- Ignore patterns for excluding specific files or directories
- Support for code-generated file detection and exclusion
- Advanced web scraping functionality with depth control
- Verbose logging option for detailed operation insights
- Exclusionary CSS selectors for precise web content extraction
- Support for multiple URLs in web scraping operations
- Configurable output format for web scraping (single file or separate files)
- Configuration file support (YAML)
- Generation of default configuration file
- Flexible configuration file support (YAML)
- Automatic generation of default configuration file
- Custom output file naming
- Concurrent processing for improved performance

## Installation

@@ -61,27 +63,37 @@ rollup [command] [flags]
Rollup can be configured using a YAML file. By default, it looks for `rollup.yml` in the current directory. You can specify a different configuration file using the `--config` flag.

Example `rollup.yml`:

**Scrape Configuration Parameters:**

- `requests_per_second`: *(float, optional)* The rate at which requests are made per second during web scraping. Default is `1.0`.
- `burst_limit`: *(integer, optional)* The maximum number of requests that can be made in a burst. Default is `5`.

These parameters help control the request rate to avoid overloading the target servers and to comply with their rate limits.

**Example `rollup.yml` with Scrape Configuration:**

```yaml
file_types:
  - go
  - md
ignore:
  - node_modules/**
  - vendor/**
  - .git/**
code_generated:
  - "**/generated/**"
scrape:
  urls:
    - url: https://example.com
  requests_per_second: 1.0
  burst_limit: 5
  sites:
    - base_url: https://example.com
      css_locator: .content
      exclude_selectors:
        - .ads
        - .navigation
      max_depth: 2
      allowed_paths:
        - /blog
        - /docs
      exclude_paths:
        - /admin
      output_alias: example
      output_type: single
      path_overrides:
        - path: /special-page
          css_locator: .special-content
          exclude_selectors:
            - .special-ads
```
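For readers who want to see what these two settings do at runtime, here is a minimal, self-contained sketch of the token-bucket behaviour they configure. It uses golang.org/x/time/rate, the same package the scraper's rate limiter is built on later in this diff; the loop count and printed timings are illustrative only.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

// With requests_per_second=1.0 and burst_limit=5, the first five Wait calls
// return immediately (the burst), then roughly one token is granted per second.
func main() {
	limiter := rate.NewLimiter(rate.Limit(1.0), 5)
	start := time.Now()
	for i := 0; i < 7; i++ {
		_ = limiter.Wait(context.Background()) // blocks once the burst is spent
		fmt.Printf("request %d at %v\n", i+1, time.Since(start).Round(time.Second))
	}
}
```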

## Examples
@@ -92,10 +104,10 @@ scrape:
rollup files
```

2. Web scraping with multiple URLs:
2. Web scraping with multiple URLs and increased concurrency:

```bash
rollup web --urls=https://example.com,https://another-example.com
rollup web --urls=https://example.com,https://another-example.com --concurrent=8
```

3. Generate a default configuration file:

@@ -104,15 +116,25 @@ scrape:

rollup generate
```

4. Use a custom configuration file:
4. Use a custom configuration file and specify output:

```bash
rollup files --config=my-config.yml
rollup files --config=my-config.yml --output=project_summary.md
```

5. Web scraping with separate output files:
5. Web scraping with separate output files and custom timeout:

```bash
rollup web --urls=https://example.com,https://another-example.com --output=separate
rollup web --urls=https://example.com,https://another-example.com --output=separate --timeout=60
```

6. Rollup files with specific types and ignore patterns:

```bash
rollup files --types=.go,.md --ignore=vendor/**,*_test.go
```

7. Web scraping with depth and CSS selector:

```bash
rollup web --urls=https://example.com --depth=2 --css=.main-content
```

## Contributing

18  cmd/files.go
@@ -8,8 +8,11 @@ import (
"time"

"github.com/spf13/cobra"
"github.com/tnypxl/rollup/internal/config"
)

var cfg *config.Config

var (
path string
fileTypes string

@@ -24,7 +27,7 @@ var filesCmd = &cobra.Command{
in a given project, current path or a custom path, to a single timestamped markdown file
whose name is <project-directory-name>-rollup-<timestamp>.md.`,
RunE: func(cmd *cobra.Command, args []string) error {
return runRollup()
return runRollup(cfg)
},
}

@@ -87,16 +90,23 @@ func isIgnored(filePath string, patterns []string) bool {
return true
}
} else {
matched, err := filepath.Match(pattern, filepath.Base(filePath))
if err == nil && matched {
// Check if the pattern matches the full path or any part of it
if matched, _ := filepath.Match(pattern, filePath); matched {
return true
}
pathParts := strings.Split(filePath, string(os.PathSeparator))
for i := range pathParts {
partialPath := filepath.Join(pathParts[:i+1]...)
if matched, _ := filepath.Match(pattern, partialPath); matched {
return true
}
}
}
}
return false
}

func runRollup() error {
func runRollup(cfg *config.Config) error {
// Use config if available, otherwise use command-line flags
var types, codeGenList, ignoreList []string
if cfg != nil && len(cfg.FileTypes) > 0 {

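The partial-path loop added to isIgnored exists because Go's filepath.Match has no `**` semantics: a `*` never crosses a path separator. A standalone snippet (not project code) demonstrating the limitation the loop works around:

```go
package main

import (
	"fmt"
	"path/filepath"
)

// filepath.Match's '*' stops at '/', so "vendor/**" does not match the full
// path of a nested file, but it does match the "vendor/package" prefix —
// which is exactly what isIgnored checks by walking partial paths.
func main() {
	full, _ := filepath.Match("vendor/**", "vendor/package/file.go")
	partial, _ := filepath.Match("vendor/**", "vendor/package")
	fmt.Println(full, partial) // false true
}
```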
@@ -21,6 +21,9 @@ func TestMatchGlob(t *testing.T) {
{"**/*.go", "dir/subdir/file.go", true},
{"dir/*.go", "dir/file.go", true},
{"dir/*.go", "otherdir/file.go", false},
{"**/test_*.go", "internal/test_helper.go", true},
{"docs/**/*.md", "docs/api/endpoints.md", true},
{"docs/**/*.md", "src/docs/readme.md", false},
}

for _, test := range tests {

@@ -32,7 +35,7 @@ func TestMatchGlob(t *testing.T) {
}

func TestIsCodeGenerated(t *testing.T) {
patterns := []string{"generated_*.go", "**/auto_*.go"}
patterns := []string{"generated_*.go", "**/auto_*.go", "**/*_gen.go"}
tests := []struct {
path string
expected bool

@@ -41,6 +44,8 @@ func TestIsCodeGenerated(t *testing.T) {
{"normal_file.go", false},
{"subdir/auto_file.go", true},
{"subdir/normal_file.go", false},
{"pkg/models_gen.go", true},
{"pkg/handler.go", false},
}

for _, test := range tests {

@@ -52,7 +57,7 @@ func TestIsCodeGenerated(t *testing.T) {
}

func TestIsIgnored(t *testing.T) {
patterns := []string{"*.tmp", "**/*.log"}
patterns := []string{"*.tmp", "**/*.log", ".git/**", "vendor/**"}
tests := []struct {
path string
expected bool

@@ -61,6 +66,10 @@ func TestIsIgnored(t *testing.T) {
{"file.go", false},
{"subdir/file.log", true},
{"subdir/file.txt", false},
{".git/config", true},
{"src/.git/config", false},
{"vendor/package/file.go", true},
{"internal/vendor/file.go", false},
}

for _, test := range tests {

@@ -81,49 +90,56 @@ func TestRunRollup(t *testing.T) {

// Create some test files
files := map[string]string{
"file1.go": "package main\n\nfunc main() {}\n",
"file2.txt": "This is a text file.\n",
"subdir/file3.go": "package subdir\n\nfunc Func() {}\n",
"subdir/file4.json": "{\"key\": \"value\"}\n",
"file1.go": "package main\n\nfunc main() {}\n",
"file2.txt": "This is a text file.\n",
"subdir/file3.go": "package subdir\n\nfunc Func() {}\n",
"subdir/file4.json": "{\"key\": \"value\"}\n",
"generated_model.go": "// Code generated DO NOT EDIT.\n\npackage model\n",
"docs/api/readme.md": "# API Documentation\n",
".git/config": "[core]\n\trepositoryformatversion = 0\n",
"vendor/lib/helper.go": "package lib\n\nfunc Helper() {}\n",
}

for name, content := range files {
path := filepath.Join(tempDir, name)
err := os.MkdirAll(filepath.Dir(path), 0755)
if err != nil {
if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
t.Fatalf("Failed to create directory: %v", err)
}
err = os.WriteFile(path, []byte(content), 0644)
if err != nil {
if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
t.Fatalf("Failed to write file: %v", err)
}
}

// Set up test configuration
cfg = &config.Config{
FileTypes: []string{"go", "txt"},
Ignore: []string{"*.json"},
CodeGenerated: []string{},
FileTypes: []string{"go", "txt", "md"},
Ignore: []string{"*.json", ".git/**", "vendor/**"},
CodeGenerated: []string{"generated_*.go"},
}
path = tempDir

// Change working directory to the temp directory
originalWd, _ := os.Getwd()
os.Chdir(tempDir)
defer os.Chdir(originalWd)

// Run the rollup
err = runRollup()
if err != nil {
if err := runRollup(cfg); err != nil {
t.Fatalf("runRollup() failed: %v", err)
}

// Check if the output file was created
outputFiles, err := filepath.Glob(filepath.Join(tempDir, "*.rollup.md"))
outputFiles, err := filepath.Glob("*.rollup.md")
if err != nil {
t.Fatalf("Failed to glob output files: %v", err)
t.Fatalf("Error globbing for output file: %v", err)
}
if len(outputFiles) != 1 {
t.Fatalf("Expected 1 output file, got %d", len(outputFiles))
if len(outputFiles) == 0 {
allFiles, _ := filepath.Glob("*")
t.Fatalf("No rollup.md file found. Files in directory: %v", allFiles)
}
outputFile := outputFiles[0]

// Read the content of the output file
content, err := os.ReadFile(outputFiles[0])
content, err := os.ReadFile(outputFile)
if err != nil {
t.Fatalf("Failed to read output file: %v", err)
}

@@ -133,6 +149,8 @@ func TestRunRollup(t *testing.T) {
"# File: file1.go",
"# File: file2.txt",
"# File: subdir/file3.go",
"# File: docs/api/readme.md",
"# File: generated_model.go (Code-generated, Read-only)",
}
for _, expected := range expectedContent {
if !strings.Contains(string(content), expected) {

@@ -140,8 +158,15 @@ func TestRunRollup(t *testing.T) {
}
}

// Check if the ignored file is not included
if strings.Contains(string(content), "file4.json") {
t.Errorf("Output file contains ignored file: file4.json")
// Check if the ignored files are not included
ignoredContent := []string{
"file4.json",
".git/config",
"vendor/lib/helper.go",
}
for _, ignored := range ignoredContent {
if strings.Contains(string(content), ignored) {
t.Errorf("Output file contains ignored file: %s", ignored)
}
}
}

@@ -7,7 +7,6 @@ import (
var (
configFile string
cfg *config.Config
verbose bool
)

@@ -19,10 +18,10 @@ Use subcommands to perform specific operations.`,
}

func Execute(conf *config.Config) error {
cfg = conf
if cfg == nil {
cfg = &config.Config{} // Use an empty config if none is provided
if conf == nil {
conf = &config.Config{} // Use an empty config if none is provided
}
cfg = conf // Set the cfg variable in cmd/files.go
return rootCmd.Execute()
}

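For context, a plausible call site for the new Execute signature: main loads rollup.yml and passes the parsed config down, or passes nil and lets Execute substitute an empty config. The file itself and the `github.com/tnypxl/rollup/cmd` import path are assumptions, not shown in this diff.

```go
package main

import (
	"os"

	"github.com/tnypxl/rollup/cmd"
	"github.com/tnypxl/rollup/internal/config"
)

// Hypothetical main: load rollup.yml if present, otherwise hand nil to
// Execute, which falls back to &config.Config{}.
func main() {
	cfg, err := config.Load("rollup.yml")
	if err != nil {
		cfg = nil
	}
	if err := cmd.Execute(cfg); err != nil {
		os.Exit(1)
	}
}
```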
189  cmd/web.go
@@ -2,11 +2,9 @@ package cmd
import (
"fmt"
"io/ioutil"
"io"
"log"
"net/url"
"os"
"regexp"
"strings"
"time"

@@ -23,8 +21,6 @@ var (
excludeSelectors []string
)

var scraperConfig scraper.Config

var webCmd = &cobra.Command{
Use: "web",
Short: "Scrape main content from webpages and convert to Markdown",

@@ -40,17 +36,28 @@ func init() {
webCmd.Flags().StringSliceVar(&excludeSelectors, "exclude", []string{}, "CSS selectors to exclude from the extracted content (comma-separated)")
}

func validateScrapeConfig(scrapeConfig config.ScrapeConfig) error {
if scrapeConfig.RequestsPerSecond <= 0 {
return fmt.Errorf("requests_per_second must be greater than 0")
}
if scrapeConfig.BurstLimit <= 0 {
return fmt.Errorf("burst_limit must be greater than 0")
}
return nil
}

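This validation pairs with the defaults added to config.Load further down in this diff: Load back-fills `requests_per_second` (1.0) and `burst_limit` (5) when they are zero or missing, so a rollup.yml that omits them still passes the check, while an empty config (nil passed to Execute) fails it. A small illustrative helper, assuming the cmd package context; it is not part of the diff:

```go
// loadAndValidate sketches the intended call order: Load applies defaults,
// then validateScrapeConfig rejects anything still non-positive.
func loadAndValidate(path string) (*config.Config, error) {
	cfg, err := config.Load(path)
	if err != nil {
		return nil, err
	}
	if err := validateScrapeConfig(cfg.Scrape); err != nil {
		return nil, err
	}
	return cfg, nil
}
```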
func runWeb(cmd *cobra.Command, args []string) error {
scraper.SetupLogger(verbose)
logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
if !verbose {
logger.SetOutput(ioutil.Discard)
logger.SetOutput(io.Discard)
}
logger.Printf("Starting web scraping process with verbose mode: %v", verbose)
scraperConfig.Verbose = verbose

// Prepare site configurations
var siteConfigs []scraper.SiteConfig
if len(cfg.Scrape.Sites) > 0 {
// Use configurations from rollup.yml
logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
for i, site := range cfg.Scrape.Sites {

@@ -64,11 +71,15 @@ func runWeb(cmd *cobra.Command, args []string) error {
OutputAlias: site.OutputAlias,
PathOverrides: convertPathOverrides(site.PathOverrides),
}
logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
}
} else {
logger.Printf("No sites defined in rollup.yml, falling back to URL-based configuration")
// Use command-line URLs
if len(urls) == 0 {
logger.Println("Error: No URLs provided via --urls flag")
return fmt.Errorf("no URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
}
siteConfigs = make([]scraper.SiteConfig, len(urls))
for i, u := range urls {
siteConfigs[i] = scraper.SiteConfig{

@@ -76,43 +87,34 @@ func runWeb(cmd *cobra.Command, args []string) error {
CSSLocator: includeSelector,
ExcludeSelectors: excludeSelectors,
MaxDepth: depth,
AllowedPaths: []string{"/"}, // Allow all paths by default
}
logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
i+1, u, includeSelector, depth)
logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
i+1, u, includeSelector, depth)
}
}

if len(siteConfigs) == 0 {
logger.Println("Error: No sites or URLs provided")
return fmt.Errorf("no sites or URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
}

// Set default values for rate limiting
defaultRequestsPerSecond := 1.0
defaultBurstLimit := 3

// Use default values if not set in the configuration
requestsPerSecond := cfg.Scrape.RequestsPerSecond
if requestsPerSecond == 0 {
requestsPerSecond = defaultRequestsPerSecond
}
burstLimit := cfg.Scrape.BurstLimit
if burstLimit == 0 {
burstLimit = defaultBurstLimit
}

// Set up scraper configuration
scraperConfig := scraper.Config{
Sites: siteConfigs,
OutputType: outputType,
Verbose: verbose,
Scrape: scraper.ScrapeConfig{
RequestsPerSecond: requestsPerSecond,
BurstLimit: burstLimit,
RequestsPerSecond: cfg.Scrape.RequestsPerSecond,
BurstLimit: cfg.Scrape.BurstLimit,
},
}
logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
outputType, requestsPerSecond, burstLimit)
logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
outputType, scraperConfig.Scrape.RequestsPerSecond, scraperConfig.Scrape.BurstLimit)

// Validate scrape configuration
err := validateScrapeConfig(cfg.Scrape)
if err != nil {
logger.Printf("Invalid scrape configuration: %v", err)
return err
}

// Start scraping using scraper.ScrapeSites
logger.Println("Starting scraping process")
scrapedContent, err := scraper.ScrapeSites(scraperConfig)
if err != nil {

@@ -121,6 +123,7 @@ func runWeb(cmd *cobra.Command, args []string) error {
}
logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))

// Write output to files
if outputType == "single" {
logger.Println("Writing content to a single file")
return writeSingleFile(scrapedContent)

@@ -131,7 +134,7 @@ func runWeb(cmd *cobra.Command, args []string) error {
}

func writeSingleFile(content map[string]string) error {
outputFile := generateDefaultFilename(urls)
outputFile := generateDefaultFilename()
file, err := os.Create(outputFile)
if err != nil {
return fmt.Errorf("error creating output file: %v", err)

@@ -139,7 +142,7 @@ func writeSingleFile(content map[string]string) error {
defer file.Close()

for url, c := range content {
_, err = file.WriteString(fmt.Sprintf("# Content from %s\n\n%s\n\n---\n\n", url, c))
_, err = fmt.Fprintf(file, "# ::: Content from %s\n\n%s\n\n---\n\n", url, c)
if err != nil {
return fmt.Errorf("error writing content to file: %v", err)
}

@@ -151,17 +154,13 @@ func writeSingleFile(content map[string]string) error {

func writeMultipleFiles(content map[string]string) error {
for url, c := range content {
filename, err := getFilenameFromContent(c, url)
if err != nil {
return fmt.Errorf("error generating filename for %s: %v", url, err)
}

filename := sanitizeFilename(url) + ".rollup.md"
file, err := os.Create(filename)
if err != nil {
return fmt.Errorf("error creating output file %s: %v", filename, err)
}

_, err = file.WriteString(fmt.Sprintf("# Content from %s\n\n%s\n", url, c))
_, err = file.WriteString(fmt.Sprintf("# ::: Content from %s\n\n%s\n", url, c))
if err != nil {
file.Close()
return fmt.Errorf("error writing content to file %s: %v", filename, err)

@@ -174,109 +173,19 @@ func writeMultipleFiles(content map[string]string) error {
return nil
}

func generateDefaultFilename(urls []string) string {
func generateDefaultFilename() string {
timestamp := time.Now().Format("20060102-150405")
return fmt.Sprintf("web-%s.rollup.md", timestamp)
}

func scrapeRecursively(urlStr string, depth int) (string, error) {
visited := make(map[string]bool)
return scrapeURL(urlStr, depth, visited)
}

func scrapeURL(urlStr string, depth int, visited map[string]bool) (string, error) {
if depth < 0 || visited[urlStr] {
return "", nil
}

visited[urlStr] = true

content, err := testExtractAndConvertContent(urlStr)
if err != nil {
return "", err
}

if depth > 0 {
links, err := testExtractLinks(urlStr)
if err != nil {
return content, fmt.Errorf("error extracting links: %v", err)
}

for _, link := range links {
subContent, err := scrapeURL(link, depth-1, visited)
if err != nil {
fmt.Printf("Warning: Error scraping %s: %v\n", link, err)
continue
}
content += "\n\n---\n\n" + subContent
}
}

return content, nil
}

var testExtractAndConvertContent = extractAndConvertContent
var testExtractLinks = scraper.ExtractLinks

func extractAndConvertContent(urlStr string) (string, error) {
content, err := scraper.FetchWebpageContent(urlStr)
if err != nil {
return "", fmt.Errorf("error fetching webpage content: %v", err)
}

if includeSelector != "" {
content, err = scraper.ExtractContentWithCSS(content, includeSelector, excludeSelectors)
if err != nil {
return "", fmt.Errorf("error extracting content with CSS: %v", err)
}
}

markdown, err := scraper.ProcessHTMLContent(content, scraper.Config{})
if err != nil {
return "", fmt.Errorf("error processing HTML content: %v", err)
}

parsedURL, err := url.Parse(urlStr)
if err != nil {
return "", fmt.Errorf("error parsing URL: %v", err)
}
header := fmt.Sprintf("# Content from %s\n\n", parsedURL.String())

return header + markdown + "\n\n", nil
}

func getFilenameFromContent(content, urlStr string) (string, error) {
// Try to extract title from content
titleStart := strings.Index(content, "<title>")
titleEnd := strings.Index(content, "</title>")
if titleStart != -1 && titleEnd != -1 && titleEnd > titleStart {
title := strings.TrimSpace(content[titleStart+7 : titleEnd])
if title != "" {
return sanitizeFilename(title) + ".rollup.md", nil
}
}

// If no title found or title is empty, use the URL
parsedURL, err := url.Parse(urlStr)
if err != nil {
return "", fmt.Errorf("invalid URL: %v", err)
}

if parsedURL.Host == "" {
return "", fmt.Errorf("invalid URL: missing host")
}

filename := parsedURL.Host
if parsedURL.Path != "" && parsedURL.Path != "/" {
filename += strings.TrimSuffix(parsedURL.Path, "/")
}
return sanitizeFilename(filename) + ".rollup.md", nil
}

func sanitizeFilename(name string) string {
// Remove any character that isn't alphanumeric, dash, or underscore
reg := regexp.MustCompile("[^a-zA-Z0-9-_]+")
name = reg.ReplaceAllString(name, "_")
name = strings.Map(func(r rune) rune {
if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' {
return r
}
return '_'
}, name)

// Trim any leading or trailing underscores
name = strings.Trim(name, "_")

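A quick illustration of what the filename rules above produce for a typical URL; this is a standalone re-creation of the sanitizeFilename behaviour (the strings.Map pass is redundant with the regexp for ASCII input, so it is omitted here), not the project's code.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Shows the filename writeMultipleFiles would derive from a scraped URL:
// every run of characters outside [a-zA-Z0-9-_] collapses to "_".
func main() {
	name := "https://example.com/blog/post-1"
	name = regexp.MustCompile("[^a-zA-Z0-9-_]+").ReplaceAllString(name, "_")
	name = strings.Trim(name, "_")
	fmt.Println(name + ".rollup.md") // https_example_com_blog_post-1.rollup.md
}
```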
@@ -103,7 +103,7 @@ func mockExtractAndConvertContent(urlStr string) (string, error) {
return "Mocked content for " + urlStr, nil
}

func mockExtractLinks(urlStr string) ([]string, error) {
func mockExtractLinks() ([]string, error) {
return []string{"http://example.com/link1", "http://example.com/link2"}, nil
}

21  docs/CHANGELOG.md (new file)
@@ -0,0 +1,21 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.0.3] - 2024-09-22

### Added
- Implemented web scraping functionality using Playwright
- Added support for CSS selectors to extract specific content
- Introduced rate limiting for web requests
- Created configuration options for scraping settings

### Changed
- Improved error handling and logging throughout the application
- Enhanced URL parsing and validation

### Fixed
- Resolved issues with concurrent scraping operations
@@ -30,6 +30,7 @@ type SiteConfig struct {
ExcludePaths []string `yaml:"exclude_paths"`
OutputAlias string `yaml:"output_alias"`
PathOverrides []PathOverride `yaml:"path_overrides"`
LinksContainerSelector string `yaml:"links_container_selector"`
}

type PathOverride struct {

@@ -50,6 +51,13 @@ func Load(configPath string) (*Config, error) {
return nil, fmt.Errorf("error parsing config file: %v", err)
}

// Set default values if they are zero or missing
if config.Scrape.RequestsPerSecond <= 0 {
config.Scrape.RequestsPerSecond = 1.0
}
if config.Scrape.BurstLimit <= 0 {
config.Scrape.BurstLimit = 5
}
return &config, nil
}

@@ -45,10 +45,10 @@ scrape:
}
defer os.Remove(tmpfile.Name())

if _, err := tmpfile.Write(content); err != nil {
if _, err = tmpfile.Write(content); err != nil {
t.Fatalf("Failed to write to temp file: %v", err)
}
if err := tmpfile.Close(); err != nil {
if err = tmpfile.Close(); err != nil {
t.Fatalf("Failed to close temp file: %v", err)
}

@@ -1,21 +1,21 @@
package scraper

import (
"context"
"fmt"
"io/ioutil"
"io"
"log"
"math/rand"
"net/url"
"os"
"regexp"
"strings"
"time"
"sync"
"context"
"time"

md "github.com/JohannesKaufmann/html-to-markdown"
"github.com/PuerkitoBio/goquery"
"github.com/playwright-community/playwright-go"
md "github.com/JohannesKaufmann/html-to-markdown"
"golang.org/x/time/rate"
)

@@ -50,6 +50,7 @@ type SiteConfig struct {
ExcludePaths []string
OutputAlias string
PathOverrides []PathOverride
LinksContainerSelector string
}

// PathOverride holds path-specific overrides

@@ -60,208 +61,280 @@ type PathOverride struct {
}

func ScrapeSites(config Config) (map[string]string, error) {
logger.Println("Starting ScrapeSites function - Verbose mode is active")
results := make(chan struct {
url string
content string
err error
})
logger.Println("Starting ScrapeSites function - Verbose mode is active")
results := make(chan struct {
url string
content string
err error
})

limiter := rate.NewLimiter(rate.Limit(config.Scrape.RequestsPerSecond), config.Scrape.BurstLimit)
logger.Printf("Rate limiter configured with %f requests per second and burst limit of %d\n", config.Scrape.RequestsPerSecond, config.Scrape.BurstLimit)
// Ensure RequestsPerSecond and BurstLimit are valid
if config.Scrape.RequestsPerSecond <= 0 {
config.Scrape.RequestsPerSecond = 1.0
}
if config.Scrape.BurstLimit <= 0 {
config.Scrape.BurstLimit = 5
}

var wg sync.WaitGroup
totalURLs := 0
for _, site := range config.Sites {
logger.Printf("Processing site: %s\n", site.BaseURL)
wg.Add(1)
go func(site SiteConfig) {
defer wg.Done()
for _, path := range site.AllowedPaths {
fullURL := site.BaseURL + path
totalURLs++
logger.Printf("Queueing URL for scraping: %s\n", fullURL)
scrapeSingleURL(fullURL, site, config, results, limiter)
}
}(site)
}
limiter := rate.NewLimiter(rate.Limit(config.Scrape.RequestsPerSecond), config.Scrape.BurstLimit)
logger.Printf("Rate limiter configured with %f requests per second and burst limit of %d\n", config.Scrape.RequestsPerSecond, config.Scrape.BurstLimit)

go func() {
wg.Wait()
close(results)
logger.Println("All goroutines completed, results channel closed")
}()
var wg sync.WaitGroup
totalURLs := 0
var mu sync.Mutex
for _, site := range config.Sites {
logger.Printf("Processing site: %s\n", site.BaseURL)
wg.Add(1)
go func(site SiteConfig) {
defer wg.Done()
visited := make(map[string]bool)
for _, path := range site.AllowedPaths {
fullURL := site.BaseURL + path
mu.Lock()
totalURLs++
mu.Unlock()
logger.Printf("Queueing URL for scraping: %s\n", fullURL)
scrapeSingleURL(fullURL, site, results, limiter, visited, 0)
}
}(site)
}

scrapedContent := make(map[string]string)
for result := range results {
if result.err != nil {
logger.Printf("Error scraping %s: %v\n", result.url, result.err)
continue
}
logger.Printf("Successfully scraped content from %s (length: %d)\n", result.url, len(result.content))
scrapedContent[result.url] = result.content
}
go func() {
wg.Wait()
close(results)
logger.Println("All goroutines completed, results channel closed")
}()

logger.Printf("Total URLs processed: %d\n", totalURLs)
logger.Printf("Successfully scraped content from %d URLs\n", len(scrapedContent))
scrapedContent := make(map[string]string)
for result := range results {
if result.err != nil {
logger.Printf("Error scraping %s: %v\n", result.url, result.err)
continue
}
logger.Printf("Successfully scraped content from %s (length: %d)\n", result.url, len(result.content))
scrapedContent[result.url] = result.content
}

return scrapedContent, nil
logger.Printf("Total URLs processed: %d\n", totalURLs)
logger.Printf("Successfully scraped content from %d URLs\n", len(scrapedContent))

return scrapedContent, nil
}

func scrapeSingleURL(url string, site SiteConfig, config Config, results chan<- struct {
url string
content string
err error
}, limiter *rate.Limiter) {
logger.Printf("Starting to scrape URL: %s\n", url)
func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
url string
content string
err error
}, limiter *rate.Limiter, visited map[string]bool, currentDepth int) {
if site.MaxDepth > 0 && currentDepth > site.MaxDepth {
return
}

// Wait for rate limiter before making the request
err := limiter.Wait(context.Background())
if err != nil {
logger.Printf("Rate limiter error for %s: %v\n", url, err)
results <- struct {
url string
content string
err error
}{url, "", fmt.Errorf("rate limiter error: %v", err)}
return
}
if visited[url] {
return
}
visited[url] = true

cssLocator, excludeSelectors := getOverrides(url, site)
logger.Printf("Using CSS locator for %s: %s\n", url, cssLocator)
logger.Printf("Exclude selectors for %s: %v\n", url, excludeSelectors)
logger.Printf("Starting to scrape URL: %s\n", url)

content, err := scrapeURL(url, cssLocator, excludeSelectors)
if err != nil {
logger.Printf("Error scraping %s: %v\n", url, err)
results <- struct {
url string
content string
err error
}{url, "", err}
return
}
// Wait for rate limiter before making the request
err := limiter.Wait(context.Background())
if err != nil {
logger.Printf("Rate limiter error for %s: %v\n", url, err)
results <- struct {
url string
content string
err error
}{url, "", fmt.Errorf("rate limiter error: %v", err)}
return
}

if content == "" {
logger.Printf("Warning: Empty content scraped from %s\n", url)
} else {
logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(content))
}
content, err := FetchWebpageContent(url)
if err != nil {
logger.Printf("Error fetching content for %s: %v\n", url, err)
results <- struct {
url string
content string
err error
}{url, "", err}
return
}

results <- struct {
url string
content string
err error
}{url, content, nil}
doc, err := goquery.NewDocumentFromReader(strings.NewReader(content))
if err != nil {
logger.Printf("Error parsing HTML for %s: %v\n", url, err)
results <- struct {
url string
content string
err error
}{url, "", fmt.Errorf("error parsing HTML: %v", err)}
return
}

if site.LinksContainerSelector != "" {
logger.Printf("Processing links container for %s\n", url)
linkContainers := doc.Find(site.LinksContainerSelector)
linkContainers.Each(func(i int, container *goquery.Selection) {
container.Find("a[href]").Each(func(j int, link *goquery.Selection) {
href, exists := link.Attr("href")
if exists {
resolvedURL := resolveURL(href, url)
if isAllowedURL(resolvedURL, site) && !visited[resolvedURL] {
go scrapeSingleURL(resolvedURL, site, results, limiter, visited, currentDepth+1)
}
}
})
})
return
}

cssLocator, excludeSelectors := getOverrides(url, site)
logger.Printf("Using CSS locator for %s: %s\n", url, cssLocator)
logger.Printf("Exclude selectors for %s: %v\n", url, excludeSelectors)

extractedContent, err := ExtractContentWithCSS(content, cssLocator, excludeSelectors)
if err != nil {
logger.Printf("Error extracting content for %s: %v\n", url, err)
results <- struct {
url string
content string
err error
}{url, "", err}
return
}

if extractedContent == "" {
logger.Printf("Warning: Empty content scraped from %s\n", url)
} else {
logger.Printf("Successfully scraped content from %s (length: %d)\n", url, len(extractedContent))
}

results <- struct {
url string
content string
err error
}{url, extractedContent, nil}
}

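One detail worth noting about the new link-following branch above: scrapeSingleURL is launched in fresh goroutines that all share a single `visited` map. A mutex-guarded set is the usual way to make that kind of sharing safe; a minimal standalone sketch of that pattern (illustration only, not the project's code):

```go
package main

import "sync"

// visitedSet is a mutex-guarded "already scraped" set, safe to share across
// goroutines that may discover the same URL concurrently.
type visitedSet struct {
	mu   sync.Mutex
	seen map[string]bool
}

// firstVisit reports whether url is new and, if so, marks it as seen.
func (v *visitedSet) firstVisit(url string) bool {
	v.mu.Lock()
	defer v.mu.Unlock()
	if v.seen[url] {
		return false
	}
	v.seen[url] = true
	return true
}
```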
func scrapeSite(site SiteConfig, config Config, results chan<- struct {
url string
content string
err error
}, limiter *rate.Limiter) {
visited := make(map[string]bool)
queue := []string{site.BaseURL}
func scrapeSite(site SiteConfig, results chan<- struct {
url string
content string
err error
}, limiter *rate.Limiter,
) {
visited := make(map[string]bool)
queue := []string{site.BaseURL}

for len(queue) > 0 {
url := queue[0]
queue = queue[1:]

if visited[url] {
continue
}
visited[url] = true

if !isAllowedURL(url, site) {
continue
}

// Wait for rate limiter before making the request
err := limiter.Wait(context.Background())
if err != nil {
results <- struct {
url string
content string
err error
}{url, "", fmt.Errorf("rate limiter error: %v", err)}
continue
}

cssLocator, excludeSelectors := getOverrides(url, site)
content, err := scrapeURL(url, cssLocator, excludeSelectors)
results <- struct {
url string
content string
err error
}{url, content, err}

if len(visited) < site.MaxDepth {
links, _ := ExtractLinks(url)
for _, link := range links {
if !visited[link] && isAllowedURL(link, site) {
queue = append(queue, link)
}
}
}
}
}

func isAllowedURL(urlStr string, site SiteConfig) bool {
parsedURL, err := url.Parse(urlStr)
if err != nil {
return false
}

baseURL, _ := url.Parse(site.BaseURL)
if parsedURL.Host != baseURL.Host {
return false
}

path := parsedURL.Path
for _, allowedPath := range site.AllowedPaths {
if strings.HasPrefix(path, allowedPath) {
for _, excludePath := range site.ExcludePaths {
if strings.HasPrefix(path, excludePath) {
return false
}
}
return true
}
}
path := parsedURL.Path

// Check if the URL is within allowed paths
if len(site.AllowedPaths) > 0 {
allowed := false
for _, allowedPath := range site.AllowedPaths {
if strings.HasPrefix(path, allowedPath) {
allowed = true
break
}
}
if !allowed {
return false
}
}

return false
// Check if the URL is in excluded paths
for _, excludePath := range site.ExcludePaths {
if strings.HasPrefix(path, excludePath) {
return false
}
}

return true
}

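To make the rewritten allow/exclude logic concrete, here are a few worked cases in test form, mirroring the README's sample site config; this is a hypothetical test for illustration, not part of the diff.

```go
package scraper

import "testing"

// Illustrative cases for isAllowedURL: the host must match BaseURL, the path
// must sit under an allowed prefix (when any are set), and excluded prefixes
// always lose.
func TestIsAllowedURLExamples(t *testing.T) {
	site := SiteConfig{
		BaseURL:      "https://example.com",
		AllowedPaths: []string{"/blog", "/docs"},
		ExcludePaths: []string{"/admin"},
	}
	cases := map[string]bool{
		"https://example.com/blog/post-1": true,  // under an allowed prefix
		"https://example.com/admin/panel": false, // not under /blog or /docs
		"https://another-site.com/blog":   false, // different host than BaseURL
	}
	for url, want := range cases {
		if got := isAllowedURL(url, site); got != want {
			t.Errorf("isAllowedURL(%q) = %v, want %v", url, got, want)
		}
	}
}
```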
func getOverrides(urlStr string, site SiteConfig) (string, []string) {
parsedURL, _ := url.Parse(urlStr)
path := parsedURL.Path

for _, override := range site.PathOverrides {
if strings.HasPrefix(path, override.Path) {
if override.CSSLocator != "" {
return override.CSSLocator, override.ExcludeSelectors
}
return site.CSSLocator, override.ExcludeSelectors
}
}

return site.CSSLocator, site.ExcludeSelectors
}

func scrapeURL(url, cssLocator string, excludeSelectors []string) (string, error) {
content, err := FetchWebpageContent(url)
if err != nil {
return "", err
}

if cssLocator != "" {
content, err = ExtractContentWithCSS(content, cssLocator, excludeSelectors)
if err != nil {
return "", err
}
}

return ProcessHTMLContent(content, Config{})
}

func getFilenameFromContent(content, url string) string {

@@ -296,7 +369,7 @@ func SetupLogger(verbose bool) {
if verbose {
logger = log.New(os.Stdout, "SCRAPER: ", log.LstdFlags)
} else {
logger = log.New(ioutil.Discard, "", 0)
logger = log.New(io.Discard, "", 0)
}
}

@@ -339,6 +412,16 @@ func ClosePlaywright() {
}
}

// InitBrowser initializes the browser
func InitBrowser() error {
return InitPlaywright()
}

// CloseBrowser closes the browser
func CloseBrowser() {
ClosePlaywright()
}

// FetchWebpageContent retrieves the content of a webpage using Playwright
func FetchWebpageContent(urlStr string) (string, error) {
logger.Printf("Fetching webpage content for URL: %s\n", urlStr)

@@ -377,7 +460,9 @@ func FetchWebpageContent(urlStr string) (string, error) {
}

logger.Println("Waiting for body element")
_, err = page.WaitForSelector("body", playwright.PageWaitForSelectorOptions{

bodyElement := page.Locator("body")
err = bodyElement.WaitFor(playwright.LocatorWaitForOptions{
State: playwright.WaitForSelectorStateVisible,
})
if err != nil {

@@ -394,7 +479,7 @@ func FetchWebpageContent(urlStr string) (string, error) {

if content == "" {
logger.Println(" content is empty, falling back to body content")
content, err = page.InnerHTML("body")
content, err = bodyElement.InnerHTML()
if err != nil {
logger.Printf("Error getting body content: %v\n", err)
return "", fmt.Errorf("could not get body content: %v", err)

@@ -447,6 +532,8 @@ func scrollPage(page playwright.Page) error {
() => {
window.scrollTo(0, document.body.scrollHeight);
return document.body.scrollHeight;
// wait for 500 ms
new Promise(resolve => setTimeout(resolve, 500));
}
`

@@ -478,7 +565,9 @@ func scrollPage(page playwright.Page) error {

previousHeight = currentHeight

page.WaitForTimeout(500)
// Wait for a while before scrolling again

}

logger.Println("Scrolling back to top")

@@ -518,7 +607,9 @@ func ExtractLinks(urlStr string) ([]string, error) {

var result []string
for _, link := range links.([]interface{}) {
result = append(result, link.(string))
// Normalize URL by removing trailing slash
normalizedLink := strings.TrimRight(link.(string), "/")
result = append(result, normalizedLink)
}

logger.Printf("Extracted %d links\n", len(result))

@@ -552,6 +643,34 @@ func ExtractContentWithCSS(content, includeSelector string, excludeSelectors []s
return "", fmt.Errorf("error extracting content with CSS selector: %v", err)
}

// Trim leading and trailing whitespace
selectedContent = strings.TrimSpace(selectedContent)

// Normalize newlines
selectedContent = strings.ReplaceAll(selectedContent, "\r\n", "\n")
selectedContent = strings.ReplaceAll(selectedContent, "\r", "\n")

// Remove indentation while preserving structure
lines := strings.Split(selectedContent, "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
}
selectedContent = strings.Join(lines, "\n")

// Remove any leading or trailing newlines
selectedContent = strings.Trim(selectedContent, "\n")

logger.Printf("Extracted content length: %d\n", len(selectedContent))
return selectedContent, nil
}
func resolveURL(href, base string) string {
parsedBase, err := url.Parse(base)
if err != nil {
return href
}
parsedHref, err := url.Parse(href)
if err != nil {
return href
}
return parsedBase.ResolveReference(parsedHref).String()
}

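resolveURL leans entirely on net/url's RFC 3986 relative-reference resolution; a standalone demo of how relative hrefs are absolutized against a page URL (the URLs are illustrative):

```go
package main

import (
	"fmt"
	"net/url"
)

// Each href is resolved against the base exactly as resolveURL does.
func main() {
	base, _ := url.Parse("https://example.com/docs/intro/")
	for _, href := range []string{"setup", "../api/", "/blog/post-1", "https://other.com/x"} {
		ref, _ := url.Parse(href)
		fmt.Println(base.ResolveReference(ref))
	}
	// Output:
	// https://example.com/docs/intro/setup
	// https://example.com/docs/api/
	// https://example.com/blog/post-1
	// https://other.com/x
}
```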
@@ -1,10 +1,13 @@
package scraper

import (
"testing"
"io"
"log"
"net/http"
"net/http/httptest"
"reflect"
"strings"
"testing"
)

func TestIsAllowedURL(t *testing.T) {

@@ -48,9 +51,9 @@ func TestGetOverrides(t *testing.T) {
}

tests := []struct {
url string
expectedLocator string
expectedExcludes []string
url string
expectedLocator string
expectedExcludes []string
}{
{"https://example.com/normal", "main", []string{".ads"}},
{"https://example.com/special", ".special-content", []string{".sidebar"}},

@@ -69,6 +72,9 @@ func TestGetOverrides(t *testing.T) {
}

func TestExtractContentWithCSS(t *testing.T) {
// Initialize logger for testing
logger = log.New(io.Discard, "", 0)

html := `
<html>
<body>

@@ -138,6 +144,12 @@ This is a **test** paragraph.
}

func TestExtractLinks(t *testing.T) {
// Initialize Playwright before running the test
if err := InitPlaywright(); err != nil {
t.Fatalf("Failed to initialize Playwright: %v", err)
}
defer ClosePlaywright()

server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/html")
w.Write([]byte(`