Mirror of https://github.com/tnypxl/rollup.git (synced 2025-12-15 15:03:17 +00:00)

Compare commits (5): 333b9a366c, 1869dae89a, d3ff7cb862, ea410e4abb, 7d8e25b1ad

README.md (59 changed lines)
@@ -4,16 +4,18 @@ Rollup aggregates the contents of text-based files and webpages into a markdown

 ## Features

-- File type filtering
-- Ignore patterns for excluding files
-- Support for code-generated file detection
-- Advanced web scraping functionality
-- Verbose logging option for detailed output
-- Exclusionary CSS selectors for web scraping
-- Support for multiple URLs in web scraping
+- File type filtering for targeted content aggregation
+- Ignore patterns for excluding specific files or directories
+- Support for code-generated file detection and exclusion
+- Advanced web scraping functionality with depth control
+- Verbose logging option for detailed operation insights
+- Exclusionary CSS selectors for precise web content extraction
+- Support for multiple URLs in web scraping operations
+- Configurable output format for web scraping (single file or separate files)
-- Configuration file support (YAML)
-- Generation of default configuration file
+- Flexible configuration file support (YAML)
+- Automatic generation of default configuration file
 - Custom output file naming
 - Concurrent processing for improved performance

 ## Installation
@@ -74,14 +76,27 @@ ignore:
 code_generated:
   - **/generated/**
 scrape:
-  urls:
-    - url: https://example.com
+  sites:
+    - base_url: https://example.com
+      css_locator: .content
+      exclude_selectors:
+        - .ads
+        - .navigation
+      max_depth: 2
+      allowed_paths:
+        - /blog
+        - /docs
+      exclude_paths:
+        - /admin
+      output_alias: example
+      path_overrides:
+        - path: /special-page
+          css_locator: .special-content
+          exclude_selectors:
+            - .special-ads
   output_type: single
   requests_per_second: 1.0
   burst_limit: 3
 ```
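For reference, a hedged sketch of how this `scrape` section could map onto Go structs with `yaml` tags; every field and type name here is inferred from the YAML keys above, not taken from the repository:

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v3"
)

// PathOverride and SiteConfig mirror the nested YAML keys above.
type PathOverride struct {
	Path             string   `yaml:"path"`
	CSSLocator       string   `yaml:"css_locator"`
	ExcludeSelectors []string `yaml:"exclude_selectors"`
}

type SiteConfig struct {
	BaseURL          string         `yaml:"base_url"`
	CSSLocator       string         `yaml:"css_locator"`
	ExcludeSelectors []string       `yaml:"exclude_selectors"`
	MaxDepth         int            `yaml:"max_depth"`
	AllowedPaths     []string       `yaml:"allowed_paths"`
	ExcludePaths     []string       `yaml:"exclude_paths"`
	OutputAlias      string         `yaml:"output_alias"`
	PathOverrides    []PathOverride `yaml:"path_overrides"`
}

type ScrapeConfig struct {
	Sites             []SiteConfig `yaml:"sites"`
	OutputType        string       `yaml:"output_type"`
	RequestsPerSecond float64      `yaml:"requests_per_second"`
	BurstLimit        int          `yaml:"burst_limit"`
}

func main() {
	data := []byte(`
sites:
  - base_url: https://example.com
    css_locator: .content
    max_depth: 2
output_type: single
requests_per_second: 1.0
burst_limit: 3
`)
	var cfg ScrapeConfig
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", cfg)
}
```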
 ## Examples
@@ -92,10 +107,10 @@ scrape:
 rollup files
 ```

-2. Web scraping with multiple URLs:
+2. Web scraping with multiple URLs and increased concurrency:

 ```bash
-rollup web --urls=https://example.com,https://another-example.com
+rollup web --urls=https://example.com,https://another-example.com --concurrent=8
 ```

 3. Generate a default configuration file:
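The new `--concurrent` flag in example 2 suggests a bounded worker pool. A minimal sketch of that pattern, assuming the flag simply caps in-flight scrapes; none of these helper names come from the repository:

```go
package main

import (
	"fmt"
	"sync"
)

// fetch is a stand-in for the real per-URL scrape.
func fetch(url string) string { return "content of " + url }

func main() {
	urls := []string{"https://example.com", "https://another-example.com"}
	concurrent := 8 // what --concurrent=8 would set

	sem := make(chan struct{}, concurrent) // bounds in-flight work
	var wg sync.WaitGroup
	for _, u := range urls {
		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			sem <- struct{}{}        // acquire a slot
			defer func() { <-sem }() // release it
			fmt.Println(fetch(u))
		}(u)
	}
	wg.Wait()
}
```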
@@ -104,15 +119,25 @@ scrape:
 rollup generate
 ```

-4. Use a custom configuration file:
+4. Use a custom configuration file and specify output:

 ```bash
-rollup files --config=my-config.yml
+rollup files --config=my-config.yml --output=project_summary.md
 ```

-5. Web scraping with separate output files:
+5. Web scraping with separate output files and custom timeout:

 ```bash
-rollup web --urls=https://example.com,https://another-example.com --output=separate
+rollup web --urls=https://example.com,https://another-example.com --output=separate --timeout=60
 ```

+6. Rollup files with specific types and ignore patterns:
+
+```bash
+rollup files --types=.go,.md --ignore=vendor/**,*_test.go
+```
+
+7. Web scraping with depth and CSS selector:
+
+```bash
+rollup web --urls=https://example.com --depth=2 --css=.main-content
+```

 ## Contributing
cmd/web.go (16 changed lines)
@@ -2,7 +2,7 @@ package cmd

 import (
 	"fmt"
-	"io/ioutil"
+	"io"
 	"log"
 	"net/url"
 	"os"
@@ -44,7 +44,7 @@ func runWeb(cmd *cobra.Command, args []string) error {
 	scraper.SetupLogger(verbose)
 	logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
 	if !verbose {
-		logger.SetOutput(ioutil.Discard)
+		logger.SetOutput(io.Discard)
 	}
 	logger.Printf("Starting web scraping process with verbose mode: %v", verbose)
 	scraperConfig.Verbose = verbose
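This and the matching scraper hunks below are the standard Go 1.16 migration off the deprecated `io/ioutil` package: `io.Discard` replaces `ioutil.Discard` with identical behavior. A self-contained illustration of the pattern used in `runWeb`:

```go
package main

import (
	"io"
	"log"
	"os"
)

func newLogger(verbose bool) *log.Logger {
	logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
	if !verbose {
		// io.Discard (Go 1.16+) replaces the deprecated ioutil.Discard;
		// writes succeed but go nowhere.
		logger.SetOutput(io.Discard)
	}
	return logger
}

func main() {
	newLogger(false).Println("suppressed")
	newLogger(true).Println("printed")
}
```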
@@ -139,7 +139,7 @@ func writeSingleFile(content map[string]string) error {
 	defer file.Close()

 	for url, c := range content {
-		_, err = fmt.Fprintf(file, "# Content from %s\n\n%s\n\n---\n\n", url, c)
+		_, err = fmt.Fprintf(file, "# ::: Content from %s\n\n%s\n\n---\n\n", url, c)
 		if err != nil {
 			return fmt.Errorf("error writing content to file: %v", err)
 		}
@@ -161,7 +161,7 @@ func writeMultipleFiles(content map[string]string) error {
 		return fmt.Errorf("error creating output file %s: %v", filename, err)
 	}

-	_, err = file.WriteString(fmt.Sprintf("# Content from %s\n\n%s\n", url, c))
+	_, err = file.WriteString(fmt.Sprintf("# ::: Content from %s\n\n%s\n", url, c))
 	if err != nil {
 		file.Close()
 		return fmt.Errorf("error writing content to file %s: %v", filename, err)
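Both writer hunks change only the section header prefix, from `# Content from` to `# ::: Content from`. A condensed, runnable sketch of the single-file writer pattern these hunks touch, simplified from the diff:

```go
package main

import (
	"fmt"
	"os"
)

// writeSingleFile mirrors the pattern in cmd/web.go: one markdown file,
// one "# ::: Content from <url>" section per scraped page.
func writeSingleFile(path string, content map[string]string) error {
	file, err := os.Create(path)
	if err != nil {
		return fmt.Errorf("error creating output file: %v", err)
	}
	defer file.Close()

	for url, c := range content {
		if _, err = fmt.Fprintf(file, "# ::: Content from %s\n\n%s\n\n---\n\n", url, c); err != nil {
			return fmt.Errorf("error writing content to file: %v", err)
		}
	}
	return nil
}

func main() {
	_ = writeSingleFile("rollup.md", map[string]string{
		"https://example.com": "Example body as markdown.",
	})
}
```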
@@ -215,8 +215,10 @@ func scrapeURL(urlStr string, depth int, visited map[string]bool) (string, error)
 	return content, nil
 }

-var testExtractAndConvertContent = extractAndConvertContent
-var testExtractLinks = scraper.ExtractLinks
+var (
+	testExtractAndConvertContent = extractAndConvertContent
+	testExtractLinks             = scraper.ExtractLinks
+)

 func extractAndConvertContent(urlStr string) (string, error) {
 	content, err := scraper.FetchWebpageContent(urlStr)
@@ -240,7 +242,7 @@ func extractAndConvertContent(urlStr string) (string, error) {
 	if err != nil {
 		return "", fmt.Errorf("error parsing URL: %v", err)
 	}
-	header := fmt.Sprintf("# Content from %s\n\n", parsedURL.String())
+	header := fmt.Sprintf("# ::: Content from %s\n\n", parsedURL.String())

 	return header + markdown + "\n\n", nil
 }
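The grouped `var` block is a Go test seam: package-level function variables that production code calls indirectly and that tests overwrite with mocks (see `mockExtractLinks` below). A minimal sketch of the pattern, with simplified names:

```go
package main

import "fmt"

// fetchContent is the real implementation.
func fetchContent(url string) (string, error) {
	return "real content from " + url, nil
}

// Production code calls through the variable, so tests can swap it out.
var fetchContentFn = fetchContent

func rollup(url string) (string, error) {
	return fetchContentFn(url)
}

func main() {
	// A test would do:
	// fetchContentFn = func(string) (string, error) { return "mock", nil }
	out, _ := rollup("https://example.com")
	fmt.Println(out)
}
```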
@@ -103,7 +103,7 @@ func mockExtractAndConvertContent(urlStr string) (string, error) {
 	return "Mocked content for " + urlStr, nil
 }

-func mockExtractLinks(urlStr string) ([]string, error) {
+func mockExtractLinks() ([]string, error) {
 	return []string{"http://example.com/link1", "http://example.com/link2"}, nil
 }
docs/CHANGELOG.md (new file, 21 lines)
@@ -0,0 +1,21 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [0.0.3] - 2024-09-22
+
+### Added
+- Implemented web scraping functionality using Playwright
+- Added support for CSS selectors to extract specific content
+- Introduced rate limiting for web requests
+- Created configuration options for scraping settings
+
+### Changed
+- Improved error handling and logging throughout the application
+- Enhanced URL parsing and validation
+
+### Fixed
+- Resolved issues with concurrent scraping operations
@@ -45,10 +45,10 @@ scrape:
 	}
 	defer os.Remove(tmpfile.Name())

-	if _, err := tmpfile.Write(content); err != nil {
+	if _, err = tmpfile.Write(content); err != nil {
 		t.Fatalf("Failed to write to temp file: %v", err)
 	}
-	if err := tmpfile.Close(); err != nil {
+	if err = tmpfile.Close(); err != nil {
 		t.Fatalf("Failed to close temp file: %v", err)
 	}
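The `:=` to `=` change reuses the function's existing `err` variable instead of declaring a new one scoped to the `if` statement; with `:=`, the inner `err` shadows the outer one, which shadow linters flag. A tiny illustration of the difference:

```go
package main

import (
	"errors"
	"fmt"
)

func fail() error { return errors.New("boom") }

func main() {
	var err error
	if err := fail(); err != nil { // := declares a NEW err, shadowing the outer one
		fmt.Println("inner err:", err)
	}
	fmt.Println("outer err still nil:", err == nil) // true: outer err was never set

	if err = fail(); err != nil { // = assigns to the existing err
		fmt.Println("inner err:", err)
	}
	fmt.Println("outer err set:", err != nil) // true
}
```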
@@ -1,21 +1,21 @@
 package scraper

 import (
+	"context"
 	"fmt"
-	"io/ioutil"
+	"io"
 	"log"
 	"math/rand"
 	"net/url"
 	"os"
 	"regexp"
 	"strings"
-	"time"
 	"sync"
-	"context"
+	"time"

+	md "github.com/JohannesKaufmann/html-to-markdown"
 	"github.com/PuerkitoBio/goquery"
 	"github.com/playwright-community/playwright-go"
-	md "github.com/JohannesKaufmann/html-to-markdown"
 	"golang.org/x/time/rate"
 )
@@ -81,7 +81,7 @@ func ScrapeSites(config Config) (map[string]string, error) {
 			fullURL := site.BaseURL + path
 			totalURLs++
 			logger.Printf("Queueing URL for scraping: %s\n", fullURL)
-			scrapeSingleURL(fullURL, site, config, results, limiter)
+			scrapeSingleURL(fullURL, site, results, limiter)
 		}
 	}(site)
 }
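The limiter threaded through these calls is `golang.org/x/time/rate` (imported above), and the README's `requests_per_second` and `burst_limit` keys map naturally onto its two constructor arguments. A minimal sketch of that wiring, assuming exactly that mapping:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	// requests_per_second: 1.0, burst_limit: 3 (from the README config example)
	limiter := rate.NewLimiter(rate.Limit(1.0), 3)

	for i := 0; i < 5; i++ {
		// Wait blocks until the limiter permits another request.
		if err := limiter.Wait(context.Background()); err != nil {
			fmt.Println("limiter error:", err)
			return
		}
		fmt.Printf("request %d at %s\n", i+1, time.Now().Format("15:04:05.000"))
	}
}
```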
@@ -108,11 +108,12 @@ func ScrapeSites(config Config) (map[string]string, error) {
 	return scrapedContent, nil
 }

-func scrapeSingleURL(url string, site SiteConfig, config Config, results chan<- struct {
+func scrapeSingleURL(url string, site SiteConfig, results chan<- struct {
 	url     string
 	content string
 	err     error
-}, limiter *rate.Limiter) {
+}, limiter *rate.Limiter,
+) {
 	logger.Printf("Starting to scrape URL: %s\n", url)

 	// Wait for rate limiter before making the request
@@ -155,11 +156,12 @@ func scrapeSingleURL(url string, site SiteConfig, config Config, results chan<-
 	}{url, content, nil}
 }

-func scrapeSite(site SiteConfig, config Config, results chan<- struct {
+func scrapeSite(site SiteConfig, results chan<- struct {
 	url     string
 	content string
 	err     error
-}, limiter *rate.Limiter) {
+}, limiter *rate.Limiter,
+) {
 	visited := make(map[string]bool)
 	queue := []string{site.BaseURL}
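Dropping the unused `config Config` parameter narrows both functions to what they actually read: the per-site settings and the shared limiter. A condensed sketch of the resulting shape; the result struct is named here for brevity where the diff uses an anonymous struct, and the `SiteConfig` field is an assumption:

```go
package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

// SiteConfig holds per-site scrape settings; only BaseURL is shown here.
type SiteConfig struct {
	BaseURL string
}

type result struct {
	url     string
	content string
	err     error
}

// scrapeSingleURL mirrors the narrowed signature from the diff: per-site
// config and the shared limiter, but no longer the whole app Config.
func scrapeSingleURL(url string, site SiteConfig, results chan<- result, limiter *rate.Limiter) {
	// ... limiter.Wait, fetch, extract ...
	results <- result{url: url, content: "stub content", err: nil}
}

func main() {
	results := make(chan result, 1)
	scrapeSingleURL("https://example.com/blog", SiteConfig{BaseURL: "https://example.com"}, results, rate.NewLimiter(1, 1))
	fmt.Println((<-results).url)
}
```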
@@ -296,7 +298,7 @@ func SetupLogger(verbose bool) {
 	if verbose {
 		logger = log.New(os.Stdout, "SCRAPER: ", log.LstdFlags)
 	} else {
-		logger = log.New(ioutil.Discard, "", 0)
+		logger = log.New(io.Discard, "", 0)
 	}
 }
@@ -387,7 +389,9 @@ func FetchWebpageContent(urlStr string) (string, error) {
 	}

 	logger.Println("Waiting for body element")
-	_, err = page.WaitForSelector("body", playwright.PageWaitForSelectorOptions{
+	bodyElement := page.Locator("body")
+	err = bodyElement.WaitFor(playwright.LocatorWaitForOptions{
 		State: playwright.WaitForSelectorStateVisible,
 	})
 	if err != nil {
@@ -404,7 +408,7 @@ func FetchWebpageContent(urlStr string) (string, error) {

 	if content == "" {
 		logger.Println(" content is empty, falling back to body content")
-		content, err = page.InnerHTML("body")
+		content, err = bodyElement.InnerHTML()
 		if err != nil {
 			logger.Printf("Error getting body content: %v\n", err)
 			return "", fmt.Errorf("could not get body content: %v", err)
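This hunk migrates from the deprecated `page.WaitForSelector` to playwright-go's Locator API: create the locator once, wait for visibility, then reuse it for `InnerHTML`. A compact sketch of the migrated flow, with browser setup condensed and most error handling trimmed:

```go
package main

import (
	"fmt"
	"log"

	"github.com/playwright-community/playwright-go"
)

func main() {
	pw, err := playwright.Run()
	if err != nil {
		log.Fatal(err)
	}
	defer pw.Stop()

	browser, _ := pw.Chromium.Launch()
	defer browser.Close()

	page, _ := browser.NewPage()
	page.Goto("https://example.com")

	// Locator API: one handle, reused for waiting and extraction.
	body := page.Locator("body")
	if err := body.WaitFor(playwright.LocatorWaitForOptions{
		State: playwright.WaitForSelectorStateVisible,
	}); err != nil {
		log.Fatal(err)
	}

	html, _ := body.InnerHTML()
	fmt.Println(len(html), "bytes of body HTML")
}
```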
@@ -457,6 +461,8 @@ func scrollPage(page playwright.Page) error {
 	() => {
 		window.scrollTo(0, document.body.scrollHeight);
 		return document.body.scrollHeight;
+		// wait for 500 ms
+		new Promise(resolve => setTimeout(resolve, 500));
 	}
 	`

@@ -488,7 +494,9 @@ func scrollPage(page playwright.Page) error {

 	previousHeight = currentHeight

-	page.WaitForTimeout(500)
+	// Wait for a while before scrolling again
 	}

 	logger.Println("Scrolling back to top")
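`scrollPage` implements the usual infinite-scroll loop: scroll to the bottom, read `document.body.scrollHeight`, and stop once the height stops growing. A hedged sketch of that loop; the loop structure and exit condition are assumptions, while the `Evaluate` script and `WaitForTimeout` call come from the diff:

```go
package scraper

import (
	"fmt"

	"github.com/playwright-community/playwright-go"
)

// scrollPageSketch scrolls until document.body.scrollHeight stops growing,
// which forces lazy-loaded content to render before scraping.
func scrollPageSketch(page playwright.Page) error {
	script := `() => {
		window.scrollTo(0, document.body.scrollHeight);
		return document.body.scrollHeight;
	}`

	previousHeight := -1
	for {
		result, err := page.Evaluate(script)
		if err != nil {
			return fmt.Errorf("error scrolling page: %v", err)
		}

		currentHeight := 0
		switch v := result.(type) {
		case int:
			currentHeight = v
		case float64: // playwright-go may surface JS numbers as float64
			currentHeight = int(v)
		}

		if currentHeight == previousHeight {
			break // height stopped growing: no more lazy content
		}
		previousHeight = currentHeight

		// Give lazy loaders a moment before the next scroll.
		page.WaitForTimeout(500)
	}
	return nil
}
```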
@@ -1,13 +1,13 @@
 package scraper

 import (
-	"testing"
+	"io"
+	"log"
 	"net/http"
 	"net/http/httptest"
-	"strings"
 	"reflect"
-	"log"
-	"io/ioutil"
 	"strings"
+	"testing"
 )

 func TestIsAllowedURL(t *testing.T) {
@@ -73,7 +73,7 @@ func TestGetOverrides(t *testing.T) {

 func TestExtractContentWithCSS(t *testing.T) {
 	// Initialize logger for testing
-	logger = log.New(ioutil.Discard, "", 0)
+	logger = log.New(io.Discard, "", 0)

 	html := `
 	<html>
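`TestExtractContentWithCSS` exercises CSS-based extraction, which this codebase does with goquery (imported in the scraper package above). A minimal sketch of selecting and excluding content by CSS selector; the surrounding program is illustrative, only the goquery calls are standard API:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	html := `<html><body>
		<div class="content"><p>Wanted text.</p></div>
		<div class="ads">Unwanted ad.</div>
	</body></html>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
	if err != nil {
		log.Fatal(err)
	}

	// Drop excluded selectors first, then keep only the CSS-located content.
	doc.Find(".ads").Remove()
	selected, err := doc.Find(".content").Html()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(selected) // <p>Wanted text.</p>
}
```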