Mirror of https://github.com/tnypxl/rollup.git (synced 2025-12-13 06:23:18 +00:00)

Compare commits: 9341a51d09 ... 877a7876c0

2 commits in this range: 877a7876c0, 7569aff6ec
CLAUDE.md (new file, 53 lines)

@@ -0,0 +1,53 @@

# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Build and Run Commands

```bash
# Build the binary
go build -o rollup .

# Run directly
go run main.go [command]

# Run tests
go test ./...

# Run a single test
go test -run TestName ./path/to/package
```

## Project Overview

Rollup is a Go CLI tool that aggregates text-based files and webpages into markdown files. It has three main commands:
- `files` - Rolls up local files into a single markdown file
- `web` - Scrapes webpages and converts them to markdown using Playwright
- `generate` - Creates a default rollup.yml config file

## Architecture

**Entry Point**: `main.go` initializes the Playwright browser and loads the config before executing commands via Cobra.
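
As a rough sketch of that startup order (not the repository's actual `main.go`; the import path for playwright-go and the Chromium/default-launch choices are assumptions):

```go
package main

import (
    "log"

    "github.com/playwright-community/playwright-go" // assumed import path
    "github.com/spf13/cobra"
)

func main() {
    // Start the Playwright driver and a browser before any command runs.
    pw, err := playwright.Run()
    if err != nil {
        log.Fatalf("starting playwright: %v", err)
    }
    defer pw.Stop()

    browser, err := pw.Chromium.Launch()
    if err != nil {
        log.Fatalf("launching browser: %v", err)
    }
    defer browser.Close()

    // Config loading (internal/config) would happen here, then Cobra
    // dispatches to the files/web/generate commands.
    rootCmd := &cobra.Command{Use: "rollup"}
    if err := rootCmd.Execute(); err != nil {
        log.Fatal(err)
    }
}
```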

**Command Layer** (`cmd/`):
- `root.go` - Cobra root command with global flags (--config, --verbose)
- `files.go` - File aggregation with glob pattern matching for ignore/codegen detection
- `web.go` - Web scraping orchestration; converts config site definitions to scraper configs
- `generate.go` - Scans the directory for text file types and generates rollup.yml

**Internal Packages**:
- `internal/config` - YAML config loading and validation. Defines the `Config`, `SiteConfig`, and `PathOverride` structs
- `internal/scraper` - Playwright-based web scraping with rate limiting, plus HTML-to-markdown conversion via goquery and the html-to-markdown library
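
To illustrate the conversion path that bullet describes, a minimal sketch (assuming the current goquery and JohannesKaufmann/html-to-markdown import paths; the HTML and selector are made up, and this is not the scraper's actual code):

```go
package main

import (
    "fmt"
    "log"
    "strings"

    md "github.com/JohannesKaufmann/html-to-markdown"
    "github.com/PuerkitoBio/goquery"
)

func main() {
    html := `<main><h1>Title</h1><p>Some <strong>content</strong>.</p></main>`

    // Parse the HTML and narrow it to a CSS selector, as the scraper does
    // with per-site css_locator settings.
    doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
    if err != nil {
        log.Fatal(err)
    }
    selected, err := goquery.OuterHtml(doc.Find("main"))
    if err != nil {
        log.Fatal(err)
    }

    // Convert the selected fragment to markdown.
    converter := md.NewConverter("", true, nil)
    markdown, err := converter.ConvertString(selected)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(markdown)
}
```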

**Key Dependencies**:
- `spf13/cobra` - CLI framework
- `playwright-go` - Browser automation for web scraping
- `PuerkitoBio/goquery` - HTML parsing and CSS selector extraction
- `JohannesKaufmann/html-to-markdown` - HTML-to-markdown conversion

## Configuration

The tool reads from `rollup.yml` by default. Key config fields:
- `file_extensions` - File types to include in the rollup
- `ignore_paths` / `code_generated_paths` - Glob patterns for exclusion
- `sites` - Web scraping targets with CSS selectors, path filtering, and rate limiting
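
A minimal sketch of consuming that configuration through `internal/config` (assuming the module import path matches the repository URL; `Load` and `Validate` are the functions shown in the hunks below):

```go
package main

import (
    "log"

    "github.com/tnypxl/rollup/internal/config" // assumed module path
)

func main() {
    // Load reads and unmarshals rollup.yml, the default config file.
    cfg, err := config.Load("rollup.yml")
    if err != nil {
        log.Fatalf("loading config: %v", err)
    }

    // Validate rejects a config that names neither file_extensions nor
    // sites, and rejects non-positive rate-limit values.
    if err := cfg.Validate(); err != nil {
        log.Fatalf("invalid config: %v", err)
    }

    log.Printf("loaded %d file extensions and %d sites", len(cfg.FileExtensions), len(cfg.Sites))
}
```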

@@ -33,7 +33,7 @@ whose name is <project-directory-name>-rollup-<timestamp>.md.`,

func init() {
    filesCmd.Flags().StringVarP(&path, "path", "p", ".", "Path to the project directory")
-   filesCmd.Flags().StringVarP(&fileTypes, "types", "t", ".go,.md,.txt", "Comma-separated list of file extensions to include")
+   filesCmd.Flags().StringVarP(&fileTypes, "types", "t", "go,md,txt", "Comma-separated list of file extensions to include (without leading dot)")
    filesCmd.Flags().StringVarP(&codeGenPatterns, "codegen", "g", "", "Comma-separated list of glob patterns for code-generated files")
    filesCmd.Flags().StringVarP(&ignorePatterns, "ignore", "i", "", "Comma-separated list of glob patterns for files to ignore")
}
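
The changed default drops the leading dots from the extension list. As a rough illustration of how dot-less extensions can still be matched against `filepath.Ext` (which always returns the extension with its dot), here is a hypothetical helper, not the repository's code:

```go
package main

import (
    "fmt"
    "path/filepath"
    "strings"
)

// hasAllowedExt reports whether name's extension is in exts; it tolerates
// both "go" and ".go" style entries by trimming the dot on both sides.
func hasAllowedExt(name string, exts []string) bool {
    got := strings.TrimPrefix(filepath.Ext(name), ".")
    for _, e := range exts {
        if got == strings.TrimPrefix(e, ".") {
            return true
        }
    }
    return false
}

func main() {
    fmt.Println(hasAllowedExt("cmd/files.go", []string{"go", "md", "txt"})) // true
    fmt.Println(hasAllowedExt("image.png", []string{"go", "md", "txt"}))    // false
}
```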

@@ -88,6 +88,10 @@ func Load(configPath string) (*Config, error) {

// Validate checks the configuration for any invalid values
func (c *Config) Validate() error {
    if len(c.FileExtensions) == 0 && len(c.Sites) == 0 {
        return fmt.Errorf("file_extensions or sites must be specified")
    }

    if c.RequestsPerSecond != nil && *c.RequestsPerSecond <= 0 {
        return fmt.Errorf("requests_per_second must be positive")
    }

@@ -10,8 +10,8 @@ func TestLoad(t *testing.T) {
    // Create a temporary config file
    content := []byte(`
file_extensions:
-  - .go
-  - .md
+  - go
+  - md
ignore_paths:
  - "*.tmp"
  - "**/*.log"

@@ -27,7 +27,7 @@ sites:
    - "/blog"
  exclude_paths:
    - "/admin"
-  file_name_prefix: "example"
+  file_name_prefix: "example"
  path_overrides:
    - path: "/special"
      css_locator: ".special-content"

@@ -61,7 +61,7 @@ burst_limit: 5
    rps := 1.0
    bl := 5
    expectedConfig := &Config{
-       FileExtensions: []string{".go", ".md"},
+       FileExtensions: []string{"go", "md"},
        IgnorePaths: []string{"*.tmp", "**/*.log"},
        CodeGeneratedPaths: []string{"generated_*.go"},
        Sites: []SiteConfig{

@@ -100,7 +100,7 @@ func TestValidate(t *testing.T) {
        {
            name: "Valid config",
            config: Config{
-               FileExtensions: []string{".go"},
+               FileExtensions: []string{"go"},
                Sites: []SiteConfig{
                    {BaseURL: "https://example.com"},
                },

@@ -115,7 +115,7 @@ func TestValidate(t *testing.T) {
        {
            name: "Invalid requests per second",
            config: Config{
-               FileExtensions: []string{".go"},
+               FileExtensions: []string{"go"},
                RequestsPerSecond: func() *float64 { f := -1.0; return &f }(),
            },
            wantErr: true,

@@ -123,7 +123,7 @@ func TestValidate(t *testing.T) {
        {
            name: "Invalid burst limit",
            config: Config{
-               FileExtensions: []string{".go"},
+               FileExtensions: []string{"go"},
                BurstLimit: func() *int { i := -1; return &i }(),
            },
            wantErr: true,

@@ -131,19 +131,11 @@ func TestValidate(t *testing.T) {
        {
            name: "Site without base URL",
            config: Config{
-               FileExtensions: []string{".go"},
+               FileExtensions: []string{"go"},
                Sites: []SiteConfig{{}},
            },
            wantErr: true,
        },
-       {
-           name: "Negative max depth",
-           config: Config{
-               FileExtensions: []string{".go"},
-               Sites: []SiteConfig{{BaseURL: "https://example.com"}},
-           },
-           wantErr: true,
-       },
    }

    for _, tt := range tests {

@@ -74,6 +74,9 @@ func ScrapeSites(config Config) error {

    var wg sync.WaitGroup
    totalURLs := 0
+   for _, site := range config.Sites {
+       totalURLs += len(site.AllowedPaths)
+   }
    for _, site := range config.Sites {
        logger.Printf("Processing site: %s\n", site.BaseURL)
        wg.Add(1)

@@ -81,7 +84,6 @@
            defer wg.Done()
            for _, path := range site.AllowedPaths {
                fullURL := site.BaseURL + path
-               totalURLs++
                logger.Printf("Queueing URL for scraping: %s\n", fullURL)
                scrapeSingleURL(fullURL, site, results, limiter)
            }
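
The change above appears to pre-compute `totalURLs` before the goroutines start instead of incrementing it inside them; incrementing a plain shared counter from multiple goroutines without synchronization is a data race. A small sketch of both options (pre-counting, as the diff does, or `sync/atomic` if the count truly had to be updated concurrently); the data shapes are simplified stand-ins, not the scraper's types:

```go
package main

import (
    "fmt"
    "sync"
    "sync/atomic"
)

func main() {
    sites := [][]string{{"/a", "/b"}, {"/c"}}

    // Option 1 (what the diff does): count up front, before any goroutine runs.
    total := 0
    for _, paths := range sites {
        total += len(paths)
    }
    fmt.Println("total (pre-counted):", total)

    // Option 2: if counting had to stay inside the goroutines, use atomics
    // instead of a plain int to avoid a data race.
    var counted int64
    var wg sync.WaitGroup
    for _, paths := range sites {
        wg.Add(1)
        go func(paths []string) {
            defer wg.Done()
            atomic.AddInt64(&counted, int64(len(paths)))
        }(paths)
    }
    wg.Wait()
    fmt.Println("total (atomic):", counted)
}
```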

@@ -532,8 +534,6 @@ func scrollPage(page playwright.Page) error {
        () => {
            window.scrollTo(0, document.body.scrollHeight);
            return document.body.scrollHeight;
-           // wait for 500 ms
-           new Promise(resolve => setTimeout(resolve, 500));
        }
    `
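
The two removed lines sat after a `return` statement, so they never executed, and even if reached, the promise was never awaited. If an in-page delay were actually wanted, one option (a hypothetical helper assuming the playwright-community import path, not the repository's code) is to lean on Playwright resolving a promise returned from `Evaluate`:

```go
package scraper

import "github.com/playwright-community/playwright-go" // assumed import path

// scrollOnce scrolls to the bottom, waits 500 ms inside the page, and returns
// the new scroll height. Playwright resolves the promise returned by the
// async arrow function before Evaluate returns.
func scrollOnce(page playwright.Page) (interface{}, error) {
    return page.Evaluate(`async () => {
        window.scrollTo(0, document.body.scrollHeight);
        await new Promise(resolve => setTimeout(resolve, 500));
        return document.body.scrollHeight;
    }`)
}
```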

@@ -565,8 +565,8 @@ func scrollPage(page playwright.Page) error {

        previousHeight = currentHeight

-       // Wait for a while before scrolling again
+       // Wait for content to load before scrolling again
        time.Sleep(100 * time.Millisecond)
    }

    logger.Println("Scrolling back to top")