Mirror of https://github.com/tnypxl/rollup.git (synced 2025-12-15 06:53:18 +00:00)
flatten scrape config to 'sites:'
* flatten scrape config to 'sites:'. Update unit tests and readme.
* remove check for file_extensions configuration.
* show progress indication after 5 seconds.
* add documentation to functions
* fix: remove MaxDepth and link extraction functionality
* fix: Remove MaxDepth references from cmd/web.go
cmd/files.go (64 changed lines)
```diff
@@ -108,19 +108,20 @@ func isIgnored(filePath string, patterns []string) bool {
 func runRollup(cfg *config.Config) error {
 	// Use config if available, otherwise use command-line flags
-	var types, codeGenList, ignoreList []string
-	if cfg != nil && len(cfg.FileTypes) > 0 {
-		types = cfg.FileTypes
+	var types []string
+	var codeGenList, ignoreList []string
+	if cfg != nil && len(cfg.FileExtensions) > 0 {
+		types = cfg.FileExtensions
 	} else {
 		types = strings.Split(fileTypes, ",")
 	}
-	if cfg != nil && len(cfg.CodeGenerated) > 0 {
-		codeGenList = cfg.CodeGenerated
+	if cfg != nil && len(cfg.CodeGeneratedPaths) > 0 {
+		codeGenList = cfg.CodeGeneratedPaths
 	} else {
 		codeGenList = strings.Split(codeGenPatterns, ",")
 	}
-	if cfg != nil && cfg.Ignore != nil && len(cfg.Ignore) > 0 {
-		ignoreList = cfg.Ignore
+	if cfg != nil && len(cfg.IgnorePaths) > 0 {
+		ignoreList = cfg.IgnorePaths
 	} else {
 		ignoreList = strings.Split(ignorePatterns, ",")
 	}
```
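The rename from FileTypes, Ignore, and CodeGenerated to FileExtensions, IgnorePaths, and CodeGeneratedPaths implies a matching rename in the config package, which this page does not show. A minimal sketch of the affected fields, assuming snake_case yaml tags (an assumption, not confirmed by the diff):

```go
// Hypothetical excerpt of config.Config after the rename. The field names
// come from this diff; the yaml tags and the omitted fields are assumptions.
package config

type Config struct {
	FileExtensions     []string `yaml:"file_extensions"`
	IgnorePaths        []string `yaml:"ignore_paths"`
	CodeGeneratedPaths []string `yaml:"code_generated_paths"`
	// ...remaining fields (Sites, RequestsPerSecond, ...) omitted here.
}
```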
```diff
@@ -145,6 +146,11 @@ func runRollup(cfg *config.Config) error {
 	}
 	defer outputFile.Close()
 
+	startTime := time.Now()
+	showProgress := false
+	progressTicker := time.NewTicker(500 * time.Millisecond)
+	defer progressTicker.Stop()
+
 	// Walk through the directory
 	err = filepath.Walk(absPath, func(path string, info os.FileInfo, err error) error {
 		if err != nil {
@@ -160,16 +166,25 @@ func runRollup(cfg *config.Config) error {
 
 		// Check if the file should be ignored
 		if isIgnored(relPath, ignoreList) {
+			if verbose {
+				fmt.Printf("Ignoring file: %s\n", relPath)
+			}
 			return nil
 		}
 
 		ext := filepath.Ext(path)
 		for _, t := range types {
 			if ext == "."+t {
+				// Verbose logging for processed file
+				if verbose {
+					size := humanReadableSize(info.Size())
+					fmt.Printf("Processing file: %s (%s)\n", relPath, size)
+				}
+
 				// Read file contents
 				content, err := os.ReadFile(path)
 				if err != nil {
-					fmt.Printf("Error reading file %s: %v", path, err)
+					fmt.Printf("Error reading file %s: %v\n", path, err)
 					return nil
 				}
 
@@ -185,12 +200,43 @@ func runRollup(cfg *config.Config) error {
 				break
 			}
 		}
 
+		if !showProgress && time.Since(startTime) > 5*time.Second {
+			showProgress = true
+			fmt.Print("This is taking a while (hold tight) ")
+		}
+
+		select {
+		case <-progressTicker.C:
+			if showProgress {
+				fmt.Print(".")
+			}
+		default:
+		}
+
 		return nil
 	})
 	if err != nil {
 		return fmt.Errorf("error walking through directory: %v", err)
 	}
 
-	fmt.Printf("Rollup complete. Output file: %s", outputFileName)
+	if showProgress {
+		fmt.Println() // Print a newline after the progress dots
+	}
+
+	fmt.Printf("Rollup complete. Output file: %s\n", outputFileName)
 	return nil
 }
+
+func humanReadableSize(size int64) string {
+	const unit = 1024
+	if size < unit {
+		return fmt.Sprintf("%d B", size)
+	}
+	div, exp := int64(unit), 0
+	for n := size / unit; n >= unit; n /= unit {
+		div *= unit
+		exp++
+	}
+	return fmt.Sprintf("%.1f %cB", float64(size)/float64(div), "KMGTPE"[exp])
+}
```
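A quick check of the new humanReadableSize helper: the function body below is copied verbatim from the hunk above, while the main wrapper is just illustrative.

```go
package main

import "fmt"

// humanReadableSize is copied from the hunk above: it repeatedly divides by
// 1024 and picks the matching unit letter from "KMGTPE".
func humanReadableSize(size int64) string {
	const unit = 1024
	if size < unit {
		return fmt.Sprintf("%d B", size)
	}
	div, exp := int64(unit), 0
	for n := size / unit; n >= unit; n /= unit {
		div *= unit
		exp++
	}
	return fmt.Sprintf("%.1f %cB", float64(size)/float64(div), "KMGTPE"[exp])
}

func main() {
	fmt.Println(humanReadableSize(512))      // 512 B
	fmt.Println(humanReadableSize(1536))     // 1.5 KB
	fmt.Println(humanReadableSize(10485760)) // 10.0 MB
}
```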
From the updated unit tests:

```diff
@@ -112,9 +112,9 @@ func TestRunRollup(t *testing.T) {
 
 	// Set up test configuration
 	cfg = &config.Config{
-		FileTypes:     []string{"go", "txt", "md"},
-		Ignore:        []string{"*.json", ".git/**", "vendor/**"},
-		CodeGenerated: []string{"generated_*.go"},
+		FileExtensions:     []string{"go", "txt", "md"},
+		IgnorePaths:        []string{"*.json", ".git/**", "vendor/**"},
+		CodeGeneratedPaths: []string{"generated_*.go"},
 	}
 
 	// Change working directory to the temp directory
```
And from the config generator, which now emits the renamed fields and writes to a fixed path:

```diff
@@ -38,23 +38,23 @@ func runGenerate(cmd *cobra.Command, args []string) error {
 	}
 
 	cfg := config.Config{
-		FileTypes: make([]string, 0, len(fileTypes)),
-		Ignore:    []string{"node_modules/**", "vendor/**", ".git/**"},
+		FileExtensions: make([]string, 0, len(fileTypes)),
+		IgnorePaths:    []string{"node_modules/**", "vendor/**", ".git/**"},
 	}
 
 	for ext := range fileTypes {
-		cfg.FileTypes = append(cfg.FileTypes, ext)
+		cfg.FileExtensions = append(cfg.FileExtensions, ext)
 	}
 
 	// Sort file types for consistency
-	sort.Strings(cfg.FileTypes)
+	sort.Strings(cfg.FileExtensions)
 
 	yamlData, err := yaml.Marshal(&cfg)
 	if err != nil {
 		return fmt.Errorf("error marshaling config: %v", err)
 	}
 
-	outputPath := config.DefaultConfigPath()
+	outputPath := "rollup.yml"
 	err = os.WriteFile(outputPath, yamlData, 0644)
 	if err != nil {
 		return fmt.Errorf("error writing config file: %v", err)
```
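For reference, `rollup generate` should now emit a rollup.yml shaped roughly like the sketch below. The key spellings depend on config.Config's yaml tags, which this diff does not show, so treat them as assumptions:

```yaml
# Hypothetical output of `rollup generate` under the renamed fields;
# snake_case keys are assumed, and extensions are sorted by the generator.
file_extensions:
  - go
  - md
  - txt
ignore_paths:
  - node_modules/**
  - vendor/**
  - .git/**
```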
cmd/web.go (87 changed lines)
```diff
@@ -18,7 +18,6 @@ import (
 var (
 	urls             []string
 	outputType       string
-	depth            int
 	includeSelector  string
 	excludeSelectors []string
 )
@@ -35,7 +34,6 @@ var webCmd = &cobra.Command{
 func init() {
 	webCmd.Flags().StringSliceVarP(&urls, "urls", "u", []string{}, "URLs of the webpages to scrape (comma-separated)")
 	webCmd.Flags().StringVarP(&outputType, "output", "o", "single", "Output type: 'single' for one file, 'separate' for multiple files")
-	webCmd.Flags().IntVarP(&depth, "depth", "d", 0, "Depth of link traversal (default: 0, only scrape the given URLs)")
 	webCmd.Flags().StringVar(&includeSelector, "css", "", "CSS selector to extract specific content")
 	webCmd.Flags().StringSliceVar(&excludeSelectors, "exclude", []string{}, "CSS selectors to exclude from the extracted content (comma-separated)")
 }
@@ -50,22 +48,21 @@ func runWeb(cmd *cobra.Command, args []string) error {
 	scraperConfig.Verbose = verbose
 
 	var siteConfigs []scraper.SiteConfig
-	if len(cfg.Scrape.Sites) > 0 {
-		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
-		siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
-		for i, site := range cfg.Scrape.Sites {
+	if len(cfg.Sites) > 0 {
+		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Sites))
+		siteConfigs = make([]scraper.SiteConfig, len(cfg.Sites))
+		for i, site := range cfg.Sites {
 			siteConfigs[i] = scraper.SiteConfig{
 				BaseURL:          site.BaseURL,
 				CSSLocator:       site.CSSLocator,
 				ExcludeSelectors: site.ExcludeSelectors,
-				MaxDepth:         site.MaxDepth,
 				AllowedPaths:     site.AllowedPaths,
 				ExcludePaths:     site.ExcludePaths,
 				OutputAlias:      site.OutputAlias,
 				PathOverrides:    convertPathOverrides(site.PathOverrides),
 			}
-			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
-				i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
+			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, AllowedPaths=%v",
+				i+1, site.BaseURL, site.CSSLocator, site.AllowedPaths)
 		}
 	} else {
 		logger.Printf("No sites defined in rollup.yml, falling back to URL-based configuration")
```
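This is the flattening the commit title describes: site definitions move from a nested scrape.sites key to a top-level sites key, as in the sketch below. The key spellings are assumed from the Go field names; the diff does not show the yaml tags:

```yaml
# Hypothetical rollup.yml under the flattened schema. Previously these
# entries lived under a nested `scrape: sites:` key; key names are assumptions.
sites:
  - base_url: https://example.com/docs
    css_locator: article.content
    exclude_selectors:
      - nav
      - footer
    allowed_paths:
      - /docs/
requests_per_second: 2
burst_limit: 3
```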
```diff
@@ -75,10 +72,9 @@ func runWeb(cmd *cobra.Command, args []string) error {
 				BaseURL:          u,
 				CSSLocator:       includeSelector,
 				ExcludeSelectors: excludeSelectors,
-				MaxDepth:         depth,
 			}
-			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
-				i+1, u, includeSelector, depth)
+			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s",
+				i+1, u, includeSelector)
 		}
 	}
@@ -92,13 +88,13 @@ func runWeb(cmd *cobra.Command, args []string) error {
 	defaultBurstLimit := 3
 
 	// Use default values if not set in the configuration
-	requestsPerSecond := cfg.Scrape.RequestsPerSecond
-	if requestsPerSecond == 0 {
-		requestsPerSecond = defaultRequestsPerSecond
+	requestsPerSecond := defaultRequestsPerSecond
+	if cfg.RequestsPerSecond != nil {
+		requestsPerSecond = *cfg.RequestsPerSecond
 	}
-	burstLimit := cfg.Scrape.BurstLimit
-	if burstLimit == 0 {
-		burstLimit = defaultBurstLimit
+	burstLimit := defaultBurstLimit
+	if cfg.BurstLimit != nil {
+		burstLimit = *cfg.BurstLimit
 	}
 
 	scraperConfig := scraper.Config{
```
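RequestsPerSecond and BurstLimit also change shape: instead of plain values where 0 meant "unset", they are now pointers, so an explicit 0 in rollup.yml is distinguishable from an absent key. A self-contained sketch of the pattern (the struct, its yaml tags, and the float64 type for RequestsPerSecond are assumptions):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Hypothetical excerpt of the flattened config. Pointer fields distinguish
// "key absent" (nil) from "explicitly zero" (a pointer to 0).
type Config struct {
	RequestsPerSecond *float64 `yaml:"requests_per_second"`
	BurstLimit        *int     `yaml:"burst_limit"`
}

func main() {
	var cfg Config
	if err := yaml.Unmarshal([]byte("burst_limit: 0\n"), &cfg); err != nil {
		panic(err)
	}

	burstLimit := 3 // default
	if cfg.BurstLimit != nil {
		burstLimit = *cfg.BurstLimit // an explicit 0 now overrides the default
	}
	fmt.Println(burstLimit) // prints 0; the old ==0 check would have fallen back to 3
}
```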
```diff
@@ -114,7 +110,32 @@ func runWeb(cmd *cobra.Command, args []string) error {
 		outputType, requestsPerSecond, burstLimit)
 
 	logger.Println("Starting scraping process")
+	startTime := time.Now()
+	progressTicker := time.NewTicker(time.Second)
+	defer progressTicker.Stop()
+
+	done := make(chan bool)
+	messagePrinted := false
+	go func() {
+		for {
+			select {
+			case <-progressTicker.C:
+				if time.Since(startTime) > 5*time.Second && !messagePrinted {
+					fmt.Print("This is taking a while (hold tight) ")
+					messagePrinted = true
+				} else if messagePrinted {
+					fmt.Print(".")
+				}
+			case <-done:
+				return
+			}
+		}
+	}()
+
 	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
+	done <- true
+	fmt.Println() // New line after progress indicator
 
 	if err != nil {
 		logger.Printf("Error occurred during scraping: %v", err)
 		return fmt.Errorf("error scraping content: %v", err)
```
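The same show-after-five-seconds spinner now exists in both cmd/files.go and cmd/web.go. One detail worth noting: done is unbuffered, so `done <- true` blocks until the goroutine receives it, which guarantees the spinner has stopped before the trailing newline prints. If the pattern were ever factored out, it might look like this sketch (purely illustrative, not part of the commit):

```go
package main

import (
	"fmt"
	"time"
)

// startSpinner is a hypothetical helper, not part of this commit. It prints
// a "hold tight" message once the delay has elapsed, then one dot per tick,
// until the returned stop function is called.
func startSpinner(delay, tick time.Duration) (stop func()) {
	done := make(chan bool)
	go func() {
		start := time.Now()
		t := time.NewTicker(tick)
		defer t.Stop()
		printed := false
		for {
			select {
			case <-t.C:
				if !printed && time.Since(start) > delay {
					fmt.Print("This is taking a while (hold tight) ")
					printed = true
				} else if printed {
					fmt.Print(".")
				}
			case <-done:
				return
			}
		}
	}()
	// Sending blocks until the goroutine receives, so the spinner is
	// guaranteed to have stopped when stop() returns.
	return func() { done <- true }
}

func main() {
	stop := startSpinner(5*time.Second, time.Second)
	time.Sleep(8 * time.Second) // stand-in for the real scraping work
	stop()
	fmt.Println()
}
```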
```diff
@@ -179,45 +200,17 @@ func generateDefaultFilename() string {
 	return fmt.Sprintf("web-%s.rollup.md", timestamp)
 }
 
-func scrapeRecursively(urlStr string, depth int) (string, error) {
-	visited := make(map[string]bool)
-	return scrapeURL(urlStr, depth, visited)
-}
-
-func scrapeURL(urlStr string, depth int, visited map[string]bool) (string, error) {
-	if depth < 0 || visited[urlStr] {
-		return "", nil
-	}
-
-	visited[urlStr] = true
-
+func scrapeURL(urlStr string) (string, error) {
 	content, err := testExtractAndConvertContent(urlStr)
 	if err != nil {
 		return "", err
 	}
 
-	if depth > 0 {
-		links, err := testExtractLinks(urlStr)
-		if err != nil {
-			return content, fmt.Errorf("error extracting links: %v", err)
-		}
-
-		for _, link := range links {
-			subContent, err := scrapeURL(link, depth-1, visited)
-			if err != nil {
-				fmt.Printf("Warning: Error scraping %s: %v\n", link, err)
-				continue
-			}
-			content += "\n\n---\n\n" + subContent
-		}
-	}
-
 	return content, nil
 }
 
 var (
 	testExtractAndConvertContent = extractAndConvertContent
-	testExtractLinks             = scraper.ExtractLinks
 )
 
 func extractAndConvertContent(urlStr string) (string, error) {
```
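The surviving testExtractAndConvertContent variable is a function-value test seam: production code calls through a package-level var, and tests swap in a stub. A minimal self-contained sketch of the pattern (all names below are illustrative, not from the repo):

```go
package main

import "fmt"

// fetchContent is the real implementation.
func fetchContent(url string) (string, error) {
	return "live content from " + url, nil
}

// testFetchContent is the seam: production code calls through this
// package-level variable, so tests can replace it with a stub.
var testFetchContent = fetchContent

func scrape(url string) (string, error) {
	return testFetchContent(url)
}

func main() {
	// A test would perform the swap like this, restoring the original after.
	orig := testFetchContent
	testFetchContent = func(url string) (string, error) {
		return "stubbed content", nil
	}
	defer func() { testFetchContent = orig }()

	out, _ := scrape("https://example.com")
	fmt.Println(out) // stubbed content
}
```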