refactor: remove redundant scraping functions and update runWeb to use scraper.ScrapeSites for improved maintainability
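
In short: runWeb no longer hand-rolls its own recursive crawl (scrapeRecursively, scrapeURL, extractAndConvertContent, and getFilenameFromContent are deleted, along with their test seams). It now builds a scraper.Config from rollup.yml or the --urls flag and delegates the whole crawl to scraper.ScrapeSites. Per-URL output filenames are derived directly from the sanitized URL, and sanitizeFilename drops the regexp dependency in favor of strings.Map, which also lets the net/url and regexp imports go.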
cmd/web.go (270 changed lines)
@@ -4,9 +4,7 @@ import (
 	"fmt"
 	"io"
 	"log"
-	"net/url"
 	"os"
-	"regexp"
 	"strings"
 	"time"
 
@@ -23,8 +21,6 @@ var (
 	excludeSelectors []string
 )
 
-var scraperConfig scraper.Config
-
 var webCmd = &cobra.Command{
 	Use:   "web",
 	Short: "Scrape main content from webpages and convert to Markdown",
@@ -41,93 +37,83 @@ func init() {
 }
 
 func runWeb(cmd *cobra.Command, args []string) error {
 	scraper.SetupLogger(verbose)
 	logger := log.New(os.Stdout, "WEB: ", log.LstdFlags)
 	if !verbose {
 		logger.SetOutput(io.Discard)
 	}
 	logger.Printf("Starting web scraping process with verbose mode: %v", verbose)
-	scraperConfig.Verbose = verbose
 
-	var siteConfigs []scraper.SiteConfig
-	if len(cfg.Scrape.Sites) > 0 {
-		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
-		siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
-		for i, site := range cfg.Scrape.Sites {
-			siteConfigs[i] = scraper.SiteConfig{
-				BaseURL:          site.BaseURL,
-				CSSLocator:       site.CSSLocator,
-				ExcludeSelectors: site.ExcludeSelectors,
-				MaxDepth:         site.MaxDepth,
-				AllowedPaths:     site.AllowedPaths,
-				ExcludePaths:     site.ExcludePaths,
-				OutputAlias:      site.OutputAlias,
-				PathOverrides:    convertPathOverrides(site.PathOverrides),
-			}
-			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
-				i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
-		}
-	} else {
-		logger.Printf("No sites defined in rollup.yml, falling back to URL-based configuration")
-		siteConfigs = make([]scraper.SiteConfig, len(urls))
-		for i, u := range urls {
-			siteConfigs[i] = scraper.SiteConfig{
-				BaseURL:          u,
-				CSSLocator:       includeSelector,
-				ExcludeSelectors: excludeSelectors,
-				MaxDepth:         depth,
-			}
-			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
-				i+1, u, includeSelector, depth)
-		}
-	}
+	// Prepare site configurations
+	var siteConfigs []scraper.SiteConfig
+	if len(cfg.Scrape.Sites) > 0 {
+		// Use configurations from rollup.yml
+		logger.Printf("Using configuration from rollup.yml for %d sites", len(cfg.Scrape.Sites))
+		siteConfigs = make([]scraper.SiteConfig, len(cfg.Scrape.Sites))
+		for i, site := range cfg.Scrape.Sites {
+			siteConfigs[i] = scraper.SiteConfig{
+				BaseURL:          site.BaseURL,
+				CSSLocator:       site.CSSLocator,
+				ExcludeSelectors: site.ExcludeSelectors,
+				MaxDepth:         site.MaxDepth,
+				AllowedPaths:     site.AllowedPaths,
+				ExcludePaths:     site.ExcludePaths,
+				OutputAlias:      site.OutputAlias,
+				PathOverrides:    convertPathOverrides(site.PathOverrides),
+			}
+			logger.Printf("Site %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d, AllowedPaths=%v",
+				i+1, site.BaseURL, site.CSSLocator, site.MaxDepth, site.AllowedPaths)
+		}
+	} else {
+		// Use command-line URLs
+		if len(urls) == 0 {
+			logger.Println("Error: No URLs provided via --urls flag")
+			return fmt.Errorf("no URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
+		}
+		siteConfigs = make([]scraper.SiteConfig, len(urls))
+		for i, u := range urls {
+			siteConfigs[i] = scraper.SiteConfig{
+				BaseURL:          u,
+				CSSLocator:       includeSelector,
+				ExcludeSelectors: excludeSelectors,
+				MaxDepth:         depth,
+				AllowedPaths:     []string{"/"}, // Allow all paths by default
+			}
+			logger.Printf("URL %d configuration: BaseURL=%s, CSSLocator=%s, MaxDepth=%d",
+				i+1, u, includeSelector, depth)
+		}
+	}
 
-	if len(siteConfigs) == 0 {
-		logger.Println("Error: No sites or URLs provided")
-		return fmt.Errorf("no sites or URLs provided. Use --urls flag with comma-separated URLs or set 'scrape.sites' in the rollup.yml file")
-	}
+	// Set up scraper configuration
+	scraperConfig := scraper.Config{
+		Sites:      siteConfigs,
+		OutputType: outputType,
+		Verbose:    verbose,
+		Scrape: scraper.ScrapeConfig{
+			RequestsPerSecond: cfg.Scrape.RequestsPerSecond,
+			BurstLimit:        cfg.Scrape.BurstLimit,
+		},
+	}
+	logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
+		outputType, scraperConfig.Scrape.RequestsPerSecond, scraperConfig.Scrape.BurstLimit)
 
-	// Set default values for rate limiting
-	defaultRequestsPerSecond := 1.0
-	defaultBurstLimit := 3
+	// Start scraping using scraper.ScrapeSites
+	logger.Println("Starting scraping process")
+	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
+	if err != nil {
+		logger.Printf("Error occurred during scraping: %v", err)
+		return fmt.Errorf("error scraping content: %v", err)
+	}
+	logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))
 
-	// Use default values if not set in the configuration
-	requestsPerSecond := cfg.Scrape.RequestsPerSecond
-	if requestsPerSecond == 0 {
-		requestsPerSecond = defaultRequestsPerSecond
-	}
-	burstLimit := cfg.Scrape.BurstLimit
-	if burstLimit == 0 {
-		burstLimit = defaultBurstLimit
-	}
-
-	scraperConfig := scraper.Config{
-		Sites:      siteConfigs,
-		OutputType: outputType,
-		Verbose:    verbose,
-		Scrape: scraper.ScrapeConfig{
-			RequestsPerSecond: requestsPerSecond,
-			BurstLimit:        burstLimit,
-		},
-	}
-	logger.Printf("Scraper configuration: OutputType=%s, RequestsPerSecond=%f, BurstLimit=%d",
-		outputType, requestsPerSecond, burstLimit)
-
-	logger.Println("Starting scraping process")
-	scrapedContent, err := scraper.ScrapeSites(scraperConfig)
-	if err != nil {
-		logger.Printf("Error occurred during scraping: %v", err)
-		return fmt.Errorf("error scraping content: %v", err)
-	}
-	logger.Printf("Scraping completed. Total content scraped: %d", len(scrapedContent))
-
-	if outputType == "single" {
-		logger.Println("Writing content to a single file")
-		return writeSingleFile(scrapedContent)
-	} else {
-		logger.Println("Writing content to multiple files")
-		return writeMultipleFiles(scrapedContent)
-	}
+	// Write output to files
+	if outputType == "single" {
+		logger.Println("Writing content to a single file")
+		return writeSingleFile(scrapedContent)
+	} else {
+		logger.Println("Writing content to multiple files")
+		return writeMultipleFiles(scrapedContent)
+	}
 }
 
 func writeSingleFile(content map[string]string) error {
@@ -151,11 +137,7 @@ func writeSingleFile(content map[string]string) error {
 
 func writeMultipleFiles(content map[string]string) error {
 	for url, c := range content {
-		filename, err := getFilenameFromContent(c, url)
-		if err != nil {
-			return fmt.Errorf("error generating filename for %s: %v", url, err)
-		}
-
+		filename := sanitizeFilename(url) + ".rollup.md"
 		file, err := os.Create(filename)
 		if err != nil {
 			return fmt.Errorf("error creating output file %s: %v", filename, err)
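
With getFilenameFromContent gone, per-URL output names come straight from the URL rather than from the page title. A minimal, self-contained sketch of the new naming scheme (the main function and the example URL are illustrative, not part of the commit; sanitizeFilename is copied from the post-commit cmd/web.go shown in the final hunk below):

package main

import (
	"fmt"
	"strings"
)

// mirrors sanitizeFilename from cmd/web.go after this commit
func sanitizeFilename(name string) string {
	// Replace any rune that isn't alphanumeric, dash, or underscore
	name = strings.Map(func(r rune) rune {
		if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' {
			return r
		}
		return '_'
	}, name)
	return strings.Trim(name, "_")
}

func main() {
	url := "https://example.com/docs/intro"
	// Same expression writeMultipleFiles now uses to name each output file.
	fmt.Println(sanitizeFilename(url) + ".rollup.md")
	// Output: https___example_com_docs_intro.rollup.md
}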
@@ -179,106 +161,14 @@ func generateDefaultFilename() string {
 	return fmt.Sprintf("web-%s.rollup.md", timestamp)
 }
 
-func scrapeRecursively(urlStr string, depth int) (string, error) {
-	visited := make(map[string]bool)
-	return scrapeURL(urlStr, depth, visited)
-}
-
-func scrapeURL(urlStr string, depth int, visited map[string]bool) (string, error) {
-	if depth < 0 || visited[urlStr] {
-		return "", nil
-	}
-
-	visited[urlStr] = true
-
-	content, err := testExtractAndConvertContent(urlStr)
-	if err != nil {
-		return "", err
-	}
-
-	if depth > 0 {
-		links, err := testExtractLinks(urlStr)
-		if err != nil {
-			return content, fmt.Errorf("error extracting links: %v", err)
-		}
-
-		for _, link := range links {
-			subContent, err := scrapeURL(link, depth-1, visited)
-			if err != nil {
-				fmt.Printf("Warning: Error scraping %s: %v\n", link, err)
-				continue
-			}
-			content += "\n\n---\n\n" + subContent
-		}
-	}
-
-	return content, nil
-}
-
-var (
-	testExtractAndConvertContent = extractAndConvertContent
-	testExtractLinks             = scraper.ExtractLinks
-)
-
-func extractAndConvertContent(urlStr string) (string, error) {
-	content, err := scraper.FetchWebpageContent(urlStr)
-	if err != nil {
-		return "", fmt.Errorf("error fetching webpage content: %v", err)
-	}
-
-	if includeSelector != "" {
-		content, err = scraper.ExtractContentWithCSS(content, includeSelector, excludeSelectors)
-		if err != nil {
-			return "", fmt.Errorf("error extracting content with CSS: %v", err)
-		}
-	}
-
-	markdown, err := scraper.ProcessHTMLContent(content, scraper.Config{})
-	if err != nil {
-		return "", fmt.Errorf("error processing HTML content: %v", err)
-	}
-
-	parsedURL, err := url.Parse(urlStr)
-	if err != nil {
-		return "", fmt.Errorf("error parsing URL: %v", err)
-	}
-	header := fmt.Sprintf("# ::: Content from %s\n\n", parsedURL.String())
-
-	return header + markdown + "\n\n", nil
-}
-
-func getFilenameFromContent(content, urlStr string) (string, error) {
-	// Try to extract title from content
-	titleStart := strings.Index(content, "<title>")
-	titleEnd := strings.Index(content, "</title>")
-	if titleStart != -1 && titleEnd != -1 && titleEnd > titleStart {
-		title := strings.TrimSpace(content[titleStart+7 : titleEnd])
-		if title != "" {
-			return sanitizeFilename(title) + ".rollup.md", nil
-		}
-	}
-
-	// If no title found or title is empty, use the URL
-	parsedURL, err := url.Parse(urlStr)
-	if err != nil {
-		return "", fmt.Errorf("invalid URL: %v", err)
-	}
-
-	if parsedURL.Host == "" {
-		return "", fmt.Errorf("invalid URL: missing host")
-	}
-
-	filename := parsedURL.Host
-	if parsedURL.Path != "" && parsedURL.Path != "/" {
-		filename += strings.TrimSuffix(parsedURL.Path, "/")
-	}
-	return sanitizeFilename(filename) + ".rollup.md", nil
-}
-
 func sanitizeFilename(name string) string {
 	// Remove any character that isn't alphanumeric, dash, or underscore
-	reg := regexp.MustCompile("[^a-zA-Z0-9-_]+")
-	name = reg.ReplaceAllString(name, "_")
+	name = strings.Map(func(r rune) rune {
+		if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' {
+			return r
+		}
+		return '_'
+	}, name)
 
 	// Trim any leading or trailing underscores
 	name = strings.Trim(name, "_")
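
Note a subtle behavior change in sanitizeFilename: the old pattern [^a-zA-Z0-9-_]+ collapsed a whole run of disallowed characters into a single underscore, while strings.Map replaces each rune one-for-one. A small comparison sketch (illustrative, not part of the commit; the input string is an assumption):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	in := "https://example.com"

	// Old behavior: the trailing "+" collapses runs of disallowed characters.
	old := regexp.MustCompile("[^a-zA-Z0-9-_]+").ReplaceAllString(in, "_")
	fmt.Println(strings.Trim(old, "_")) // https_example_com

	// New behavior: one underscore per disallowed rune, no collapsing.
	mapped := strings.Map(func(r rune) rune {
		if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' || r == '_' {
			return r
		}
		return '_'
	}, in)
	fmt.Println(strings.Trim(mapped, "_")) // https___example_com
}

The trade is a longer but still deterministic filename in exchange for dropping the regexp import and the per-call pattern compile.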