trends-scraper/main.py
#!/usr/bin/env python3
"""
Market Trends Scraper - Main Entry Point
This script serves as the main entry point for the Market Trends Scraper application.
It initializes the scraper, processes configuration, and orchestrates the scraping
and analysis workflow.
"""

import sys
import argparse
from pathlib import Path

# Add src directory to Python path
sys.path.insert(0, str(Path(__file__).resolve().parent / "src"))

from scraper import MarketTrendsScraper
from config_manager import ConfigManager
from logger import setup_logger


def parse_arguments():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(
        description="Web Scraper for Market Trends - Collect and analyze pricing and product trends"
    )
    parser.add_argument(
        "--config", "-c",
        type=str,
        default="config/config.yaml",
        help="Path to configuration file"
    )
    parser.add_argument(
        "--output", "-o",
        type=str,
        default="data/output.csv",
        help="Path to output file"
    )
    parser.add_argument(
        "--headless",
        action="store_true",
        help="Run browser in headless mode"
    )
    parser.add_argument(
        "--verbose", "-v",
        action="store_true",
        help="Enable verbose logging"
    )
    return parser.parse_args()
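
# Example invocation (the paths shown are the argparse defaults above; all flags are optional):
#   python main.py --config config/config.yaml --output data/output.csv --headless --verbose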


def main():
    """Main application entry point."""
    args = parse_arguments()

    # Setup logging
    logger = setup_logger(verbose=args.verbose)
    logger.info("Starting Market Trends Scraper")

    try:
        # Load configuration
        config_manager = ConfigManager(args.config)
        config = config_manager.load_config()
        logger.info("Configuration loaded successfully")

        # Initialize and run scraper
        scraper = MarketTrendsScraper(config, headless=args.headless)
        logger.info("Scraper initialized")

        # Run the scraping process
        data = scraper.scrape_market_trends()
        logger.info(f"Scraped {len(data)} product records")

        # Save results
        scraper.save_data(data, args.output)
        logger.info(f"Data saved to {args.output}")

        # Analyze trends
        analysis = scraper.analyze_trends(data)
        logger.info("Trend analysis completed")

        # Save analysis results alongside the CSV (assumes the output path ends in .csv)
        analysis_output = args.output.replace('.csv', '_analysis.json')
        scraper.save_analysis(analysis, analysis_output)
        logger.info(f"Analysis saved to {analysis_output}")

        logger.info("Market Trends Scraper completed successfully")
        return 0

    except Exception as e:
        logger.error(f"Error occurred: {e}")
        return 1
if __name__ == "__main__":
sys.exit(main())
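
main.py drives three modules that live under src/ and are not shown in this file: scraper.py, config_manager.py, and logger.py. For orientation, the sketch below shows one minimal shape those interfaces could take so that every call made in main() resolves. Only the names and call signatures are taken from main.py; everything else (the PyYAML assumption for the config file, the logging setup, the stubbed scraper methods) is an assumption, not the project's actual implementation.

import logging

import yaml  # assumption: config/config.yaml is parsed with PyYAML


def setup_logger(verbose: bool = False) -> logging.Logger:
    """Return the application logger; DEBUG when --verbose is set, INFO otherwise."""
    logging.basicConfig(
        level=logging.DEBUG if verbose else logging.INFO,
        format="%(asctime)s %(levelname)s %(name)s: %(message)s",
    )
    return logging.getLogger("market_trends_scraper")


class ConfigManager:
    """Loads the configuration file passed via --config."""

    def __init__(self, config_path: str):
        self.config_path = config_path

    def load_config(self) -> dict:
        with open(self.config_path, "r", encoding="utf-8") as f:
            return yaml.safe_load(f)


class MarketTrendsScraper:
    """Method signatures expected by main(); the bodies here are placeholders."""

    def __init__(self, config: dict, headless: bool = False):
        self.config = config
        self.headless = headless

    def scrape_market_trends(self) -> list:
        raise NotImplementedError

    def save_data(self, data: list, output_path: str) -> None:
        raise NotImplementedError

    def analyze_trends(self, data: list) -> dict:
        raise NotImplementedError

    def save_analysis(self, analysis: dict, output_path: str) -> None:
        raise NotImplementedError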