import logging

from course_search.scraper.course_scraper import CourseScraper


def test_scraper():
    """Smoke-test the CourseScraper pipeline end to end.

    Exercises, in order: link discovery (`get_course_links`), single-course
    extraction (`extract_course_info`), and the full scrape
    (`scrape_all_courses`), printing a short summary of each step.
    Intended for manual runs; performs live scraping I/O.
    """
    scraper = CourseScraper()

    # Test 1: Get course links
    print("\nTesting get_course_links()...")
    links = scraper.get_course_links()
    print(f"Found {len(links)} links")
    print("Sample links:")
    for link in links[:3]:
        print(f"- {link}")

    # Test 2: Extract course info (only possible if at least one link exists)
    if links:
        print("\nTesting extract_course_info()...")
        sample_course = scraper.extract_course_info(links[0])
        print("Sample course info:")
        for key, value in sample_course.items():
            # str() guards against non-string values (lists, None, numbers),
            # which would otherwise raise TypeError on the slice.
            print(f"{key}: {str(value)[:100]}...")

    # Test 3: Scrape all courses
    print("\nTesting scrape_all_courses()...")
    df = scraper.scrape_all_courses()
    print("\nDataFrame Info:")
    # DataFrame.info() writes to stdout itself and returns None;
    # wrapping it in print() would emit a spurious "None" line.
    df.info()
    print("\nFirst few rows:")
    print(df.head())


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    test_scraper()