正则表达式:爬虫内容提取利器
This commit is contained in:
46
0402+2509165015.JSON
Normal file
46
0402+2509165015.JSON
Normal file
@@ -0,0 +1,46 @@
|
||||
import requests
from bs4 import BeautifulSoup
import json
import time

# Browser-like User-Agent: Douban rejects requests sent with the default
# python-requests UA string.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
}

movies = []

# The Top250 list is paginated 25 items per page via the `start` query param.
for start in range(0, 250, 25):
    url = f"https://movie.douban.com/top250?start={start}"
    # timeout= keeps an unresponsive server from hanging the script forever;
    # raise_for_status() surfaces HTTP errors (e.g. 403 anti-bot) instead of
    # silently parsing an error page into zero items.
    res = requests.get(url, headers=headers, timeout=10)
    res.raise_for_status()
    soup = BeautifulSoup(res.text, "html.parser")
    items = soup.find_all("div", class_="item")

    for item in items:
        rank = item.find("em").text
        title = item.find("span", class_="title").text
        rating = item.find("span", class_="rating_num").text
        # Last <span> inside the star block holds e.g. "2914736人评价".
        people = item.find("div", class_="star").find_all("span")[-1].text.replace("人评价", "")
        # Not every movie has a one-line quote ("inq" span); look it up once
        # instead of calling find() twice.
        inq = item.find("span", class_="inq")
        quote = inq.text if inq else "无"

        # The unclassed <p> holds two lines:
        #   line1: "导演: X   主演: Y"  (主演 may be absent for some entries)
        #   line2: "1994 / 美国 / 犯罪 剧情"
        info = item.find("p", class_="").text.strip().split("\n")
        line1 = info[0].strip()
        line2 = info[1].strip() if len(info) > 1 else ""

        director = line1.split("导演: ")[1].split("主演: ")[0].strip() if "导演: " in line1 else "未知"
        actor = line1.split("主演: ")[1].strip() if "主演: " in line1 else "未知"
        parts = line2.split("/") if line2 else []
        year = parts[0].strip() if len(parts) >= 1 else "未知"
        area = parts[1].strip() if len(parts) >= 2 else "未知"
        genre = parts[2].strip() if len(parts) >= 3 else "未知"

        movies.append({
            "排名": rank, "电影名": title, "评分": rating, "评价人数": people, "经典台词": quote,
            "导演": director, "主演": actor, "年份": year, "地区": area, "类型": genre
        })

    # Be polite: throttle one request per second to avoid rate limiting.
    time.sleep(1)
    print(f"已爬取 {start + 25} 条")

# ensure_ascii=False keeps the Chinese text readable in the output file.
with open("douban_top250.json", "w", encoding="utf-8") as f:
    json.dump(movies, f, ensure_ascii=False, indent=2)

print("✅ JSON 导出完成")
|
||||
Reference in New Issue
Block a user