Update: 初学记、佩文韵府 and 五车韵瑞

This commit is contained in:
denglifan
2026-03-22 16:18:35 +08:00
parent df475fd03f
commit 183b842090
553 changed files with 754048 additions and 169 deletions

View File

@@ -0,0 +1,79 @@
import os
import time
import urllib.parse
import requests
from requests.exceptions import RequestException
import concurrent.futures
def download_url(url):
    """Download one URL into html_files/<name>.html, with retries.

    The local filename is derived from the last path segment of the
    percent-decoded URL. If the file already exists, the download is
    skipped and treated as a success.

    Args:
        url: The (possibly percent-encoded) URL to fetch.

    Returns:
        A (success, filename) tuple; ``filename`` is the local target
        path whether or not the download succeeded.
    """
    proxies = {"http": "http://127.0.0.1:10808", "https": "http://127.0.0.1:10808"}
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.9",
    }
    decoded_url = urllib.parse.unquote(url)
    volume_name = decoded_url.split("/")[-1]
    filename = f"html_files/{volume_name}.html"
    # Skip files downloaded on a previous run.
    if os.path.exists(filename):
        return True, filename
    retries = 5
    while retries > 0:
        try:
            response = requests.get(url, headers=headers, proxies=proxies, timeout=15)
            if response.status_code == 200:
                with open(filename, "w", encoding="utf-8") as out_f:
                    out_f.write(response.text)
                print(f"Successfully downloaded {filename}")
                return True, filename
            elif response.status_code == 403:
                # Extra back-off: 403 here indicates server-side rate limiting.
                print(
                    f"HTTP Error 403 for {url}. Waiting longer to avoid rate limit..."
                )
                time.sleep(5)
            else:
                print(
                    f"HTTP Error {response.status_code} for {url}. Retries left: {retries - 1}"
                )
        except RequestException as e:
            # Was a silent `pass`; surface the failure so dead proxies /
            # timeouts are visible instead of looking like slow retries.
            print(f"Request error for {url}: {e}. Retries left: {retries - 1}")
        retries -= 1
        time.sleep(3)
    print(f"Failed to download {url} after all retries.")
    return False, filename
def main():
    """Read URLs from still_missing.txt and download them concurrently."""
    # exist_ok=True avoids the check-then-create race of the old
    # `if not os.path.exists(...)` pattern.
    os.makedirs("html_files", exist_ok=True)
    with open("still_missing.txt", "r", encoding="utf-8") as f:
        urls = [line.strip() for line in f if line.strip()]
    print(f"Starting download for {len(urls)} remaining files...")
    # Use ThreadPoolExecutor to download multiple files concurrently.
    # max_workers kept at 5 to avoid triggering 403 Forbidden rate limits.
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        futures = [executor.submit(download_url, url) for url in urls]
        for future in concurrent.futures.as_completed(futures):
            try:
                future.result()
            except Exception as e:
                # Log and continue: one failed volume must not abort the batch.
                print(f"Exception during download: {e}")


if __name__ == "__main__":
    main()