TypeError: __init__() got an unexpected keyword argument 'proxies' bs4 scraping

TypeError: __init__() got an unexpected keyword argument 'proxies' bs4 scraping

#Import
from urllib.request import (
    ProxyDigestAuthHandler,
    ProxyHandler,
    Request,
    build_opener,
    install_opener,
    urlopen,
)
from bs4 import BeautifulSoup
from bs4.element import SoupStrainer
import requests

#PreInit
# NOTE: a dict literal that repeats a key keeps only the LAST value, so the
# original five "https" entries collapsed into one proxy anyway. Map each
# URL scheme to exactly one proxy address.
PROXY = {"https": "144.91.86.144:3128"}
url = "https://www.ebay.de/b/Laptops-Notebooks/175672/bn_1618754?LH_ItemCondition=7000&mag=1&rt=nc&_dmd=1&_sop=1"
# Request() has no 'proxies' keyword (that is a `requests`-library concept).
# With urllib, install an opener built from a ProxyHandler instead; every
# subsequent urlopen() call then goes through the proxy.
install_opener(build_opener(ProxyHandler(PROXY)))
req = Request(url, headers={"User-Agent": "Mozilla/5.0"})
webpage = urlopen(req).read()

#Init
with requests.Session() as c:
    all_data = []
    #Web init
    soup = BeautifulSoup(webpage, "html5lib")
    #Data collection: 'shipping' was never defined in the original — iterate
    # over the shipping-cost elements of each listing instead.
    for shipping in soup.select(".s-item__shipping"):
        print(shipping.text.strip())

The above is the relevant part of the error I get when running the script. Where do I incorporate the proxy values? Help appreciated.

You are getting this error because `Request`'s constructor does not have a `proxies` parameter. There is actually an example of using a proxy with the library you are using in the documentation's source-code examples.

# Note: assigning more than one value to a key in a dict literal results in
# only the final value being kept, so list a single proxy per scheme.
# The question's code only did `from urllib.request import ...`, so the bare
# name `urllib` is undefined there — import the module explicitly.
import urllib.request

PROXY = {"https": "54.93.88.15:9300"}
url = "https://www.ebay.de/b/Laptops-Notebooks/175672/bn_1618754?LH_ItemCondition=7000&mag=1&rt=nc&_dmd=1&_sop=1"
req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0"})

# Create a ProxyHandler that routes requests for the listed schemes
proxy_support = urllib.request.ProxyHandler(PROXY)
# Build an opener that uses that handler
opener = urllib.request.build_opener(proxy_support)
# Install the opener globally so urlopen() picks it up
urllib.request.install_opener(opener)

webpage = urllib.request.urlopen(req)