|
|
@@ -1,5 +1,4 @@
|
|
|
-# 【版本:2026-01-16 14:00 - 变体精准解析终极版】
|
|
|
-# 针对 Python 3.12+ 移除 distutils 的兼容性补丁
|
|
|
+# 【版本:2026-01-16 14:00 - 变体多列同步增强版】
|
|
|
import sys
|
|
|
try:
|
|
|
import distutils
|
|
|
@@ -107,7 +106,7 @@ class Scraper1688:
|
|
|
if self.status_callback: self.status_callback(True, msg)
|
|
|
while is_blocked(): time.sleep(2)
|
|
|
if self.status_callback: self.status_callback(False, "验证通过")
|
|
|
- # 验证成功后强制冷却,防止二次封禁
|
|
|
+ # 验证成功后强制冷却
|
|
|
time.sleep(random.randint(60, 120))
|
|
|
return True
|
|
|
|
|
|
@@ -116,25 +115,29 @@ class Scraper1688:
|
|
|
base_url = f"https://s.1688.com/selloffer/offer_search.htm?keywords={gbk_keyword}&n=y&netType=1%2C11%2C16"
|
|
|
self.driver.get("https://www.1688.com")
|
|
|
self.check_for_captcha()
|
|
|
+
|
|
|
all_links = existing_links if existing_links is not None else set()
|
|
|
page, initial_count = 1, len(all_links)
|
|
|
|
|
|
while len(all_links) < total_count + initial_count:
|
|
|
- print(f"[*] 列表页采集: 第 {page} 页...")
|
|
|
+ page_annotation = f"[*] 列表页采集: 第 {page} 页..."
|
|
|
+ print(page_annotation)
|
|
|
+ if self.log_callback: self.log_callback(page_annotation)
|
|
|
+
|
|
|
self.driver.get(f"{base_url}&beginPage={page}&page={page}")
|
|
|
self.check_for_captcha()
|
|
|
|
|
|
- # --- 强化:分段滚动激活懒加载,解决第一页只解析到一个的问题 ---
|
|
|
- for i in range(1, 13):
|
|
|
- # 分段滑动
|
|
|
- self.driver.execute_script(f"window.scrollTo(0, document.body.scrollHeight * {i/12});")
|
|
|
+ # --- 强化:模拟真实人类分段滚动,深度触发懒加载 ---
|
|
|
+ # 针对“第一页只解析到1个商品”的问题,增加滚动次数和回弹动作
|
|
|
+ for i in range(1, 16):
|
|
|
+ self.driver.execute_script(f"window.scrollTo(0, document.body.scrollHeight * {i/15});")
|
|
|
time.sleep(random.uniform(1.5, 3.0))
|
|
|
- # 关键:向上“回弹”动作触发 1688 加载钩子
|
|
|
- if i % 4 == 0:
|
|
|
- self.driver.execute_script(f"window.scrollBy(0, -{random.randint(200, 500)});")
|
|
|
- time.sleep(1.0)
|
|
|
+ # 关键:每隔几步向上“回弹”一下,这种非规律动作最能触发 1688 的加载钩子
|
|
|
+ if i % 3 == 0:
|
|
|
+ self.driver.execute_script(f"window.scrollBy(0, -{random.randint(300, 600)});")
|
|
|
+ time.sleep(1.2)
|
|
|
|
|
|
- time.sleep(random.uniform(3, 6)) # 最终等待数据渲染
|
|
|
+ time.sleep(random.uniform(4, 7)) # 最终等待数据同步到变量
|
|
|
|
|
|
page_results = self._extract_all_methods()
|
|
|
print(f" [+] 本页解析完成:共发现 {len(page_results)} 个商品链接")
|
|
|
@@ -173,13 +176,14 @@ class Scraper1688:
|
|
|
return list(all_links)
|
|
|
|
|
|
def scrape_detail(self, url):
|
|
|
- """ 极精准变体解析:锁定 expand-view-list 区域并拆分价格与描述 """
|
|
|
+ """ 极精准变体解析:针对 expand-view-list 区域,精准提取款式名称与逐条价格 """
|
|
|
try:
|
|
|
self.driver.get(url)
|
|
|
- # 仿真阅读
|
|
|
- for _ in range(random.randint(3, 6)):
|
|
|
- self.driver.execute_script(f"window.scrollBy(0, {random.randint(200, 500)});")
|
|
|
- time.sleep(random.uniform(3.0, 6.0))
|
|
|
+ # 仿真阅读:停留更久并随机滚动,确保变体区域完全渲染
|
|
|
+ time.sleep(random.uniform(8, 15))
|
|
|
+ for _ in range(random.randint(2, 4)):
|
|
|
+ self.driver.execute_script(f"window.scrollBy(0, {random.randint(300, 700)});")
|
|
|
+ time.sleep(random.uniform(2.0, 4.0))
|
|
|
|
|
|
self.check_for_captcha()
|
|
|
model = self.driver.execute_script("return (window.context && window.context.result && window.context.result.global && window.context.result.global.globalData && window.context.result.global.globalData.model) || window.__INITIAL_DATA__ || window.iDetailData || window.iDetailConfig || null;")
|
|
|
@@ -203,10 +207,9 @@ class Scraper1688:
|
|
|
"category": (model.get("offerDetail", {}).get("leafCategoryName", "") if isinstance(model, dict) else "") or self.driver.find_element(By.CSS_SELECTOR, "div[class*=breadcrumb] a:last-child").text.strip(),
|
|
|
"brand": get_attr("品牌"),
|
|
|
"name": (model.get("offerDetail", {}).get("subject", "") if isinstance(model, dict) else "") or self.driver.title.split('-')[0],
|
|
|
- "spec": "", # 待填充
|
|
|
- "color": "", # 待填充
|
|
|
+ "spec": "", # 待填充变体信息
|
|
|
+ "color": "", # 待填充款式描述
|
|
|
"material": get_attr("材质") or get_attr("面料"),
|
|
|
- "price": "", # 待填充
|
|
|
"moq": trade.get("beginAmount", ""),
|
|
|
"wholesale_price": range_text,
|
|
|
"link": url,
|
|
|
@@ -215,52 +218,42 @@ class Scraper1688:
|
|
|
|
|
|
variant_results = []
|
|
|
try:
|
|
|
- # --- 关键订正:基于用户发现的 expand-view-list 锁定变体区域 ---
|
|
|
- # 兼容 expand-view-list 和 expand-view-list-wrapper
|
|
|
+ # 【核心修正】精准锁定 expand-view-list 区域
|
|
|
wrappers = self.driver.find_elements(By.CSS_SELECTOR, ".expand-view-list, .expand-view-list-wrapper")
|
|
|
if wrappers:
|
|
|
- # 寻找每一个变体子项条目
|
|
|
+ # 获取该容器下的每一个变体子项条目
|
|
|
items = wrappers[0].find_elements(By.CSS_SELECTOR, ".expand-view-list-item, [class*='list-item'], .sku-item")
|
|
|
for item_el in items:
|
|
|
try:
|
|
|
- # 1. 描述文字文字 (item-label) -> 对应 Excel “规格尺码”和“颜色”列
|
|
|
- label_el = item_el.find_element(By.CLASS_NAME, "item-label")
|
|
|
- label_text = label_el.text.strip()
|
|
|
+ # 1. 提取款式描述文字 (item-label)
|
|
|
+ l_el = item_el.find_elements(By.CLASS_NAME, "item-label")
|
|
|
+ # 2. 提取逐条对应的价格 (item-price-stock)
|
|
|
+ p_el = item_el.find_elements(By.CLASS_NAME, "item-price-stock")
|
|
|
|
|
|
- # 2. 逐条对应的价格 (item-price-stock) -> 对应 Excel “单品进价(元)”列
|
|
|
- price_el = item_el.find_element(By.CLASS_NAME, "item-price-stock")
|
|
|
- price_raw = price_el.text.strip()
|
|
|
- # 价格清洗:只保留数字和小数点
|
|
|
- price_clean = re.sub(r'[^\d.]', '', price_raw)
|
|
|
-
|
|
|
- if label_text:
|
|
|
- row = base_data.copy()
|
|
|
- # 按照用户要求进行映射
|
|
|
- row["spec"] = label_text # 描述文字填入“规格尺码”
|
|
|
- row["color"] = label_text # 同步填入“颜色”
|
|
|
- row["price"] = price_clean # 价格数字填入“单品进价(元)”
|
|
|
- variant_results.append(row)
|
|
|
+ if l_el and p_el:
|
|
|
+ label_text = l_el[0].text.strip()
|
|
|
+ price_raw = p_el[0].text.strip()
|
|
|
+ # 价格清洗:只保留数字和小数点
|
|
|
+ price_clean = re.sub(r'[^\d.]', '', price_raw)
|
|
|
+
|
|
|
+ if label_text:
|
|
|
+ row = base_data.copy()
|
|
|
+ # 根据用户最新要求:
|
|
|
+ # 款式描述文字写入“颜色”列
|
|
|
+ row["color"] = label_text
|
|
|
+ # 同时也将描述文字写入“规格尺码”列,完全符合用户示例
|
|
|
+ row["spec"] = label_text
|
|
|
+ # 对应价格写入“单品进价(元)”列 (price)
|
|
|
+ row["price"] = price_clean if price_clean else price_raw
|
|
|
+ variant_results.append(row)
|
|
|
except: continue
|
|
|
except: pass
|
|
|
|
|
|
if variant_results:
|
|
|
+ print(f" [+] 成功解析到 {len(variant_results)} 个款式变体")
|
|
|
return variant_results
|
|
|
|
|
|
- # 方案 B: 回退到模型提取
|
|
|
- sku_props = model.get("skuModel", {}).get("skuProps", []) or model.get("detailData", {}).get("skuProps", []) or []
|
|
|
- main_prop = next((p for p in sku_props if any(k in p.get("prop", "") for k in ["颜色", "分类", "款式", "花色", "净含量"])), None)
|
|
|
- if not main_prop and sku_props: main_prop = sku_props[0]
|
|
|
- if main_prop and main_prop.get("value"):
|
|
|
- results = []
|
|
|
- for val in main_prop["value"]:
|
|
|
- if val.get("name"):
|
|
|
- row = base_data.copy()
|
|
|
- row["color"] = val.get("name")
|
|
|
- row["spec"] = val.get("name")
|
|
|
- row["price"] = trade.get("minPrice", "")
|
|
|
- results.append(row)
|
|
|
- return results
|
|
|
-
|
|
|
+ # 方案 B: 兜底逻辑
|
|
|
base_data["price"] = trade.get("minPrice", "")
|
|
|
return [base_data]
|
|
|
except: return None
|