# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

from __future__ import annotations

import logging
from typing import TYPE_CHECKING

# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError

from science_article_cnki.db_utils.mongo import MongoDBUtils

if TYPE_CHECKING:
    from scrapy.crawler import Crawler
    from scrapy.statscollectors import StatsCollector

# Keep pymongo's own logging quiet; it is very chatty below WARNING level.
mongo_logger = logging.getLogger('pymongo')
mongo_logger.setLevel(logging.WARNING)
logger = logging.getLogger(__name__)


class ScienceArticlCnkiPipeline:
    """Placeholder pipeline from the project template; passes items through unchanged."""

    def process_item(self, item, spider):
        return item
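
# A minimal sketch of how the MongoDB pipeline might be enabled in
# settings.py (the module path, priority value, and MONGO_* values below
# are assumptions, not taken from this project's actual configuration):
#
#   ITEM_PIPELINES = {
#       "science_article_cnki.pipelines.MongoPipeline": 300,
#   }
#   MONGO_URI = "mongodb://localhost:27017"
#   MONGO_DATABASE = "science_article_cnki"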

class MongoPipeline(MongoDBUtils):
    """Store each scraped item in a MongoDB collection named after its item type."""

    def __init__(self, mongo_uri, mongo_db, stats: StatsCollector):
        super().__init__(mongo_uri, mongo_db)
        self.stats: StatsCollector = stats
        # On a duplicate-key insert failure, fall back to updating the
        # existing document instead of silently dropping the item.
        self.insert_failure_update_enable = True

    @classmethod
    def from_crawler(cls, crawler: Crawler):
        return cls(
            mongo_uri=crawler.settings.get("MONGO_URI"),
            mongo_db=crawler.settings.get("MONGO_DATABASE", "items"),
            stats=crawler.stats,
        )

    def open_spider(self, spider):
        self.client = MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
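    # The DuplicateKeyError branch in process_item below only fires if the
    # target collections carry unique indexes. A sketch of how one might be
    # ensured in open_spider (the collection and field names here are
    # hypothetical examples, not taken from this project):
    #
    #   self.db.get_collection("article").create_index("doi", unique=True)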
    def process_item(self, item, spider):
        # Determine the item type and route the item to its collection.
        adapter = ItemAdapter(item)
        item_type = self._get_item_type(item)
        collection = self.db.get_collection(item_type)
        d = adapter.asdict()
        try:
            collection.insert_one(d)
            self.stats.inc_value("item2db_inserted/{}".format(item_type))
        except DuplicateKeyError as duplicate_error:
            if not self.insert_failure_update_enable:
                # Fallback updates are disabled; drop the duplicate silently.
                logger.debug("duplicate item dropped for %s", item_type)
            else:
                # The error details report which unique index was violated
                # (keyPattern) and the offending values (keyValue).
                write_error = duplicate_error.details or {}
                key_pattern = write_error.get('keyPattern') or {}
                key_value = write_error.get('keyValue') or {}
                if not key_value:
                    # Without the duplicate key there is no safe filter; an
                    # empty filter would match an arbitrary document, so
                    # re-raise instead.
                    raise
                logger.debug("dupKey: %s, keyValue: %s", key_pattern, key_value)
                # Do not overwrite _id or the unique-key fields themselves.
                d.pop("_id", None)
                for k in key_pattern:
                    d.pop(k, None)
                collection.update_one(filter=key_value, update={"$set": d}, upsert=True)
                self.stats.inc_value("item2db_updated/{}".format(item_type))
        return item
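    # After a crawl, the stats collector holds counters such as
    # "item2db_inserted/<collection>" and "item2db_updated/<collection>",
    # one pair per item type, visible in Scrapy's crawl-end stats dump.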
    def close_spider(self, spider):
        self.client.close()

    @staticmethod
    def _get_item_type(item) -> str:
        """Get the item type, which doubles as the MongoDB collection name."""
        if hasattr(item, '__tablename__'):
            return item.__class__.__tablename__
        return 'items_null_table'
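
# A minimal sketch of the item convention _get_item_type relies on. The
# class and field names are hypothetical examples, not this project's
# actual item definitions:
#
#   import scrapy
#
#   class ArticleItem(scrapy.Item):
#       __tablename__ = "article"  # becomes the MongoDB collection name
#       title = scrapy.Field()
#       doi = scrapy.Field()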