diff --git a/science_article_add/science_article_add/settings.py b/science_article_add/science_article_add/settings.py
index 9316b79..56869f4 100644
--- a/science_article_add/science_article_add/settings.py
+++ b/science_article_add/science_article_add/settings.py
@@ -14,9 +14,8 @@
 NEWSPIDER_MODULE = "science_article_add.spiders"
 
 ADDONS = {}
-
 # Crawl responsibly by identifying yourself (and your website) on the user-agent
-#USER_AGENT = "science_article_add (+http://www.yourdomain.com)"
+USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36'
 
 # Obey robots.txt rules
 ROBOTSTXT_OBEY = False
@@ -48,26 +47,42 @@
 DOWNLOAD_DELAY = 1
 # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
 RETRY_ENABLED = True
 RETRY_TIMES = 2 # 重试3次
-# RETRY_HTTP_CODES = [500, 502, 503, 504, 408, 400, 403, 404] # 增加了一些常见的错误码
+RETRY_HTTP_CODES = [500, 502, 503, 504, 408, 400, 403, 404] # 增加了一些常见的错误码
+DOWNLOADER_MIDDLEWARES = {
+    'scrapy.downloadermiddlewares.retry.RetryMiddleware': 550
+    # "org_news.middlewares.OrgNewsDownloaderMiddleware": 543,
+}
 #DOWNLOADER_MIDDLEWARES = {
 #    "science_article_add.middlewares.ScienceArticleAddDownloaderMiddleware": 543,
 #}
 
 # Enable or disable extensions
 # See https://docs.scrapy.org/en/latest/topics/extensions.html
-#EXTENSIONS = {
-#    "scrapy.extensions.telnet.TelnetConsole": None,
-#}
+EXTENSIONS = {
+    # "scrapy.extensions.telnet.TelnetConsole": None,
+    # "science_article_add.extensions.ackextension.ACKExtension": 0,
+    # "science_article_add.extensions.dingtalk_extension.DingTalkExtension": 0,
+}
 
 # Configure item pipelines
 # See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
 #ITEM_PIPELINES = {
 #    "science_article_add.pipelines.ScienceArticleAddPipeline": 300,
 #}
-MONGO_URI = "mongodb://root:123456@192.168.1.211:27017/"
-MONGO_DATABASE = "science2"
+# MONGO_URI = "mongodb://root:123456@192.168.1.211:27017/"
+# MONGO_DATABASE = "science2"
+MONGO_URI = "mongodb://science-dev:kcidea1509!%25)(@101.43.239.105:27017/?authSource=science&directConnection=true"
+MONGO_DATABASE = 'science2'
+
+# REDIS_URL = 'redis://:kcidea1509@192.168.1.211:6379/10'
+REDIS_URL = 'redis://:kcidea1509!%)(@43.140.203.187:6379/10'
-REDIS_URL = 'redis://:kcidea1509@192.168.1.211:6379/10'
+# mysql配置
+MYSQL_HOST = '43.140.203.187'
+MYSQL_PORT = 3306
+MYSQL_DATABASE = 'science_data_dept'
+MYSQL_USER = 'science-data-dept'
+MYSQL_PASSWORD = 'datadept1509'
 
 # Enable and configure the AutoThrottle extension (disabled by default)
 # See https://docs.scrapy.org/en/latest/topics/autothrottle.html
@@ -93,24 +108,25 @@ REDIS_URL = 'redis://:kcidea1509@192.168.1.211:6379/10'
 
 # Set settings whose default value is deprecated to a future-proof value
 FEED_EXPORT_ENCODING = "utf-8"
-
 # 钉钉机器人配置
 DINGTALK_WEBHOOK_URL = 'https://oapi.dingtalk.com/robot/send?access_token=1252fe1ef63e95ced11ac87a01e9978670e82036a516c558e524f89e11513f9f'
 DINGTALK_SECRET = 'SECe77fe7cd6c0dbfcdd9ebe6ba1941ddc376be86ca717e9d68bb177b7eded71091'
-
-
 # 自定义消息模板(可选)
-DINGTALK_START_MESSAGE = "🚀 爬虫启动啦!\n**爬虫**: {spider_name}\n**时间**: {time}"
-DINGTALK_CLOSED_MESSAGE = "✅ 爬虫完成!\n**爬虫**: {spider_name}\n**项目数**: {item_count}"
-
-# 启用扩展
-EXTENSIONS = {
-    'scrapy_example.extensions.dingtalk_extension.DingTalkExtension': 500,
-    # 'scrapy_example.extensions.advanced_dingtalk_extension.AdvancedDingTalkExtension': 100,
-}
-
+DINGTALK_START_MESSAGE = "🚀 爬虫启动啦!\n**爬虫**: %(spider_name)s\n**时间**: %(started_time)s"
+# DINGTALK_CLOSED_MESSAGE = "✅ 爬虫完成!\n**爬虫**: %(spider_name)s\n**项目数**: %(item_scraped_count)s"
 # 启用/禁用特定通知
-DINGTALK_ENABLE_START = True
+DINGTALK_ENABLE_START = False
 DINGTALK_ENABLE_FINISH = True
 DINGTALK_ENABLE_ERROR = True
-
+DINGTALK_CLOSED_MESSAGE = """📊 爬虫完成通知\n
+**爬虫名称**: %(spider_name)s\n
+**机构名称**: %(org_name)s\n
+**任务条件**: %(task_condition)s\n
+**任务ID**: %(record_id)s\n
+**完成时间**: %(finished_time)s\n
+**完成原因**: %(finish_reason)s\n
+**采集统计**:\n
+ - 采集项目: %(item_scraped_count)s 条
+ - 请求响应: %(response_count)s 次
+ - 错误数量: %(error_count)s 个\n
+**状态**: %(state)s"""