标签:except custom 包含 配置 test lin spider general get
class UserInfoItem(scrapy.Item):
    """Item for user profile data; carries its own insert/dedup SQL.

    The pipeline calls distinct_data() first to check for an existing row,
    then get_insert_sql() to insert when the uid is new.
    """
    uid = scrapy.Field()      # user ID
    name = scrapy.Field()     # user name
    general = scrapy.Field()  # user gender (field name kept as in source)

    def get_insert_sql(self):
        """Return (sql, params) for inserting this item.

        The original body was a non-SQL placeholder (`insert into table
        Valuse()`) with an undefined name `xxx` as params; reconstructed to
        mirror FansInfoItem's working pattern against the `userinfo` table
        that distinct_data() already queries.
        """
        insert_sql = (
            """insert into userinfo (uid, name, general) values(%s, %s, %s)"""
        )
        params = (self["uid"], self["name"], self["general"])
        return insert_sql, params

    def distinct_data(self):
        """Return (sql, params) checking whether this item's uid exists.

        Was hard-coded to (0,), so the duplicate check never tested the
        actual item; now binds this item's uid (as a 1-tuple for DB-API).
        """
        query = """select uid from userinfo where uid=%s"""
        params = (self["uid"],)
        return query, params
class UserinfoPipline(object):
    """Pipeline that de-duplicates items by their own query and inserts
    new rows into MySQL.

    Fixes vs. original: `classs` keyword typo, U+2018 smart quotes in all
    string literals (syntax errors), `self.setting` -> `self.settings`
    (AttributeError), `cursor.excute` -> `cursor.execute` (AttributeError),
    and the bare `except: pass` that silently swallowed every error.
    """

    def __init__(self):
        self.settings = get_project_settings()
        self.host = self.settings['MYSQL_HOST']
        self.port = self.settings['MYSQL_PORT']
        self.user = self.settings['MYSQL_USER']
        self.passwd = self.settings['MYSQL_PASSWD']
        self.db = self.settings['MYSQL_DBNAME']
        # Connect to the database. `port` was read from settings but never
        # passed to connect() in the original; pass it through.
        self.connect = pymysql.connect(
            host=self.host,
            port=self.port,
            db=self.db,
            user=self.user,
            passwd=self.passwd,
            charset='utf8',
            use_unicode=False,
        )
        # Cursor used for all insert/select statements.
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        try:
            # Ask the item for its own duplicate-check query.
            sql, params = item.distinct_data()
            self.cursor.execute(sql, params)
            data = self.cursor.fetchone()
            if not data:
                # Not seen before: insert with the item's own SQL.
                sql, params = item.get_insert_sql()
                self.cursor.execute(sql, params)
                self.connect.commit()
        except Exception:
            # Keep the original best-effort behavior (don't crash the
            # crawl), but roll back so the connection stays usable.
            self.connect.rollback()
        # Scrapy pipelines must return the item so later pipelines see it.
        return item
# Register the pipeline in settings.py. The original used U+2018 smart
# quotes (a scrape artifact), which is a SyntaxError; module path spelling
# ("piplines"/"UserinfoPipline") kept as-is to match the project's files.
ITEM_PIPELINES = {
    'UserInfo.piplines.UserinfoPipline': 300,
}
如下，在同一个Scrapy项目中存在多个spider，一般情况下，会在pipeline中进行item类型判断，如果是对应的item，就执行对应的数据库操作
def do_insert(self, cursor, item):
    """Dispatch the concrete insert based on the item's class.

    Builds a different SQL statement per item type and inserts it into
    MySQL. Original referenced the misspelled name `FansInfoTiem`, which
    would raise NameError on the first non-UserInfoItem item.
    """
    if isinstance(item, UserInfoItem):
        pass  # perform the user-info insert
    elif isinstance(item, FansInfoItem):
        pass  # perform the fan-info insert
这个方法随着spider和item的增多会变得越来越臃肿，可以将多个spider的item单独存放到一个items文件中，每个item类都包含自己的数据库插入方法
class FansInfoItem(scrapy.Item):
    """Item for fan data; carries its own insert/dedup SQL so a generic
    pipeline can persist it without type-specific branches."""
    fan_id = scrapy.Field()    # fan ID
    fan_name = scrapy.Field()  # fan name
    fan_time = scrapy.Field()  # timestamp associated with the fan record

    def get_insert_sql(self):
        """Return (sql, params) for inserting this fan into fan_table."""
        insert_sql = """insert into fan_table (fan_id, fan_name, fan_time) values(%s, %s, %s)"""
        params = (self["fan_id"], self["fan_name"], self["fan_time"])
        return insert_sql, params

    def distinct_data(self):
        """Return (sql, params) checking whether this fan_id exists.

        Bug fix: `(self["fan_id"])` is just the value, not a 1-tuple —
        DB-API drivers need a sequence, so a string fan_id would be split
        into characters. Trailing comma added.
        """
        query = """select fan_id from fan_table where fan_id=%s"""
        params = (self["fan_id"],)
        return query, params
这样，pipeline的通用性会提高很多
有些时候,我们可能需要存入不同的数据库中,而settings中只能设置一个数据库的资料,那么这时候我们就需要使用custom_settings参数来为每一个spider配置对应的pipeline。不过scrapy版本必须是1.1以上
class Test1(scrapy.Spider):
    """Spider routed to its own pipeline via custom_settings (Scrapy >= 1.1).

    Original used U+2018 smart quotes in the string literals (SyntaxError);
    replaced with ASCII quotes. Pipeline path spelling kept as written.
    """
    name = "test1"
    custom_settings = {
        'ITEM_PIPELINES': {'xxxx.piplines.TestPipline1': 301},
    }
class Test2(scrapy.Spider):
    """Second spider, routed to a second pipeline via custom_settings.

    Original had smart quotes (SyntaxError) and a truncated pipeline path
    'xxxx.piplines.' with no class name or priority; completed to mirror
    Test1's pattern with the next priority value.
    """
    name = "test2"
    custom_settings = {
        'ITEM_PIPELINES': {'xxxx.piplines.TestPipline2': 302},
    }
在settings里面配置pipeline:
# Both pipelines registered globally in settings.py; each spider's
# custom_settings above then narrows to the one it needs. U+2018 smart
# quotes (scrape artifact, SyntaxError) replaced with ASCII quotes.
ITEM_PIPELINES = {
    'xxxx.piplines.TestPipeline1': 301,
    'xxxx.piplines.TestPipeline2': 302,
}
Scrapy多个spider情况下pipline、item设置
标签:except custom 包含 配置 test lin spider general get
原文地址:https://www.cnblogs.com/zhenyauntg/p/13186542.html