Scrapy: Storing Scraped Data in MySQL

settings.py

ITEM_PIPELINES = {
    'IAChina.pipelines.IachinaPipeline': 300,
}

DBKWARGS = {'db': 'iachina', 'user': 'root', 'password': '',
            'host': 'localhost', 'use_unicode': True, 'charset': 'utf8'}
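
The pipeline below inserts into a table named info with the five columns COMPANY, TYPE, PRODUCT, CLAUSE and CLAUSE_URL. The post does not show the table definition, so the sketch below is an assumption: it creates the table with VARCHAR/TEXT columns (adjust lengths and types to your data) using the same DBKWARGS.

import MySQLdb

DBKWARGS = {'db': 'iachina', 'user': 'root', 'password': '',
            'host': 'localhost', 'use_unicode': True, 'charset': 'utf8'}

con = MySQLdb.connect(**DBKWARGS)
cur = con.cursor()
# Column types are assumptions; the post only names the columns.
cur.execute("""
    CREATE TABLE IF NOT EXISTS info (
        COMPANY    VARCHAR(255),
        TYPE       VARCHAR(255),
        PRODUCT    VARCHAR(255),
        CLAUSE     TEXT,
        CLAUSE_URL VARCHAR(512)
    ) DEFAULT CHARSET=utf8
""")
con.commit()
cur.close()
con.close()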

 

pipelines.py

import MySQLdb

class IachinaPipeline(object):
    def process_item(self, item, spider):
        # Read the connection settings defined in settings.py.
        DBKWARGS = spider.settings.get('DBKWARGS')
        # Note: a new connection is opened and closed for every item.
        con = MySQLdb.connect(**DBKWARGS)
        cur = con.cursor()
        sql = ("insert into info(COMPANY,TYPE,PRODUCT,CLAUSE,CLAUSE_URL) "
               "values(%s,%s,%s,%s,%s)")
        values = (item['COMPANY'], item['TYPE'], item['PRODUCT'],
                  item['CLAUSE'], item['CLAUSE_URL'])
        try:
            cur.execute(sql, values)
        except Exception as e:
            print("Insert error:", e)
            con.rollback()  # roll back so the failed insert leaves no partial data
        else:
            con.commit()  # the insert only takes effect after the commit
        cur.close()
        con.close()
        return item
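
The pipeline above opens and closes a MySQL connection for every item, which is wasteful on larger crawls. A common refinement (not part of the original post) is to open one connection in open_spider and release it in close_spider, which Scrapy calls once per crawl. A minimal sketch, assuming the same DBKWARGS setting and info table:

import MySQLdb

class IachinaPipeline(object):
    def open_spider(self, spider):
        # Open a single connection when the spider starts.
        self.con = MySQLdb.connect(**spider.settings.get('DBKWARGS'))
        self.cur = self.con.cursor()

    def close_spider(self, spider):
        # Release the connection when the spider finishes.
        self.cur.close()
        self.con.close()

    def process_item(self, item, spider):
        sql = ("insert into info(COMPANY,TYPE,PRODUCT,CLAUSE,CLAUSE_URL) "
               "values(%s,%s,%s,%s,%s)")
        values = (item['COMPANY'], item['TYPE'], item['PRODUCT'],
                  item['CLAUSE'], item['CLAUSE_URL'])
        try:
            self.cur.execute(sql, values)
            self.con.commit()
        except Exception as e:
            print("Insert error:", e)
            self.con.rollback()
        return item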