Python 操作 MongoDB(pymongo)及 Scrapy Item Pipeline 入库示例

来源:转载


# -*- coding: utf-8 -*-
"""Minimal pymongo demo: insert a few documents, list the collection, clean up.

Requires a MongoDB server listening on the default localhost:27017.
"""
import pymongo

connection = pymongo.MongoClient()
tdb = connection.test          # database "test"
post_info = tdb.abc            # collection "abc"

jike = {'name': u'极客', 'age': '5', 'skill': 'Python'}
god = {'name': u'玉皇大帝', 'age': 36000, 'skill': 'creatanything',
       'other': u'王母娘娘不是他的老婆'}
godslaver = {'name': u'月老', 'age': 'unknown', 'other': u'他的老婆叫孟婆'}

# insert_one() replaces the deprecated Collection.insert() (removed in pymongo 4).
post_info.insert_one(jike)
post_info.insert_one(god)
post_info.insert_one(godslaver)

# Create the cursor after the inserts so the listing reflects them
# (the original created it first, relying on pymongo's lazy cursors).
for document in post_info.find():
    print(document)

# delete_many() replaces the deprecated Collection.remove().
post_info.delete_many({'name': u'极客'})
# Original used a Python 2 print statement; print() works on both 2 and 3 here.
print(u'操作数据库完成!')

Scrapy 项目内的 pipelines.py(将抓取到的 item 写入 MongoDB):

# -*- coding: utf-8 -*-
# Define your item pipelines here.
# Don't forget to add your pipeline to the ITEM_PIPELINES setting.
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from items import NovelspiderItem  # NOTE(review): unused here per the original author; kept in case another part of the file needs it
from scrapy.conf import settings   # NOTE(review): scrapy.conf is deprecated/removed in modern Scrapy — prefer crawler.settings via from_crawler; confirm the project's Scrapy version
import pymongo


class NovelspiderPipeline(object):
    """Persist every scraped item as a document in a MongoDB collection.

    Connection parameters (MONGODB_HOST, MONGODB_PORT, MONGODB_DBNAME,
    MONGODB_DOCNAME) are read from the Scrapy project settings.
    """

    def __init__(self):
        host = settings['MONGODB_HOST']
        port = settings['MONGODB_PORT']
        db_name = settings['MONGODB_DBNAME']
        client = pymongo.MongoClient(host=host, port=port)
        tdb = client[db_name]
        # Target collection; one document will be inserted per item.
        self.post = tdb[settings['MONGODB_DOCNAME']]

    def process_item(self, item, spider):
        """Insert the item into MongoDB and pass it on down the pipeline."""
        book_info = dict(item)
        # insert_one() replaces the deprecated Collection.insert()
        # (removed entirely in pymongo 4).
        self.post.insert_one(book_info)
        return item



分享给朋友:
您可能感兴趣的文章:
随机阅读: