Python 将数据从spider管道传输到sqlalchemy表时发生导入错误

Python 将数据从spider管道传输到sqlalchemy表时发生导入错误,python,sqlalchemy,scrapy,Python,Sqlalchemy,Scrapy,这是项目树的外观: rym_chart_scraper ├───scrapy.cfg ├───rym_chart_scraper │ ├───__init__.py │ ├───items.py │ ├───models.py ├───pipelines.py ├───settings.py ├───spiders ├───my_spider.py ├───__init__.py 管道.py from models impor

这是项目树的外观:

rym_chart_scraper
├───scrapy.cfg
├───rym_chart_scraper
│   ├───__init__.py
│   ├───items.py
│   ├───models.py
    ├───pipelines.py
    ├───settings.py
    ├───spiders
        ├───my_spider.py
        ├───__init__.py
pipelines.py 的内容如下:

from models import TopAlbums, db_connect, create_topalbums_table
from sqlalchemy.orm import sessionmaker


class TopAlbumPipeline:
    """Scrapy item pipeline that persists scraped items as TopAlbums rows.

    The engine and Session factory are created once when Scrapy
    instantiates the pipeline; each processed item gets its own
    short-lived session.
    """

    def __init__(self):
        # Connect once and ensure the destination table exists before
        # any item is processed.
        engine = db_connect()
        create_topalbums_table(engine)
        self.Session = sessionmaker(bind=engine)

    def process_item(self, item, spider):
        """Insert *item* into the top_albums table and return it unchanged.

        On a database error the transaction is rolled back and the
        exception re-raised so Scrapy logs the failure; the session is
        always closed.
        """
        session = self.Session()
        topalbums = TopAlbums(**item)

        try:
            session.add(topalbums)
            session.commit()
        except Exception:
            # `except Exception` (not a bare `except:`) so that
            # KeyboardInterrupt/SystemExit are not intercepted; the
            # failed transaction is rolled back and the error re-raised.
            session.rollback()
            raise
        finally:
            session.close()

        return item
models.py

from sqlalchemy import create_engine
from sqlalchemy.engine.url import URL
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, DateTime

import settings

Base = declarative_base()


def db_connect():
    """Build and return a SQLAlchemy engine from settings.DATABASE.

    settings.DATABASE is expected to be a dict of keyword arguments
    accepted by sqlalchemy.engine.url.URL (drivername, host, ...).
    """
    database_url = URL(**settings.DATABASE)
    return create_engine(database_url)


def create_topalbums_table(engine):
    """Create every table registered on Base (including top_albums).

    `create_all` is a no-op for tables that already exist, so this is
    safe to call on every pipeline start-up.
    """
    metadata = Base.metadata
    metadata.create_all(engine)


class TopAlbums(Base):
    """ORM model for one row of the `top_albums` chart table."""

    __tablename__ = 'top_albums'

    # Surrogate primary key; the remaining fields are stored as scraped.
    # When no explicit name is given, SQLAlchemy names each column after
    # its attribute, so the resulting schema is unchanged.
    id = Column(Integer, primary_key=True)
    Artist = Column(String)
    Album = Column(String)
    Chart_year = Column(String)
    Genre = Column(String)
    Ratings = Column(Integer)
    Reviews = Column(Integer)
    Date = Column(DateTime)
还有爬虫(spider)文件 my_spider.py:

from scrapy import Spider, Request
from rym_chart_scraper.utility import find_between, listToString
from rym_chart_scraper.items import TopAlbumChartItem
from datetime import datetime


class TopAlbumChartSpider(Spider):
    """Scrape album/stats pairs from rateyourmusic's all-time album chart.

    NOTE(review): this excerpt is abridged — the code that builds
    ``item`` has been elided (the ``...`` placeholder below), so
    ``yield item`` refers to a name defined only in the omitted lines.
    """

    name = "top_music_charts"
    allowed_domains = ['rateyourmusic.com']

    start_urls = [
        "https://rateyourmusic.com/charts/top/album/all-time"
    ]

    # Count of pages visited so far; used to cap the crawl below.
    n_pages = 1

    def parse(self, response):
        """Yield one item per (chart_main, chart_stats) div pair, then
        follow the next-page link until 30 pages have been crawled."""

        for album, stats in zip(response.css('div.chart_main'),
                                response.css('div.chart_stats')):
            # Item construction elided in this excerpt.
            ... 

            yield item

        next_page = response.css('a.navlinknext')[0].css(
            'a::attr(href)').extract_first()
        if next_page is not None:
            next_page = response.urljoin(next_page)
            self.n_pages += 1
            if self.n_pages < 31:
                yield Request(next_page, callback=self.parse)
我得到以下导入错误

2016-12-11 17:46:41 [twisted] CRITICAL: 
Traceback (most recent call last):
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/twisted/internet/defer.py", line 1299, in _inlineCallbacks
    result = g.send(result)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/crawler.py", line 72, in crawl
    self.engine = self._create_engine()
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/crawler.py", line 97, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/core/engine.py", line 69, in __init__
    self.scraper = Scraper(crawler)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/core/scraper.py", line 71, in __init__
    self.itemproc = itemproc_cls.from_crawler(crawler)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/middleware.py", line 58, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/middleware.py", line 34, in from_settings
    mwcls = load_object(clspath)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/utils/misc.py", line 44, in load_object
    mod = import_module(module)
  File "/Users/baasman/anaconda/lib/python3.5/importlib/__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 986, in _gcd_import
  File "<frozen importlib._bootstrap>", line 969, in _find_and_load
  File "<frozen importlib._bootstrap>", line 958, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 673, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 665, in exec_module
  File "<frozen importlib._bootstrap>", line 222, in _call_with_frames_removed
  File "/Users/baasman/Documents/python-workspace/rym_chart_scraper/rym_chart_scraper/pipelines.py", line 1, in <module>
    from models import TopAlbums, db_connect, create_topalbums_table
ImportError: No module named 'models'
2016-12-11 17:46:41[twisted]严重:
回溯(最近一次呼叫最后一次):
文件“/Users/baasman/anaconda/lib/python3.5/site packages/twisted/internet/defer.py”,第1299行,在内联回调中
结果=g.send(结果)
文件“/Users/baasman/anaconda/lib/python3.5/site packages/scrapy/crawler.py”,第72行,在爬网中
self.engine=self.\u创建\u引擎()
文件“/Users/baasman/anaconda/lib/python3.5/site packages/scrapy/crawler.py”,第97行,在创建引擎中
返回ExecutionEngine(self,lambda:self.stop())
文件“/Users/baasman/anaconda/lib/python3.5/site packages/scrapy/core/engine.py”,第69行,在__
    self.scraper = Scraper(crawler)
文件“/Users/baasman/anaconda/lib/python3.5/site packages/scrapy/core/scraper.py”,第71行,在__
    self.itemproc = itemproc_cls.from_crawler(crawler)
文件“/Users/baasman/anaconda/lib/python3.5/site packages/scrapy/middleware.py”,第58行,摘自爬虫程序
返回cls.from_设置(crawler.settings,crawler)
文件“/Users/baasman/anaconda/lib/python3.5/site packages/scrapy/middleware.py”,第34行,在from_设置中
mwcls=加载对象(clspath)
文件“/Users/baasman/anaconda/lib/python3.5/site packages/scrapy/utils/misc.py”,第44行,在load_对象中
mod=导入模块(模块)
文件“/Users/baasman/anaconda/lib/python3.5/importlib/_init__.py”,第126行,在导入模块中
return _bootstrap._gcd_import(名称[级别:],包,级别)
文件“”,第986行,在_gcd_import中
文件“”,第969行,在“查找”和“加载”中
文件“”,第958行,在“查找”和“加载”中解锁
文件“”,第673行,在“加载”中
exec_模块中第665行的文件“”
文件“”,第222行,在已删除帧的调用中
文件“/Users/baasman/Documents/python workspace/rym_chart_scraper/rym_chart_scraper/pipelines.py”,第1行,在
从模型导入TopAlbums、数据库连接、创建TopAlbums表
ImportError:没有名为“models”的模块

只有从命令行实际运行 spider 时才会出现该导入错误;在 main 中以交互方式导入 'models' 模块则不会报错。是这个项目的结构有问题,还是我犯了其他低级错误?由于某些原因,我一直无法解决这个问题。

您是用 scrapy startproject 创建的项目吗?是的,我按照教程自己在那里添加了 models.py,但在文档中找不到任何相关信息。那么改用 from rym_chart_scraper.models import ... 导入
呢…
很好,它确实导入了模型模块,但现在无法导入设置,这很有趣,因为我能够在添加项目管道之前很好地导入它们。请从rym_chart_scraper导入设置中执行相同的
,或者最好只导入所需的设置。创建 scrapy 项目时用的是什么方式?是的,我按照教程自己在那里添加了 models.py,但在文档中找不到任何相关信息。那么改用 rym_chart_scraper.models 导入
呢…
很好,它确实导入了模型模块,但现在无法导入设置,这很有趣,因为我能够在添加项目管道之前很好地导入它们。请从rym_chart_scraper导入设置执行相同的
,或者最好只导入您需要的
2016-12-11 17:46:41 [twisted] CRITICAL: 
Traceback (most recent call last):
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/twisted/internet/defer.py", line 1299, in _inlineCallbacks
    result = g.send(result)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/crawler.py", line 72, in crawl
    self.engine = self._create_engine()
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/crawler.py", line 97, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/core/engine.py", line 69, in __init__
    self.scraper = Scraper(crawler)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/core/scraper.py", line 71, in __init__
    self.itemproc = itemproc_cls.from_crawler(crawler)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/middleware.py", line 58, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/middleware.py", line 34, in from_settings
    mwcls = load_object(clspath)
  File "/Users/baasman/anaconda/lib/python3.5/site-packages/scrapy/utils/misc.py", line 44, in load_object
    mod = import_module(module)
  File "/Users/baasman/anaconda/lib/python3.5/importlib/__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 986, in _gcd_import
  File "<frozen importlib._bootstrap>", line 969, in _find_and_load
  File "<frozen importlib._bootstrap>", line 958, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 673, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 665, in exec_module
  File "<frozen importlib._bootstrap>", line 222, in _call_with_frames_removed
  File "/Users/baasman/Documents/python-workspace/rym_chart_scraper/rym_chart_scraper/pipelines.py", line 1, in <module>
    from models import TopAlbums, db_connect, create_topalbums_table
ImportError: No module named 'models'