You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/anaconda3/envs/scrapy-splash/bin/scrapy", line 8, in <module>
sys.exit(execute())
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 149, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 89, in _run_print_help
func(*a, **kw)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 156, in _run_command
cmd.run(args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/commands/crawl.py", line 57, in run
self.crawler_process.crawl(spname, **opts.spargs)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 167, in crawl
crawler = self.create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 195, in create_crawler
return self._create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 199, in _create_crawler
spidercls = self.spider_loader.load(spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/spiderloader.py", line 71, in load
raise KeyError("Spider not found: {}".format(spider_name))
KeyError: 'Spider not found: facebook'
(scrapy-splash) user@user-desktop:~/mss$ ls
data mss README.md requirements.txt scrapy.cfg
(scrapy-splash) user@user-desktop:~/mss$ cd mss/
(scrapy-splash) user@user-desktop:~/mss/mss$ ls
__init__.py items.py pipelines.py __pycache__ settings.py spiders utils
(scrapy-splash) user@user-desktop:~/mss/mss$ cd spiders/
(scrapy-splash) user@user-desktop:~/mss/mss/spiders$ scrapy crawl facebook
2022-10-11 08:40:41 [scrapy.utils.log] INFO: Scrapy 1.4.0 started (bot: mss)
2022-10-11 08:40:41 [scrapy.utils.log] INFO: Overridden settings: {'BOT_NAME': 'mss', 'NEWSPIDER_MODULE': 'mss.spiders', 'SPIDER_MODULES': ['mss.spiders'], 'USER_AGENT': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0'}
Traceback (most recent call last):
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/spiderloader.py", line 69, in load
return self._spiders[spider_name]
KeyError: 'facebook'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/anaconda3/envs/scrapy-splash/bin/scrapy", line 8, in <module>
sys.exit(execute())
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 149, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 89, in _run_print_help
func(*a, **kw)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 156, in _run_command
cmd.run(args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/commands/crawl.py", line 57, in run
self.crawler_process.crawl(spname, **opts.spargs)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 167, in crawl
crawler = self.create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 195, in create_crawler
return self._create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 199, in _create_crawler
spidercls = self.spider_loader.load(spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/spiderloader.py", line 71, in load
raise KeyError("Spider not found: {}".format(spider_name))
KeyError: 'Spider not found: facebook'
Running on conda with python==3.8 on ubuntu 22.04LTS kernel 5.15.0-48-generic
The text was updated successfully, but these errors were encountered:
(scrapy-splash) user@user-desktop:~/mss$ scrapy crawl facebook
2022-10-11 08:39:55 [scrapy.utils.log] INFO: Scrapy 1.4.0 started (bot: mss)
2022-10-11 08:39:55 [scrapy.utils.log] INFO: Overridden settings: {'BOT_NAME': 'mss', 'NEWSPIDER_MODULE': 'mss.spiders', 'SPIDER_MODULES': ['mss.spiders'], 'USER_AGENT': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0'}
Traceback (most recent call last):
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/spiderloader.py", line 69, in load
return self._spiders[spider_name]
KeyError: 'facebook'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/anaconda3/envs/scrapy-splash/bin/scrapy", line 8, in <module>
sys.exit(execute())
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 149, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 89, in _run_print_help
func(*a, **kw)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 156, in _run_command
cmd.run(args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/commands/crawl.py", line 57, in run
self.crawler_process.crawl(spname, **opts.spargs)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 167, in crawl
crawler = self.create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 195, in create_crawler
return self._create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 199, in _create_crawler
spidercls = self.spider_loader.load(spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/spiderloader.py", line 71, in load
raise KeyError("Spider not found: {}".format(spider_name))
KeyError: 'Spider not found: facebook'
(scrapy-splash) user@user-desktop:~/mss$ ls
data mss README.md requirements.txt scrapy.cfg
(scrapy-splash) user@user-desktop:~/mss$ cd mss/
(scrapy-splash) user@user-desktop:~/mss/mss$ ls
__init__.py items.py pipelines.py __pycache__ settings.py spiders utils
(scrapy-splash) user@user-desktop:~/mss/mss$ cd spiders/
(scrapy-splash) user@user-desktop:~/mss/mss/spiders$ scrapy crawl facebook
2022-10-11 08:40:41 [scrapy.utils.log] INFO: Scrapy 1.4.0 started (bot: mss)
2022-10-11 08:40:41 [scrapy.utils.log] INFO: Overridden settings: {'BOT_NAME': 'mss', 'NEWSPIDER_MODULE': 'mss.spiders', 'SPIDER_MODULES': ['mss.spiders'], 'USER_AGENT': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0'}
Traceback (most recent call last):
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/spiderloader.py", line 69, in load
return self._spiders[spider_name]
KeyError: 'facebook'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/anaconda3/envs/scrapy-splash/bin/scrapy", line 8, in <module>
sys.exit(execute())
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 149, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 89, in _run_print_help
func(*a, **kw)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/cmdline.py", line 156, in _run_command
cmd.run(args, opts)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/commands/crawl.py", line 57, in run
self.crawler_process.crawl(spname, **opts.spargs)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 167, in crawl
crawler = self.create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 195, in create_crawler
return self._create_crawler(crawler_or_spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/crawler.py", line 199, in _create_crawler
spidercls = self.spider_loader.load(spidercls)
File "/home/user/anaconda3/envs/scrapy-splash/lib/python3.8/site-packages/scrapy/spiderloader.py", line 71, in load
raise KeyError("Spider not found: {}".format(spider_name))
KeyError: 'Spider not found: facebook'
Running on conda with python==3.8 on ubuntu 22.04LTS kernel 5.15.0-48-generic
The text was updated successfully, but these errors were encountered: