The following snippet is the run() method of Scrapy's built-in shell command (scrapy/commands/shell.py). It resolves the target URL and spider class, boots a crawler with a persistent engine, and launches the interactive shell.
# Excerpt from scrapy/commands/shell.py (imports added for context)
from scrapy.commands import ScrapyCommand
from scrapy.http import Request
from scrapy.shell import Shell
from scrapy.utils.spider import spidercls_for_request, DefaultSpider
from scrapy.utils.url import guess_scheme


class Command(ScrapyCommand):
    def run(self, args, opts):
        url = args[0] if args else None
        if url:
            # first argument may be a local file
            url = guess_scheme(url)

        spider_loader = self.crawler_process.spider_loader

        spidercls = DefaultSpider
        if opts.spider:
            spidercls = spider_loader.load(opts.spider)
        elif url:
            spidercls = spidercls_for_request(spider_loader, Request(url),
                                              spidercls, log_multiple=True)

        # The crawler is created this way since the Shell manually handles
        # the crawling engine, so the setup in the crawl method won't work
        crawler = self.crawler_process._create_crawler(spidercls)
        # The Shell class needs a persistent engine in the crawler
        crawler.engine = crawler._create_engine()
        crawler.engine.start()

        self._start_crawler_thread()

        shell = Shell(crawler, update_vars=self.update_vars, code=opts.code)
        shell.start(url=url, redirect=not opts.no_redirect)
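For context: this command backs the `scrapy shell <url>` CLI entry point, where the `-c` option supplies opts.code, `--spider` supplies opts.spider, and `--no-redirect` sets opts.no_redirect. The same interactive shell can also be opened from inside a running spider via scrapy.shell.inspect_response. Below is a minimal sketch of that second entry point; the spider name and start URL are illustrative assumptions, not part of the snippet above.

import scrapy
from scrapy.shell import inspect_response


class DebugSpider(scrapy.Spider):
    # Illustrative spider; the name and start URL are assumptions.
    name = "debug"
    start_urls = ["https://quotes.toscrape.com/"]

    def parse(self, response):
        # Pauses the crawl and drops into the interactive shell with
        # `response`, `request`, and related objects pre-populated,
        # much like Shell.start(url=...) does in run() above.
        inspect_response(response, self)

The persistent engine created in run() is what makes this interactivity possible: crawl() normally owns the engine's lifecycle, so the command wires up crawler.engine by hand and runs it on a background thread (_start_crawler_thread), leaving the foreground thread free for the shell prompt.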