Friday, 17 May 2019

Unable to pass arguments between methods without sending requests

I've created a script in Python using Scrapy in combination with Selenium to parse the links to different restaurants from the site's main page and then parse the name of each restaurant from its inner page.

How can I use callbacks (or otherwise pass arguments between methods) without sending extra requests when I use Scrapy in combination with Selenium?
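For reference, the standard Scrapy way to pass data between callbacks is the meta dict attached to a Request (newer Scrapy versions also offer cb_kwargs), but that data always travels with a request. Below is a minimal plain-Scrapy sketch of that mechanism, assuming for illustration that the listing pages render without JavaScript; the spider name is hypothetical:

import scrapy

class MetaDemoSpider(scrapy.Spider):
    # hypothetical spider: shows Request.meta carrying data between callbacks
    name = "meta_demo"
    start_urls = ['https://www.yellowpages.com/search?search_terms=Pizza+Hut&geo_location_terms=San+Francisco%2C+CA']

    def parse(self, response):
        for href in response.css(".v-card .info a.business-name::attr(href)").getall():
            # whatever is stored in meta reappears on response.meta downstream
            yield response.follow(href, callback=self.parse_info,
                                  meta={"listing_url": response.urljoin(href)})

    def parse_info(self, response):
        yield {"title": response.css(".sales-info > h1::text").get(),
               "listing_url": response.meta["listing_url"]}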

The following script works, but every callback re-fetches the page with self.driver.get(response.url), a duplicate download I can't get rid of:

import scrapy
from selenium import webdriver
from scrapy.crawler import CrawlerProcess
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

class YPageSpider(scrapy.Spider):
    name = "yellowpages"
    link = 'https://www.yellowpages.com/search?search_terms=Pizza+Hut&geo_location_terms=San+Francisco%2C+CA'

    def start_requests(self):
        # one shared browser and wait object for the whole crawl
        self.driver = webdriver.Chrome()
        self.wait = WebDriverWait(self.driver, 10)
        yield scrapy.Request(self.link, callback=self.parse)

    def parse(self, response):
        # Scrapy has already downloaded response.url; this fetches it a
        # second time in the browser, the duplication I want to eliminate
        self.driver.get(response.url)
        # read the hrefs before yielding: once the driver navigates away in
        # parse_info, these WebElement references go stale
        links = [elem.get_attribute("href") for elem in self.wait.until(
            EC.presence_of_all_elements_located(
                (By.CSS_SELECTOR, ".v-card .info a.business-name")))]
        for link in links:
            yield scrapy.Request(link, callback=self.parse_info)

    def parse_info(self, response):
        # same double fetch here: once by Scrapy, once by Selenium
        self.driver.get(response.url)
        elem = self.wait.until(EC.presence_of_element_located(
            (By.CSS_SELECTOR, ".sales-info > h1"))).text
        yield {"title": elem}

    def closed(self, reason):
        # quit the browser when the spider finishes
        self.driver.quit()

if __name__ == '__main__':
    c = CrawlerProcess()
    c.crawl(YPageSpider)
    c.start()
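One way to sidestep the duplicated downloads entirely is to let Selenium do all of the navigation and yield items from a single callback, so no second scrapy.Request (and hence no extra self.driver.get()) is needed for the inner pages. A rough sketch along those lines, reusing the selectors from the spider above; the trade-off is that the inner pages are then fetched sequentially rather than through Scrapy's concurrent downloader:

import scrapy
from selenium import webdriver
from scrapy.crawler import CrawlerProcess
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

class YPageSeleniumOnlySpider(scrapy.Spider):
    # hypothetical variant: Scrapy issues only the seed request; Selenium
    # fetches everything, so no page is downloaded twice
    name = "yellowpages_selenium_only"
    link = 'https://www.yellowpages.com/search?search_terms=Pizza+Hut&geo_location_terms=San+Francisco%2C+CA'

    def start_requests(self):
        self.driver = webdriver.Chrome()
        self.wait = WebDriverWait(self.driver, 10)
        yield scrapy.Request(self.link, callback=self.parse)

    def parse(self, response):
        self.driver.get(response.url)
        # collect the hrefs first; navigating away would stale the elements
        links = [elem.get_attribute("href") for elem in self.wait.until(
            EC.presence_of_all_elements_located(
                (By.CSS_SELECTOR, ".v-card .info a.business-name")))]
        for link in links:
            self.driver.get(link)  # Selenium alone fetches the inner page
            title = self.wait.until(EC.presence_of_element_located(
                (By.CSS_SELECTOR, ".sales-info > h1"))).text
            yield {"title": title}

    def closed(self, reason):
        self.driver.quit()

if __name__ == '__main__':
    c = CrawlerProcess()
    c.crawl(YPageSeleniumOnlySpider)
    c.start()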



