How do I set up Grab with my own proxies?
I need to make the asynchronous Grab::Spider run its requests through a proxy.
Right now the code looks like this:
spider.py
from grab import Grab
from grab.spider import Spider, Task


class LinkSpider(Spider):
    def __init__(self, extrapage, homepage):
        super(LinkSpider, self).__init__()
        self.initial_urls = ['some_url']
        self.homepage = homepage
        self.matches = []
        self.OUdict = {}

    def task_initial(self, grab, task):
        # do_smthng (placeholder from the question)
        link = 'https://api.ipify.org/?format=json'
        g = Grab(url=link)
        yield Task('parser', grab=g, link=link)

    def task_parser(self, grab, task):
        response = grab.response.body
        print(response)
And here is the variant where I try to set the proxy:

from grab import Grab
from grab.spider import Spider, Task


class LinkSpider(Spider):
    def __init__(self, extrapage, homepage):
        super(LinkSpider, self).__init__()
        self.initial_urls = ['some_url']
        self.homepage = homepage
        self.matches = []
        self.OUdict = {}

    def task_initial(self, grab, task):
        # do_smthng (placeholder from the question)
        link = 'https://api.ipify.org/?format=json'
        g = Grab()
        g.setup(url=link, proxy="120.52.72.47:80", proxy_type="http")
        yield Task('parser', grab=g, link=link)

    def task_parser(self, grab, task):
        response = grab.response.body
        print(response)
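For reference, here is a minimal sketch of how a Spider can be pointed at proxies at the spider level rather than per-task. It assumes a reasonably recent Grab version where Spider exposes create_grab_instance() and load_proxylist(); the class name ProxySpider, the proxy address, the thread_number value and the proxies.txt file are all placeholders, not anything from the question.

# Sketch: routing all Spider requests through a proxy (assumptions noted above).
from grab.spider import Spider


class ProxySpider(Spider):
    initial_urls = ['https://api.ipify.org/?format=json']

    def create_grab_instance(self, **kwargs):
        # Option 1: pin a single proxy on every Grab object the spider creates.
        g = super(ProxySpider, self).create_grab_instance(**kwargs)
        g.setup(proxy='120.52.72.47:80', proxy_type='http')
        return g

    def task_initial(self, grab, task):
        # ipify should report the proxy's IP, not your own.
        print(grab.doc.body)


if __name__ == '__main__':
    bot = ProxySpider(thread_number=2)
    # Option 2: rotate proxies from a plain text file (one host:port per line).
    # If you use this, drop the create_grab_instance override above.
    # bot.load_proxylist('proxies.txt', 'text_file', proxy_type='http')
    bot.run()

Note that when you build a Grab object yourself and pass it into Task (as in the snippets above), the spider reuses that object's config, so the most explicit route is still to call g.setup(proxy=..., proxy_type=...) on it directly; the override shown here only covers the Grab instances the spider creates on its own.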