[style] Fix pylint style (#246)
* convert string to fstring for src/

* convert string to fstring for tests/

* remove u-string prefix
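The pattern applied across all three changes is the same: `%`-style interpolation becomes an f-string, and the Python 2 era `u''` prefix is dropped (every `str` literal is already Unicode in Python 3, so the prefix is a no-op). A minimal before/after sketch of both changes (names hypothetical, not taken from the diff):

name = "myspider"

# Before: percent formatting plus a redundant u-prefix.
old_key = u'scrapy_redis:tests:%s:queue' % name

# After: an f-string interpolates inline and reads closer to the output.
new_key = f'scrapy_redis:tests:{name}:queue'

assert old_key == new_key  # same value; only the style changes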
LuckyPigeon authored Apr 9, 2022
1 parent 4db36d4 commit cade7d3
Showing 7 changed files with 23 additions and 26 deletions.
16 changes: 8 additions & 8 deletions docs/conf.py
@@ -51,8 +51,8 @@
 master_doc = 'index'

 # General information about the project.
-project = u'Scrapy-Redis'
-copyright = u'2011-2016, Rolando Espinoza'
+project = 'Scrapy-Redis'
+copyright = '2011-2016, Rolando Espinoza'

 # The version info for the project you're documenting, acts as replacement
 # for |version| and |release|, also used in various other places throughout
@@ -205,8 +205,8 @@
 # [howto/manual]).
 latex_documents = [
     ('index', 'scrapy_redis.tex',
-     u'Scrapy-Redis Documentation',
-     u'Rolando Espinoza', 'manual'),
+     'Scrapy-Redis Documentation',
+     'Rolando Espinoza', 'manual'),
 ]

 # The name of an image file (relative to this directory) to place at
@@ -236,8 +236,8 @@
 # (source start file, name, description, authors, manual section).
 man_pages = [
     ('index', 'scrapy_redis',
-     u'Scrapy-Redis Documentation',
-     [u'Rolando Espinoza'], 1)
+     'Scrapy-Redis Documentation',
+     ['Rolando Espinoza'], 1)
 ]

 # If true, show URL addresses after external links.
@@ -251,8 +251,8 @@
 # dir menu entry, description, category)
 texinfo_documents = [
     ('index', 'scrapy_redis',
-     u'Scrapy-Redis Documentation',
-     u'Rolando Espinoza',
+     'Scrapy-Redis Documentation',
+     'Rolando Espinoza',
      'scrapy-redis',
      'One line description of project.',
      'Miscellaneous'),
6 changes: 2 additions & 4 deletions src/scrapy_redis/queue.py
@@ -29,11 +29,9 @@ def __init__(self, server, spider, key, serializer=None):
             # TODO: deprecate pickle.
             serializer = picklecompat
         if not hasattr(serializer, 'loads'):
-            raise TypeError("serializer does not implement 'loads' function: %r"
-                            % serializer)
+            raise TypeError(f"serializer does not implement 'loads' function: {serializer}")
         if not hasattr(serializer, 'dumps'):
-            raise TypeError("serializer '%s' does not implement 'dumps' function: %r"
-                            % serializer)
+            raise TypeError(f"serializer does not implement 'dumps' function: {serializer}")

         self.server = server
         self.spider = spider
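For context on the code being touched: the queue base class only duck-types its serializer, so the two hasattr checks above accept any object exposing loads and dumps. Note that the old 'dumps' branch was also subtly broken, since its format string had two placeholders but received a single `% serializer` argument; triggering it would have raised a formatting TypeError instead of the intended message. A minimal stand-in satisfying the contract (a hypothetical example, not part of the commit):

import json

class JsonSerializer:
    # Anything exposing loads() and dumps() passes the hasattr checks above.
    @staticmethod
    def loads(data):
        return json.loads(data)

    @staticmethod
    def dumps(obj):
        return json.dumps(obj)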
5 changes: 2 additions & 3 deletions src/scrapy_redis/scheduler.py
Expand Up @@ -131,16 +131,15 @@ def open(self, spider):
                 serializer=self.serializer,
             )
         except TypeError as e:
-            raise ValueError("Failed to instantiate queue class '%s': %s",
-                             self.queue_cls, e)
+            raise ValueError(f"Failed to instantiate queue class '{self.queue_cls}': {e}")

         self.df = load_object(self.dupefilter_cls).from_spider(spider)

         if self.flush_on_start:
             self.flush()
         # notice if there are requests already in the queue to resume the crawl
         if len(self.queue):
-            spider.log("Resuming crawl (%d requests scheduled)" % len(self.queue))
+            spider.log(f"Resuming crawl ({len(self.queue)} requests scheduled)")

     def close(self, reason):
         if not self.persist:
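The scheduler hunk fixes a real bug rather than just style: the old code passed the format string and its arguments to ValueError separately, logging-style, so the message was never interpolated. A quick illustration of the difference:

queue_cls, err = "FifoQueue", "boom"  # hypothetical values

# Old: extra exception arguments are stored as a tuple, never formatted.
old = ValueError("Failed to instantiate queue class '%s': %s", queue_cls, err)
print(old)  # ("Failed to instantiate queue class '%s': %s", 'FifoQueue', 'boom')

# New: the f-string builds the message up front.
new = ValueError(f"Failed to instantiate queue class '{queue_cls}': {err}")
print(new)  # Failed to instantiate queue class 'FifoQueue': boom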
10 changes: 5 additions & 5 deletions src/scrapy_redis/spiders.py
Expand Up @@ -133,10 +133,10 @@ def next_requests(self):
                 yield reqs
                 found += 1
             else:
-                self.logger.debug("Request not made from data: %r", data)
+                self.logger.debug(f"Request not made from data: {data}")

         if found:
-            self.logger.debug("Read %s requests from '%s'", found, self.redis_key)
+            self.logger.debug(f"Read {found} requests from '{self.redis_key}'")

     def make_request_from_data(self, data):
         """
@@ -176,12 +176,12 @@ def make_request_from_data(self, data):
         if is_dict(formatted_data):
             parameter = json.loads(formatted_data)
         else:
-            self.logger.warning(TextColor.WARNING + "WARNING: String request is deprecated, please use JSON data format. \
-                Detail information, please check https://github.com/rmax/scrapy-redis#features" + TextColor.ENDC)
+            self.logger.warning(f"{TextColor.WARNING}WARNING: String request is deprecated, please use JSON data format. \
+                Detail information, please check https://github.com/rmax/scrapy-redis#features{TextColor.ENDC}")
             return FormRequest(formatted_data, dont_filter=True)

         if parameter.get('url', None) is None:
-            self.logger.warning(TextColor.WARNING + "The data from Redis has no url key in push data" + TextColor.ENDC)
+            self.logger.warning(f"{TextColor.WARNING}The data from Redis has no url key in push data{TextColor.ENDC}")
             return []

         url = parameter.pop("url")
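One trade-off in the spiders.py hunk worth noting: `logger.debug("...: %r", data)` defers interpolation until a record is actually emitted, whereas an f-string is built eagerly even when DEBUG is disabled. A side-by-side sketch (logger name hypothetical):

import logging

logger = logging.getLogger("scrapy_redis.example")
data = b"not-a-valid-request"

# Lazy: %r is applied only if a DEBUG handler actually emits the record.
logger.debug("Request not made from data: %r", data)

# Eager: the message is formatted before the level check runs.
logger.debug(f"Request not made from data: {data!r}")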
4 changes: 2 additions & 2 deletions tests/test_picklecompat.py
@@ -10,9 +10,9 @@ def test_picklecompat():
         'dont_filter': False,
         'errback': None,
         'headers': {'Referer': ['http://www.dmoz.org/']},
-        'meta': {'depth': 1, 'link_text': u'Fran\xe7ais', 'rule': 0},
+        'meta': {'depth': 1, 'link_text': 'Fran\xe7ais', 'rule': 0},
         'method': 'GET',
         'priority': 0,
-        'url': u'http://www.dmoz.org/World/Fran%C3%A7ais/',
+        'url': 'http://www.dmoz.org/World/Fran%C3%A7ais/',
     }
     assert obj == picklecompat.loads(picklecompat.dumps(obj))
4 changes: 2 additions & 2 deletions tests/test_scrapy_redis.py
@@ -63,7 +63,7 @@ class QueueTestMixin(RedisTestMixin):

     def setUp(self):
         self.spider = get_spider(name='myspider')
-        self.key = 'scrapy_redis:tests:%s:queue' % self.spider.name
+        self.key = f'scrapy_redis:tests:{self.spider.name}:queue'
         self.q = self.queue_cls(self.server, Spider('myspider'), self.key)

def tearDown(self):
Expand All @@ -80,7 +80,7 @@ def test_clear(self):
# duplication filter whenever the serielized requests are the same.
# This might be unwanted on repetitive requests to the same page
# even with dont_filter=True flag.
req = Request('http://example.com/?page=%s' % i)
req = Request(f'http://example.com/?page={i}')
self.q.push(req)
self.assertEqual(len(self.q), 10)

4 changes: 2 additions & 2 deletions tests/test_spiders.py
Expand Up @@ -109,7 +109,7 @@ def __hash__(self):
         return hash(self.url)

     def __repr__(self):
-        return '<%s(%s)>' % (self.__class__.__name__, self.url)
+        return f'<{self.__class__.__name__}({self.url})>'


 @pytest.mark.parametrize('spider_cls', [
@@ -132,7 +132,7 @@ def test_consume_urls_from_redis(start_urls_as_zset, start_urls_as_set, spider_c
     spider = spider_cls.from_crawler(crawler)
     with flushall(spider.server):
         urls = [
-            'http://example.com/%d' % i for i in range(batch_size * 2)
+            f'http://example.com/{i}' for i in range(batch_size * 2)
         ]
         reqs = []
         if start_urls_as_set:
