Skip to content

Commit

Permalink
Fix builds (#69)
Browse files Browse the repository at this point in the history
  • Loading branch information
fcanobrash authored Nov 27, 2019
1 parent e10b6fb commit 208e887
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 14 deletions.
3 changes: 3 additions & 0 deletions appveyor.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,9 @@ environment:
- PYTHON: "C:\\Python36"
TOX_ENV: py36

matrix:
fast_finish: true

build: false

install:
Expand Down
2 changes: 1 addition & 1 deletion scrapy_autounit/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ def write_test(path, test_name, url):
class AutoUnit(unittest.TestCase):
def test__{test_name}(self):
files = os.listdir(Path.resolve(Path(__file__).parent))
files = os.listdir(str(Path.resolve(Path(__file__).parent)))
files = [f for f in files if f.endswith('.bin')]
self.maxDiff = None
for f in files:
Expand Down
16 changes: 3 additions & 13 deletions tests/test_record.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,14 +38,12 @@ def second_callback(self, response):

def run(*pargs, **kwargs):
    """Run a subprocess and capture its result.

    Thin wrapper around ``subprocess.Popen``: all positional and keyword
    arguments are forwarded unchanged (callers are expected to pass
    ``stdout=subprocess.PIPE`` / ``stderr=subprocess.PIPE`` to capture
    output).

    Returns a dict with keys:
        'returncode': the process exit status (int)
        'stdout': captured standard output (bytes, or None if not piped)
        'stderr': captured standard error (bytes, or None if not piped)
    """
    proc = subprocess.Popen(*pargs, **kwargs)
    # communicate() reads both pipes concurrently and then waits for the
    # process to exit.  Calling wait() first and reading the pipes
    # afterwards can deadlock if the child fills a pipe buffer; it also
    # leaves the pipe file objects to be closed manually.  communicate()
    # handles draining, waiting, and closing in one call.
    stdout, stderr = proc.communicate()
    out = {
        'returncode': proc.returncode,
        'stdout': stdout,
        'stderr': stderr,
    }
    return out


Expand Down Expand Up @@ -258,20 +256,16 @@ def test_spider_attributes_recursive(self):
with CaseSpider() as spider:
spider.start_requests("""
self.__page = 0
self.param = 0
self._base_url = 'www.nothing.com'
yield scrapy.Request('data:text/plain,', callback=self.parse)
""")
spider.parse("""
self.param += 1
reqs = self.second_callback(response)
for r in reqs:
yield r
""")
spider.second_callback("""
self.__page += 1
if self.__page > 3:
self.end = True
yield {'a': 4}
return
for i in range(3):
Expand All @@ -285,20 +279,16 @@ def test_spider_attributes_recursive(self):
# Recursive calls including private variables using getattr
with CaseSpider() as spider:
spider.start_requests("""
self.param = 0
self._base_url = 'www.nothing.com'
yield scrapy.Request('data:text/plain,', callback=self.parse)
""")
spider.parse("""
self.param += 1
reqs = self.second_callback(response)
for r in reqs:
yield r
""")
spider.second_callback("""
self.__page = getattr(self, '_MySpider__page', 0) + 1
if self.__page > 3:
self.end = True
yield {'a': 4}
return
for i in range(3):
Expand Down

0 comments on commit 208e887

Please sign in to comment.