
Commit 692f105

drop remaining six imports
1 parent 1d6b1cc commit 692f105

16 files changed, +14 -29 lines
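
Every hunk in this commit applies one of a small set of mechanical substitutions. A condensed sketch of the mapping, for reference while reading the diffs below (illustrative only, not code from the repository):

from collections.abc import Iterable, Iterator, MutableMapping
# was: from six.moves import collections_abc

from queue import Queue
# was: from six.moves.queue import Queue

isinstance(key, str)
# was: from six import string_types; isinstance(key, string_types)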

Diff for: requirements-base.txt

-1
@@ -1,3 +1,2 @@
 requests>=1.0
 retrying>=1.3.3
-six>=1.10.0

Diff for: scrapinghub/client/collections.py

+2 -3
@@ -1,5 +1,4 @@
-from six import string_types
-from six.moves import collections_abc
+from collections.abc import Iterable
 
 from ..hubstorage.collectionsrt import Collection as _Collection
 
@@ -183,7 +182,7 @@ def delete(self, keys):
         The method returns ``None`` (original method returns an empty generator).
         """
         if (not isinstance(keys, str) and
-                not isinstance(keys, collections_abc.Iterable)):
+                not isinstance(keys, Iterable)):
             raise ValueError("You should provide string key or iterable "
                              "object providing string keys")
         self._origin.delete(keys)
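
A note on the second hunk: the order of the two isinstance checks matters, because a str is itself an Iterable (of characters), so testing Iterable alone would not single out plain string keys. A minimal standalone sketch of the same validation logic (the validate_keys name is hypothetical, not part of the library):

from collections.abc import Iterable

def validate_keys(keys):
    # Accept a single string key or any non-string iterable of keys;
    # reject everything else. str is checked first because strings
    # are themselves Iterable.
    if (not isinstance(keys, str) and
            not isinstance(keys, Iterable)):
        raise ValueError("You should provide string key or iterable "
                         "object providing string keys")

validate_keys('key1')            # ok: single string key
validate_keys(['key1', 'key2'])  # ok: iterable of string keys
# validate_keys(42)              # would raise ValueError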

Diff for: scrapinghub/client/frontiers.py

-2
@@ -1,8 +1,6 @@
 from functools import partial
 from collections import defaultdict
 
-from six import string_types
-
 from ..hubstorage.frontier import Frontier as _Frontier
 from ..hubstorage.utils import urlpathjoin
 

Diff for: scrapinghub/client/proxy.py

+1 -2
@@ -1,4 +1,3 @@
-import six
 import json
 
 from ..hubstorage import ValueTooLarge as _ValueTooLarge
@@ -165,7 +164,7 @@ def iter(self):
         :return: an iterator over key/value pairs.
         :rtype: :class:`collections.abc.Iterable`
         """
-        return next(self._origin.apiget()).items()
+        return iter(next(self._origin.apiget()).items())
 
 
 def _format_iter_filters(params):
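
The second hunk here is more than an import cleanup. In Python 3, dict.items() returns a view object, which is Iterable but not an Iterator, while the test changes further down (test_job.py, test_projects.py) assert isinstance(..., Iterator) on the value that iter() returns. Wrapping the view in the iter() builtin restores iterator semantics, as this small demonstration shows:

from collections.abc import Iterable, Iterator

d = {'meta1': 'data1'}
view = d.items()                       # a dict view in Python 3
assert isinstance(view, Iterable)
assert not isinstance(view, Iterator)  # views are not iterators
it = iter(view)                        # iter() yields a true Iterator
assert isinstance(it, Iterator)
assert next(it) == ('meta1', 'data1')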

Diff for: scrapinghub/client/utils.py

-2
@@ -5,8 +5,6 @@
 import warnings
 from codecs import decode
 
-import six
-
 
 class LogLevel:
     DEBUG = logging.DEBUG

Diff for: scrapinghub/hubstorage/batchuploader.py

-1
@@ -3,7 +3,6 @@
 import random
 import logging
 import warnings
-import six
 from queue import Queue
 from io import BytesIO
 from gzip import GzipFile

Diff for: scrapinghub/hubstorage/resourcetype.py

+2 -3
@@ -3,9 +3,8 @@
 import socket
 import time
 
-import six
 import requests.exceptions as rexc
-from six.moves import collections_abc
+from collections.abc import MutableMapping
 
 from .utils import urlpathjoin, xauth
 from .serialization import jlencode, jldecode, mpdecode
@@ -226,7 +225,7 @@ def stats(self):
         return next(self.apiget('stats', chunk_size=STATS_CHUNK_SIZE))
 
 
-class MappingResourceType(ResourceType, collections_abc.MutableMapping):
+class MappingResourceType(ResourceType, MutableMapping):
 
     _cached = None
     ignore_fields = ()
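
For context on the second hunk: collections.abc.MutableMapping is an abstract mixin, so a subclass that provides the five abstract methods gets the rest of the dict interface (get, update, pop, __contains__, and so on) for free. A minimal sketch of the pattern MappingResourceType builds on (the DictBacked class is illustrative, not the library's implementation):

from collections.abc import MutableMapping

class DictBacked(MutableMapping):
    # Defining these five abstract methods is all the mixin requires.
    def __init__(self):
        self._data = {}

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

m = DictBacked()
m['a'] = 1
assert m.get('a') == 1   # get() is supplied by MutableMapping
assert 'a' in m          # so is __contains__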

Diff for: scrapinghub/hubstorage/serialization.py

-1
@@ -1,4 +1,3 @@
-import six
 from json import dumps, loads
 from datetime import datetime
 

Diff for: scrapinghub/hubstorage/utils.py

+1 -2
@@ -1,4 +1,3 @@
-import six
 import time
 from queue import Empty
 
@@ -80,7 +79,7 @@ class iterqueue:
 
     it exposes an attribute "count" with the number of messages read
 
-    >>> from six.moves.queue import Queue
+    >>> from queue import Queue
     >>> q = Queue()
     >>> for x in range(10):
     ...     q.put(x)

Diff for: setup.py

+1 -1
@@ -26,7 +26,7 @@
     platforms=['Any'],
     packages=['scrapinghub', 'scrapinghub.client', 'scrapinghub.hubstorage'],
     package_data={'scrapinghub': ['VERSION']},
-    install_requires=['requests>=1.0', 'retrying>=1.3.3', 'six>=1.10.0'],
+    install_requires=['requests>=1.0', 'retrying>=1.3.3'],
     extras_require={'msgpack': mpack_required},
     python_requires='>=3.8',
     classifiers=[

Diff for: tests/client/test_frontiers.py

+3 -4
@@ -1,8 +1,7 @@
 import time
 from types import GeneratorType
 
-from six import string_types
-from six.moves import collections_abc
+from collections.abc import Iterable
 
 from scrapinghub.client.frontiers import Frontiers, Frontier, FrontierSlot
 from ..conftest import TEST_FRONTIER_SLOT
@@ -36,7 +35,7 @@ def test_frontiers(project, frontier, frontier_name):
 
     # test for iter() method
     frontiers_names = frontiers.iter()
-    assert isinstance(frontiers_names, collections_abc.Iterable)
+    assert isinstance(frontiers_names, Iterable)
     assert frontier_name in list(frontiers_names)
 
     # test for list() method
@@ -58,7 +57,7 @@ def test_frontier(project, frontier):
     _add_test_requests_to_frontier(frontier)
 
     slots = frontier.iter()
-    assert isinstance(slots, collections_abc.Iterable)
+    assert isinstance(slots, Iterable)
     assert TEST_FRONTIER_SLOT in list(slots)
 
     slots = frontier.list()

Diff for: tests/client/test_job.py

+2 -2
@@ -1,5 +1,5 @@
 import pytest
-from six.moves import collections_abc
+from collections.abc import Iterator
 
 from scrapinghub.client.items import Items
 from scrapinghub.client.jobs import Job
@@ -223,7 +223,7 @@ def test_metadata_delete(spider):
 def test_metadata_iter_list(spider):
     job = spider.jobs.run(meta={'meta1': 'data1', 'meta2': 'data2'})
     meta_iter = job.metadata.iter()
-    assert isinstance(meta_iter, collections_abc.Iterator)
+    assert isinstance(meta_iter, Iterator)
     meta_list = job.metadata.list()
     assert ('meta1', 'data1') in meta_list
     assert ('meta2', 'data2') in meta_list

Diff for: tests/client/test_projects.py

+2 -2
@@ -1,10 +1,10 @@
 import types
 from collections import defaultdict
+from collections.abc import Iterator
 
 import pytest
 import responses
 from requests.compat import urljoin
-from six.moves import collections_abc
 
 from scrapinghub import ScrapinghubClient
 from scrapinghub.client.activity import Activity
@@ -288,7 +288,7 @@ def test_settings_delete(project):
 def test_settings_iter_list(project):
     project.settings.set('job_runtime_limit', 24)
     settings_iter = project.settings.iter()
-    assert isinstance(settings_iter, collections_abc.Iterator)
+    assert isinstance(settings_iter, Iterator)
     settings_list = project.settings.list()
     assert ('job_runtime_limit', 24) in settings_list
     assert settings_list == list(settings_iter)

Diff for: tests/client/test_spiders.py

-1
@@ -2,7 +2,6 @@
 from collections import defaultdict
 
 import pytest
-from six import string_types
 
 from scrapinghub.client.exceptions import DuplicateJobError
 from scrapinghub.client.exceptions import BadRequest

Diff for: tests/hubstorage/test_jobq.py

-1
@@ -2,7 +2,6 @@
 Test JobQ
 """
 import os
-import six
 import pytest
 
 from scrapinghub.hubstorage.jobq import DuplicateJobError

Diff for: tests/hubstorage/test_project.py

-1
@@ -1,7 +1,6 @@
 """
 Test Project
 """
-import six
 import json
 import pytest
 from requests.exceptions import HTTPError
