drop six requirement #174

Open · wants to merge 5 commits into master
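Taken together, the five commits drop the `six` compatibility layer and the `from __future__` imports in favour of Python 3-only idioms. A minimal sketch of the recurring substitutions (class and variable names below are illustrative, not taken from this diff):

```python
from collections.abc import Iterable  # replaces six.moves.collections_abc


class Base:  # bare ``class Base:`` replaces ``class Base(object):``
    def request(self, *args, **kwargs):
        return (args, kwargs)


class Modernized(Base):
    def __init__(self, key, data):
        self.key = key
        self.data = data

    def request(self, *args, **kwargs):
        # zero-argument super() replaces super(Modernized, self).request(...)
        return super().request(*args, **kwargs)

    def start_param(self, offset):
        # f-strings replace '{}/{}'.format(self.key, offset)
        return f'{self.key}/{offset}'

    def iter_data(self):
        # dict.items() replaces six.iteritems(self.data)
        return iter(self.data.items())

    @staticmethod
    def accepts(keys):
        # str replaces six.string_types; collections.abc replaces six.moves
        return isinstance(keys, (str, Iterable))


if __name__ == '__main__':
    obj = Modernized('1/2/3', {'spider': 'example'})
    assert obj.start_param(100) == '1/2/3/100'
    assert dict(obj.iter_data()) == {'spider': 'example'}
    assert obj.accepts(['key1', 'key2'])
```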
19 changes: 7 additions & 12 deletions docs/conf.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# python-scrapinghub documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 24 12:28:40 2017.
@@ -54,9 +53,9 @@
master_doc = 'index'

# General information about the project.
project = u'scrapinghub'
copyright = u'2010-{}, Scrapinghub'.format(YEAR)
author = u'Scrapinghub'
project = 'scrapinghub'
copyright = f'2010-{YEAR}, Scrapinghub'
author = 'Scrapinghub'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -135,8 +134,8 @@
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'python-scrapinghub.tex', u'python-scrapinghub Documentation',
u'Pablo Hoffman, Daniel Graña', 'manual'),
(master_doc, 'python-scrapinghub.tex', 'python-scrapinghub Documentation',
'Pablo Hoffman, Daniel Graña', 'manual'),
]


@@ -145,7 +144,7 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'python-scrapinghub', u'python-scrapinghub Documentation',
(master_doc, 'python-scrapinghub', 'python-scrapinghub Documentation',
[author], 1)
]

@@ -156,13 +155,9 @@
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'python-scrapinghub', u'python-scrapinghub Documentation',
(master_doc, 'python-scrapinghub', 'python-scrapinghub Documentation',
author, 'python-scrapinghub', 'One line description of project.',
'Miscellaneous'),
]

# Set Sphinx Read The Docs theme
import sphinx_rtd_theme

html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
1 change: 0 additions & 1 deletion requirements-base.txt
@@ -1,3 +1,2 @@
requests>=1.0
retrying>=1.3.3
six>=1.10.0
6 changes: 3 additions & 3 deletions scrapinghub/client/__init__.py
@@ -16,17 +16,17 @@ class Connection(_Connection):

@_wrap_http_errors
def _request(self, *args, **kwargs):
return super(Connection, self)._request(*args, **kwargs)
return super()._request(*args, **kwargs)


class HubstorageClient(_HubstorageClient):

@_wrap_http_errors
def request(self, *args, **kwargs):
return super(HubstorageClient, self).request(*args, **kwargs)
return super().request(*args, **kwargs)


class ScrapinghubClient(object):
class ScrapinghubClient:
"""Main class to work with the Scrapy Cloud API.

:param auth: (optional) Scrapy Cloud API key or other Scrapy Cloud auth
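The only behavioural content in this hunk is the call style: zero-argument ``super()`` is Python 3-only, because the compiler injects an implicit ``__class__`` cell into methods defined in a class body, making the explicit class/instance arguments redundant. A minimal sketch with a hypothetical base class:

```python
class BaseClient:
    def request(self, *args, **kwargs):
        return ('base', args, kwargs)


class WrappedClient(BaseClient):
    def request(self, *args, **kwargs):
        # Equivalent to super(WrappedClient, self).request(*args, **kwargs);
        # the zero-argument form resolves the class via the implicit
        # __class__ cell that Python 3 adds to methods that use super().
        return super().request(*args, **kwargs)


assert WrappedClient().request(1, two=2) == ('base', (1,), {'two': 2})
```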
2 changes: 0 additions & 2 deletions scrapinghub/client/activity.py
@@ -1,5 +1,3 @@
from __future__ import absolute_import

from .proxy import _Proxy
from .utils import parse_job_key, update_kwargs

11 changes: 4 additions & 7 deletions scrapinghub/client/collections.py
@@ -1,7 +1,4 @@
from __future__ import absolute_import

from six import string_types
from six.moves import collections_abc
from collections.abc import Iterable

from ..hubstorage.collectionsrt import Collection as _Collection

@@ -96,7 +93,7 @@ def list(self):
return list(self.iter())


class Collection(object):
class Collection:
"""Representation of a project collection object.

Not a public constructor: use :class:`Collections` instance to get a
@@ -184,8 +181,8 @@ def delete(self, keys):

The method returns ``None`` (original method returns an empty generator).
"""
if (not isinstance(keys, string_types) and
not isinstance(keys, collections_abc.Iterable)):
if (not isinstance(keys, str) and
not isinstance(keys, Iterable)):
raise ValueError("You should provide string key or iterable "
"object providing string keys")
self._origin.delete(keys)
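The delete() guard now relies only on built-ins: ``str`` replaces ``six.string_types`` and ``collections.abc.Iterable`` replaces ``six.moves.collections_abc``. Restated in isolation (the helper name is illustrative, not part of the library):

```python
from collections.abc import Iterable


def check_delete_keys(keys):
    # Mirrors the updated check in Collection.delete(): accept a single
    # string key or an iterable of string keys, reject anything else.
    if not isinstance(keys, str) and not isinstance(keys, Iterable):
        raise ValueError("You should provide string key or iterable "
                         "object providing string keys")


check_delete_keys('key1')            # single string key: accepted
check_delete_keys(['key1', 'key2'])  # iterable of keys: accepted
# check_delete_keys(123)             # would raise ValueError
```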
4 changes: 1 addition & 3 deletions scrapinghub/client/exceptions.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from functools import wraps

from requests import HTTPError
@@ -29,7 +27,7 @@ def __init__(self, message=None, http_error=None):
self.http_error = http_error
if not message:
message = _get_http_error_msg(http_error)
super(ScrapinghubAPIError, self).__init__(message)
super().__init__(message)


class BadRequest(ScrapinghubAPIError):
17 changes: 7 additions & 10 deletions scrapinghub/client/frontiers.py
@@ -1,9 +1,6 @@
from __future__ import absolute_import
from functools import partial
from collections import defaultdict

from six import string_types

from ..hubstorage.frontier import Frontier as _Frontier
from ..hubstorage.utils import urlpathjoin

@@ -15,7 +12,7 @@ class _HSFrontier(_Frontier):
"""Modified hubstorage Frontier with newcount per slot."""

def __init__(self, *args, **kwargs):
super(_HSFrontier, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.newcount = defaultdict(int)

def _get_writer(self, frontier, slot):
@@ -84,7 +81,7 @@ class Frontiers(_Proxy):
>>> project.frontiers.close()
"""
def __init__(self, *args, **kwargs):
super(Frontiers, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

def get(self, name):
"""Get a frontier by name.
@@ -125,7 +122,7 @@ def close(self):
self._origin.close()


class Frontier(object):
class Frontier:
"""Representation of a frontier object.

Not a public constructor: use :class:`Frontiers` instance to get a
@@ -201,7 +198,7 @@ def newcount(self):
if frontier == self.key)


class FrontierSlot(object):
class FrontierSlot:
"""Representation of a frontier slot object.

Not a public constructor: use :class:`Frontier` instance to get a
@@ -295,7 +292,7 @@ def newcount(self):
return newcount_values.get((self._frontier.key, self.key), 0)


class FrontierSlotFingerprints(object):
class FrontierSlotFingerprints:
"""Representation of request fingerprints collection stored in slot."""

def __init__(self, slot):
@@ -311,7 +308,7 @@ def add(self, fps):
origin = self._frontier._frontiers._origin
writer = origin._get_writer(self._frontier.key, self.key)
fps = list(fps) if not isinstance(fps, list) else fps
if not all(isinstance(fp, string_types) for fp in fps):
if not all(isinstance(fp, str) for fp in fps):
raise ValueError('Fingerprint should be of a string type')
for fp in fps:
writer.write({'fp': fp})
@@ -338,7 +335,7 @@ def list(self, **params):
return list(self.iter(**params))


class FrontierSlotQueue(object):
class FrontierSlotQueue:
"""Representation of request batches queue stored in slot."""

def __init__(self, slot):
6 changes: 2 additions & 4 deletions scrapinghub/client/items.py
@@ -1,5 +1,3 @@
from __future__ import absolute_import

import sys

from .proxy import _ItemsResourceProxy, _DownloadableProxyMixin
@@ -84,10 +82,10 @@ def _modify_iter_params(self, params):
:return: a dict with updated set of params.
:rtype: :class:`dict`
"""
params = super(Items, self)._modify_iter_params(params)
params = super()._modify_iter_params(params)
offset = params.pop('offset', None)
if offset:
params['start'] = '{}/{}'.format(self.key, offset)
params['start'] = f'{self.key}/{offset}'
return params

def list_iter(self, chunksize=1000, *args, **kwargs):
8 changes: 3 additions & 5 deletions scrapinghub/client/jobs.py
@@ -1,5 +1,3 @@
from __future__ import absolute_import

import json

from ..hubstorage.job import JobMeta as _JobMeta
@@ -17,7 +15,7 @@
from .utils import get_tags_for_update, parse_job_key, update_kwargs


class Jobs(object):
class Jobs:
"""Class representing a collection of jobs for a project/spider.

Not a public constructor: use :class:`~scrapinghub.client.projects.Project`
@@ -445,7 +443,7 @@ def update_tags(self, add=None, remove=None, spider=None):
return result['count']


class Job(object):
class Job:
"""Class representing a job object.

Not a public constructor: use :class:`~scrapinghub.client.ScrapinghubClient`
@@ -568,7 +566,7 @@ def update(self, state, **params):
job = next(self._project.jobq.update(self, state=state, **params))
return job['prevstate']
except StopIteration:
raise NotFound("Job {} doesn't exist".format(self.key))
raise NotFound(f"Job {self.key} doesn't exist")

def cancel(self):
"""Schedule a running job for cancellation.
8 changes: 3 additions & 5 deletions scrapinghub/client/logs.py
@@ -1,5 +1,3 @@
from __future__ import absolute_import

import json
import logging

@@ -90,15 +88,15 @@ def _modify_iter_params(self, params):
:return: a modified dictionary with params.
:rtype: :class:`dict`
"""
params = super(Logs, self)._modify_iter_params(params)
params = super()._modify_iter_params(params)
offset = params.pop('offset', None)
if offset:
params['start'] = '{}/{}'.format(self.key, offset)
params['start'] = f'{self.key}/{offset}'
level = params.pop('level', None)
if level:
minlevel = getattr(LogLevel, level, None)
if minlevel is None:
raise ValueError("Unknown log level: {}".format(level))
raise ValueError(f"Unknown log level: {level}")
level_filter = json.dumps(['level', '>=', [minlevel]])
# there can already be some filters handled by super class method
params['filter'] = params.get('filter', []) + [level_filter]
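For reference, the reworked parameter handling in ``Logs._modify_iter_params()`` can be sketched standalone like this. The ``LogLevel`` constants are assumed to mirror the stdlib ``logging`` levels, and the helper below is illustrative rather than the library's actual method:

```python
import json
import logging


class LogLevel:
    # Assumed to match scrapinghub.client.utils.LogLevel / stdlib logging.
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARNING = logging.WARNING
    ERROR = logging.ERROR
    CRITICAL = logging.CRITICAL


def modify_iter_params(key, params):
    # Sketch only: offset becomes a '<job key>/<offset>' start pointer,
    # level becomes a JSON-encoded ['level', '>=', [minlevel]] filter.
    params = dict(params)
    offset = params.pop('offset', None)
    if offset:
        params['start'] = f'{key}/{offset}'
    level = params.pop('level', None)
    if level:
        minlevel = getattr(LogLevel, level, None)
        if minlevel is None:
            raise ValueError(f"Unknown log level: {level}")
        level_filter = json.dumps(['level', '>=', [minlevel]])
        params['filter'] = params.get('filter', []) + [level_filter]
    return params


assert modify_iter_params('1/2/3', {'offset': 10, 'level': 'ERROR'}) == {
    'start': '1/2/3/10',
    'filter': ['["level", ">=", [40]]'],
}
```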
6 changes: 2 additions & 4 deletions scrapinghub/client/projects.py
@@ -1,5 +1,3 @@
from __future__ import absolute_import

from ..hubstorage.activity import Activity as _Activity
from ..hubstorage.collectionsrt import Collections as _Collections
from ..hubstorage.project import Settings as _Settings
@@ -13,7 +11,7 @@
from .utils import parse_project_id


class Projects(object):
class Projects:
"""Collection of projects available to current user.

Not a public constructor: use :class:`~scrapinghub.client.ScrapinghubClient`
@@ -95,7 +93,7 @@ def summary(self, state=None, **params):
return self._client._hsclient.projects.jobsummaries(**params)


class Project(object):
class Project:
"""Class representing a project object and its resources.

Not a public constructor: use :class:`~scrapinghub.client.ScrapinghubClient`
13 changes: 5 additions & 8 deletions scrapinghub/client/proxy.py
@@ -1,14 +1,11 @@
from __future__ import absolute_import

import six
import json

from ..hubstorage import ValueTooLarge as _ValueTooLarge
from .utils import update_kwargs
from .exceptions import ValueTooLarge


class _Proxy(object):
class _Proxy:
"""A helper to create a class instance and proxy its methods to origin.

The internal proxy class is useful to link class attributes from its
@@ -99,7 +96,7 @@ def close(self, block=True):
self._origin.close(block)


class _DownloadableProxyMixin(object):
class _DownloadableProxyMixin:

def iter(self, _path=None, count=None, requests_params=None, **apiparams):
"""A general method to iterate through elements.
@@ -150,7 +147,7 @@ def update(self, values):
raise TypeError("values should be a dict")
data = next(self._origin.apiget())
data.update(values)
self._origin.apipost(jl={k: v for k, v in six.iteritems(data)
self._origin.apipost(jl={k: v for k, v in data.items()
if k not in self._origin.ignore_fields},
is_idempotent=True)

@@ -167,7 +164,7 @@ def iter(self):
:return: an iterator over key/value pairs.
:rtype: :class:`collections.abc.Iterable`
"""
return six.iteritems(next(self._origin.apiget()))
return iter(next(self._origin.apiget()).items())


def _format_iter_filters(params):
@@ -179,7 +176,7 @@ def _format_iter_filters(params):
if filters and isinstance(filters, list):
filter_data = []
for elem in params.pop('filter'):
if isinstance(elem, six.string_types):
if isinstance(elem, str):
filter_data.append(elem)
elif isinstance(elem, (list, tuple)):
filter_data.append(json.dumps(elem))
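Besides the two ``six.iteritems()`` call sites collapsing to ``dict.items()``, ``_format_iter_filters()`` now tests against ``str`` directly. A self-contained sketch of that normalization, covering only the branches visible in this hunk (the real function may handle further cases):

```python
import json


def format_iter_filters(params):
    # Pass string filters through untouched and JSON-encode list/tuple
    # filters, as the post-six _format_iter_filters() does.
    filters = params.get('filter')
    if filters and isinstance(filters, list):
        filter_data = []
        for elem in params.pop('filter'):
            if isinstance(elem, str):
                filter_data.append(elem)
            elif isinstance(elem, (list, tuple)):
                filter_data.append(json.dumps(elem))
        params['filter'] = filter_data
    return params


assert format_iter_filters({'filter': [['field', '=', ['value']], 'raw']}) == {
    'filter': ['["field", "=", ["value"]]', 'raw'],
}
```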
2 changes: 0 additions & 2 deletions scrapinghub/client/requests.py
@@ -1,5 +1,3 @@
from __future__ import absolute_import

from .proxy import _ItemsResourceProxy, _DownloadableProxyMixin


2 changes: 0 additions & 2 deletions scrapinghub/client/samples.py
@@ -1,5 +1,3 @@
from __future__ import absolute_import

from .proxy import _ItemsResourceProxy

