Ruff update #308

Merged: 5 commits, Jul 22, 2024
26 changes: 5 additions & 21 deletions .pre-commit-config.yaml
@@ -6,25 +6,9 @@ repos:
       - id: check-yaml
       - id: end-of-file-fixer
       - id: trailing-whitespace
-  - repo: 'https://github.com/asottile/pyupgrade'
-    rev: v3.15.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.8
     hooks:
-      - id: pyupgrade
-        args:
-          - '--py37-plus'
-  - repo: 'https://github.com/PyCQA/isort'
-    rev: 5.12.0
-    hooks:
-      - id: isort
-  - repo: 'https://github.com/psf/black'
-    rev: 23.11.0
-    hooks:
-      - id: black
-  - repo: 'https://github.com/pycqa/flake8'
-    rev: 6.1.0
-    hooks:
-      - id: flake8
-  - repo: 'https://github.com/codespell-project/codespell'
-    rev: v2.1.0
-    hooks:
-      - id: codespell
+      - id: ruff
+        args: [ --fix ]
+      - id: ruff-format
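With this hook set, pre-commit relies on ruff alone for linting and formatting: the `ruff` hook lints and applies safe autofixes (because of `args: [ --fix ]`), and `ruff-format` reformats code, replacing the separate pyupgrade, isort, black, and flake8 hooks (codespell is dropped without a replacement). A rough local usage sketch, assuming pre-commit is already installed:

$ pre-commit install
$ pre-commit run --all-files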
18 changes: 8 additions & 10 deletions CONTRIBUTING.rst
@@ -35,10 +35,11 @@ ideal report includes:
 
 Codestyle
 ---------
-This project uses flake8 to enforce codstyle requirements. We've codified this
-process using a tool called `pre-commit <https://pre-commit.com/>`__. pre-commit
-allows us to specify a config file with all tools required for code linting,
-and surfaces either a git commit hook, or single command, for enforcing these.
+This project uses `ruff <https://github.com/astral-sh/ruff>`__ to enforce
+codestyle requirements. We've codified this process using a tool called
+`pre-commit <https://pre-commit.com/>`__. pre-commit allows us to specify a
+config file with all tools required for code linting, and surfaces either a
+git commit hook, or single command, for enforcing these.
 
 To validate your PR prior to publishing, you can use the following
 `installation guide <https://pre-commit.com/#install>`__ to setup pre-commit.
@@ -51,10 +52,7 @@ to automatically perform the codestyle validation:
     $ pre-commit run
 
 This will automatically perform simple updates (such as white space clean up)
-and provide a list of any failing flake8 checks. After these are addressed,
+and provide a list of any failing checks. After these are addressed,
 you can commit the changes prior to publishing the PR.
-These checks are also included in our CI setup under the "Lint" workflow which will provide output on Github for anything missed locally.
-
-See the `flake8` section of the
-`setup.cfg <https://github.com/boto/s3transfer/blob/develop/setup.cfg>`__ for the
-currently enforced rules.
+These checks are also included in our CI setup under the "Lint" workflow which
+will provide output on GitHub for anything missed locally.
65 changes: 58 additions & 7 deletions pyproject.toml
@@ -3,12 +3,63 @@ markers = [
     "slow: marks tests as slow",
 ]
 
-[tool.isort]
-profile = "black"
-line_length = 79
-honor_noqa = true
-src_paths = ["s3transfer", "tests"]
+[tool.ruff]
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pyenv",
+    ".pytest_cache",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+]
 
-[tool.black]
+# Format same as Black.
 line-length = 79
-skip_string_normalization = true
+indent-width = 4
+
+target-version = "py38"
+
+[tool.ruff.lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F", "I", "UP"]
+ignore = []
+
+# Allow fixes for all enabled rules (when `--fix` is provided).
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[tool.ruff.format]
+# Preserve existing string quotes; like Black, use spaces for indents
+# and respect magic trailing commas.
+quote-style = "preserve"
+indent-style = "space"
+skip-magic-trailing-comma = false
+line-ending = "auto"
+
+docstring-code-format = false
+docstring-code-line-length = "dynamic"
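Since ruff reads its settings from the `[tool.ruff]` tables in pyproject.toml, the pre-commit hooks above and any direct invocation share this configuration: Pyflakes (`F`), the selected pycodestyle codes (`E4`, `E7`, `E9`), import sorting (`I`), and pyupgrade-style modernization (`UP`) all run under a single linter. A rough sketch of running it by hand, assuming ruff v0.4.x is installed locally:

$ ruff check --fix .
$ ruff format .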
5 changes: 3 additions & 2 deletions s3transfer/__init__.py
@@ -123,6 +123,7 @@ def __call__(self, bytes_amount):


"""

import concurrent.futures
import functools
import logging
@@ -813,8 +814,8 @@ def _validate_all_known_args(self, actual, allowed):
for kwarg in actual:
if kwarg not in allowed:
raise ValueError(
"Invalid extra_args key '%s', "
"must be one of: %s" % (kwarg, ', '.join(allowed))
f"Invalid extra_args key '{kwarg}', "
f"must be one of: {', '.join(allowed)}"
)

def _ranged_download(
4 changes: 1 addition & 3 deletions s3transfer/bandwidth.py
@@ -30,9 +30,7 @@ def __init__(self, requested_amt, retry_time):
"""
self.requested_amt = requested_amt
self.retry_time = retry_time
msg = 'Request amount {} exceeded the amount available. Retry in {}'.format(
requested_amt, retry_time
)
msg = f'Request amount {requested_amt} exceeded the amount available. Retry in {retry_time}'
super().__init__(msg)


4 changes: 2 additions & 2 deletions s3transfer/constants.py
@@ -26,5 +26,5 @@
'ExpectedBucketOwner',
]

USER_AGENT = 's3transfer/%s' % s3transfer.__version__
PROCESS_USER_AGENT = '%s processpool' % USER_AGENT
USER_AGENT = f's3transfer/{s3transfer.__version__}'
PROCESS_USER_AGENT = f'{USER_AGENT} processpool'
2 changes: 1 addition & 1 deletion s3transfer/copies.py
@@ -280,7 +280,7 @@ def _get_head_object_request_from_copy_source(self, copy_source):
raise TypeError(
'Expecting dictionary formatted: '
'{"Bucket": bucket_name, "Key": key} '
'but got %s or type %s.' % (copy_source, type(copy_source))
f'but got {copy_source} or type {type(copy_source)}.'
)

def _extra_upload_part_args(self, extra_args):
4 changes: 1 addition & 3 deletions s3transfer/download.py
@@ -307,9 +307,7 @@ def _get_download_output_manager_cls(self, transfer_future, osutil):
if download_manager_cls.is_compatible(fileobj, osutil):
return download_manager_cls
raise RuntimeError(
'Output {} of type: {} is not supported.'.format(
fileobj, type(fileobj)
)
f'Output {fileobj} of type: {type(fileobj)} is not supported.'
)

def _submit(
14 changes: 5 additions & 9 deletions s3transfer/futures.py
@@ -175,9 +175,7 @@ def __init__(self, transfer_id=None):
self._failure_cleanups_lock = threading.Lock()

def __repr__(self):
return '{}(transfer_id={})'.format(
self.__class__.__name__, self.transfer_id
)
return f'{self.__class__.__name__}(transfer_id={self.transfer_id})'

@property
def exception(self):
@@ -295,8 +293,8 @@ def _transition_to_non_done_state(self, desired_state):
with self._lock:
if self.done():
raise RuntimeError(
'Unable to transition from done state %s to non-done '
'state %s.' % (self.status, desired_state)
f'Unable to transition from done state {self.status} to non-done '
f'state {desired_state}.'
)
self._status = desired_state

@@ -316,9 +314,7 @@ def submit(self, executor, task, tag=None):
:returns: A future representing the submitted task
"""
logger.debug(
"Submitting task {} to executor {} for transfer request: {}.".format(
task, executor, self.transfer_id
)
f"Submitting task {task} to executor {executor} for transfer request: {self.transfer_id}."
)
future = executor.submit(task, tag=tag)
# Add this created future to the list of associated future just
@@ -400,7 +396,7 @@ def _run_callback(self, callback):
# We do not want a callback interrupting the process, especially
# in the failure cleanups. So log and catch, the exception.
except Exception:
logger.debug("Exception raised in %s." % callback, exc_info=True)
logger.debug(f"Exception raised in {callback}.", exc_info=True)


class BoundedExecutor:
12 changes: 6 additions & 6 deletions s3transfer/manager.py
@@ -149,8 +149,8 @@ def _validate_attrs_are_nonzero(self):
for attr, attr_val in self.__dict__.items():
if attr_val is not None and attr_val <= 0:
raise ValueError(
'Provided parameter %s of value %s must be greater than '
'0.' % (attr, attr_val)
f'Provided parameter {attr} of value {attr_val} must '
'be greater than 0.'
)


@@ -492,16 +492,16 @@ def _validate_if_bucket_supported(self, bucket):
match = pattern.match(bucket)
if match:
raise ValueError(
'TransferManager methods do not support %s '
'resource. Use direct client calls instead.' % resource
f'TransferManager methods do not support {resource} '
'resource. Use direct client calls instead.'
)

def _validate_all_known_args(self, actual, allowed):
for kwarg in actual:
if kwarg not in allowed:
raise ValueError(
"Invalid extra_args key '%s', "
"must be one of: %s" % (kwarg, ', '.join(allowed))
"Invalid extra_args key '{}', "
"must be one of: {}".format(kwarg, ', '.join(allowed))
)

def _add_operation_defaults(self, bucket, extra_args):
1 change: 1 addition & 0 deletions s3transfer/processpool.py
@@ -192,6 +192,7 @@
are using ``us-west-2`` as their region.

"""

import collections
import contextlib
import logging
9 changes: 4 additions & 5 deletions s3transfer/subscribers.py
@@ -30,20 +30,19 @@ def __new__(cls, *args, **kwargs):
return super().__new__(cls)

@classmethod
@lru_cache()
@lru_cache
def _validate_subscriber_methods(cls):
for subscriber_type in cls.VALID_SUBSCRIBER_TYPES:
subscriber_method = getattr(cls, 'on_' + subscriber_type)
if not callable(subscriber_method):
raise InvalidSubscriberMethodError(
'Subscriber method %s must be callable.'
% subscriber_method
f'Subscriber method {subscriber_method} must be callable.'
)

if not accepts_kwargs(subscriber_method):
raise InvalidSubscriberMethodError(
'Subscriber method %s must accept keyword '
'arguments (**kwargs)' % subscriber_method
f'Subscriber method {subscriber_method} must accept keyword '
'arguments (**kwargs)'
)

def on_queued(self, future, **kwargs):
6 changes: 1 addition & 5 deletions s3transfer/tasks.py
@@ -96,11 +96,7 @@ def __repr__(self):
main_kwargs_to_display = self._get_kwargs_with_params_to_include(
self._main_kwargs, params_to_display
)
return '{}(transfer_id={}, {})'.format(
self.__class__.__name__,
self._transfer_coordinator.transfer_id,
main_kwargs_to_display,
)
return f'{self.__class__.__name__}(transfer_id={self._transfer_coordinator.transfer_id}, {main_kwargs_to_display})'

@property
def transfer_id(self):
4 changes: 1 addition & 3 deletions s3transfer/upload.py
@@ -550,9 +550,7 @@ def _get_upload_input_manager_cls(self, transfer_future):
if upload_manager_cls.is_compatible(fileobj):
return upload_manager_cls
raise RuntimeError(
'Input {} of type: {} is not supported.'.format(
fileobj, type(fileobj)
)
f'Input {fileobj} of type: {type(fileobj)} is not supported.'
)

def _submit(
19 changes: 8 additions & 11 deletions s3transfer/utils.py
@@ -191,9 +191,7 @@ def __init__(self, func, *args, **kwargs):
self._kwargs = kwargs

def __repr__(self):
return 'Function: {} with args {} and kwargs {}'.format(
self._func, self._args, self._kwargs
)
return f'Function: {self._func} with args {self._args} and kwargs {self._kwargs}'

def __call__(self):
return self._func(*self._args, **self._kwargs)
@@ -636,7 +634,7 @@ def acquire(self, tag, blocking=True):
"""
logger.debug("Acquiring %s", tag)
if not self._semaphore.acquire(blocking):
raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
raise NoResourcesAvailable(f"Cannot acquire tag '{tag}'")

def release(self, tag, acquire_token):
"""Release the semaphore
@@ -694,7 +692,7 @@ def acquire(self, tag, blocking=True):
try:
if self._count == 0:
if not blocking:
raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
raise NoResourcesAvailable(f"Cannot acquire tag '{tag}'")
else:
while self._count == 0:
self._condition.wait()
@@ -716,7 +714,7 @@ def release(self, tag, acquire_token):
self._condition.acquire()
try:
if tag not in self._tag_sequences:
raise ValueError("Attempted to release unknown tag: %s" % tag)
raise ValueError(f"Attempted to release unknown tag: {tag}")
max_sequence = self._tag_sequences[tag]
if self._lowest_sequence[tag] == sequence_number:
# We can immediately process this request and free up
@@ -743,7 +741,7 @@ def release(self, tag, acquire_token):
else:
raise ValueError(
"Attempted to release unknown sequence number "
"%s for tag: %s" % (sequence_number, tag)
f"{sequence_number} for tag: {tag}"
)
finally:
self._condition.release()
@@ -781,13 +779,13 @@ def _adjust_for_chunksize_limits(self, current_chunksize):
if current_chunksize > self.max_size:
logger.debug(
"Chunksize greater than maximum chunksize. "
"Setting to %s from %s." % (self.max_size, current_chunksize)
f"Setting to {self.max_size} from {current_chunksize}."
)
return self.max_size
elif current_chunksize < self.min_size:
logger.debug(
"Chunksize less than minimum chunksize. "
"Setting to %s from %s." % (self.min_size, current_chunksize)
f"Setting to {self.min_size} from {current_chunksize}."
)
return self.min_size
else:
@@ -804,8 +802,7 @@ def _adjust_for_max_parts(self, current_chunksize, file_size):
if chunksize != current_chunksize:
logger.debug(
"Chunksize would result in the number of parts exceeding the "
"maximum. Setting to %s from %s."
% (chunksize, current_chunksize)
f"maximum. Setting to {chunksize} from {current_chunksize}."
)

return chunksize
2 changes: 1 addition & 1 deletion scripts/ci/install
@@ -44,4 +44,4 @@ if __name__ == "__main__":
package = os.path.join('dist', wheel_dist)
if args.extras:
package = f"'{package}[{args.extras}]'"
run('pip install %s' % package)
run(f'pip install {package}')