
Commit

Merge pull request #653 from onekey-sec/update-ruff
Update ruff
vlaci authored Sep 22, 2023
2 parents 4328cb7 + d7b063a commit 5d5aa23
Showing 14 changed files with 23 additions and 17 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/update-vendored-nix-dependencies.py
@@ -55,7 +55,8 @@ def create_pr(
 def update_dependencies():
     with NamedTemporaryFile() as log:
         subprocess.run(
-            ["nvfetcher", "--build-dir", "nix/_sources", "--changelog", log.name]
+            ["nvfetcher", "--build-dir", "nix/_sources", "--changelog", log.name],
+            check=True,
         )
         return Path(log.name).read_text()

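For context, a minimal sketch (not part of this diff) of what the added `check=True` changes: without it a failing `nvfetcher` run is silently ignored, while with it `subprocess.run` raises `CalledProcessError` on a non-zero exit status.

```python
import subprocess

# `false` is a standard utility that always exits with status 1.
result = subprocess.run(["false"], check=False)
print(result.returncode)  # 1, but no exception: the caller must remember to check

try:
    subprocess.run(["false"], check=True)  # raises on any non-zero exit status
except subprocess.CalledProcessError as exc:
    print(f"command failed with exit code {exc.returncode}")
```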
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -14,7 +14,7 @@ repos:
       - id: check-added-large-files
 
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: "v0.0.259"
+    rev: "v0.0.290"
     hooks:
       - id: ruff
         name: Check python (ruff)
4 changes: 4 additions & 0 deletions pyproject.toml
@@ -99,7 +99,11 @@ ignore = [
   "N818", # error-suffix-on-exception-name: Exception names can be meaningful without smurfs
   "PLC0414", # useless-import-alias: Enable explicitly re-exporting public symbols
   "PLR09", # too-many-{arguments,branches,...}: We do not want to impose hard limits
+  "PTH201", # path-constructor-current-directory: Having "." explicitly passed expresses intent more clearly
+  "RUF012", # mutable-class-default: Wants to annotate things like `Handler.PATTERNS` with `ClassVar`, producing noise
   "S101", # assert: Enable usage of asserts
+  "S603", # subprocess-without-shell-equals-true: This check just highlights that we should check for untrusted inputs
+  "S607", # start-process-with-partial-path: In our case, the convenience of allowing partial paths outweighs the risks
   "TRY003", # raise-vanilla-args: We are adding contextual information in exception messages
   "TRY400", # error-instead-of-exception: It is okay to log without backtrace in except blocks
 ]
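For illustration only, a hypothetical snippet of the kind of code the newly ignored RUF012, S603, and S607 rules would otherwise flag; the class, function, and `7z` invocation below are made up for this sketch and are not from unblob.

```python
import subprocess
from typing import ClassVar


class ExampleHandler:
    # RUF012 would ask for a ClassVar annotation on mutable class-level defaults:
    PATTERNS: ClassVar[list[str]] = ["magic-a", "magic-b"]


def extract_archive(archive: str, outdir: str) -> None:
    # S603 flags subprocess calls as a reminder to vet untrusted input;
    # S607 flags the partial executable path ("7z" instead of an absolute path).
    subprocess.run(["7z", "x", archive, f"-o{outdir}"], check=True)
```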
1 change: 0 additions & 1 deletion tests/handlers/filesystem/test_romfs.py
@@ -25,7 +25,6 @@ def test_get_string(content, expected):
         (b"\x00\x00\x00\x00", True),
         (b"\x00\x00\x00\x00\x00\x00\x00\x00", True),
         (b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", True),
-        (b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", True),
         (b"\x00\x00\x00", False),
         (b"\x00\x00\x00\x00\x00\x00\x00", False),
         (b"\x00\x00\x00\x00\x00\x00\x00\x00\x00", False),
2 changes: 1 addition & 1 deletion tests/test_cli.py
@@ -158,7 +158,7 @@ def test_non_existing_file(tmp_path: Path):
     result = runner.invoke(unblob.cli.cli, ["--extract-dir", str(tmp_path), str(path)])
     assert result.exit_code == 2
     assert "Invalid value for 'FILE'" in result.output
-    assert f"File '{str(path)}' does not exist" in result.output
+    assert f"File '{path}' does not exist" in result.output


 def test_dir_for_file(tmp_path: Path):
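The `tests/test_cli.py` change relies on f-strings already formatting values through `str()`, so the explicit conversion was redundant. A quick check with a made-up path:

```python
from pathlib import Path

path = Path("/tmp/does-not-exist")  # hypothetical example path

# Both spellings render identically; the inner str() call adds nothing.
assert f"File '{str(path)}' does not exist" == f"File '{path}' does not exist"
```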
4 changes: 2 additions & 2 deletions tests/test_iter_utils.py
@@ -8,8 +8,8 @@
     [
         ([], set()),
         ([0, 0], {0}),
-        ([0, 0, 0], {0, 0}),
-        ([1, 2, 3], {1, 1}),
+        ([0, 0, 0], {0}),
+        ([1, 2, 3], {1}),
         ([1, 5, 8, 8, 10, 15], {4, 3, 0, 2, 5}),
     ],
 )
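The `tests/test_iter_utils.py` cases change spelling but not meaning: a set literal silently drops duplicate items, which is why ruff treats a written-out duplicate as a likely typo.

```python
# Set literals de-duplicate, so the old and new expected values compare equal:
assert {0, 0} == {0}
assert {1, 1} == {1}

# The lint exists because a literal duplicate usually hides a typo,
# e.g. {1, 1} written where {1, 2} was intended.
```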
1 change: 0 additions & 1 deletion tests/test_logging.py
@@ -12,7 +12,6 @@
     "value, expected",
     [
         (10, "0xa"),
-        (0xA, "0xa"),
         ("10", "10"),
         (noformat(10), 10),
         (noformat(Path("/absolute/path")), Path("/absolute/path")),
3 changes: 2 additions & 1 deletion unblob/extractors/command.py
@@ -54,6 +54,7 @@ def no_op():
                 stdout=stdout_file,
                 stderr=subprocess.PIPE,
                 timeout=COMMAND_TIMEOUT,
+                check=False,
             )
             if res.returncode != 0:
                 error_report = ExtractCommandFailedReport(
@@ -64,7 +65,7 @@
                 )
 
                 logger.error("Extract command failed", **error_report.asdict())
-                raise ExtractError(error_report)  # noqa: TRY301
+                raise ExtractError(error_report)
         except FileNotFoundError:
             error_report = ExtractorDependencyNotFoundReport(
                 dependencies=self.get_dependencies()
2 changes: 1 addition & 1 deletion unblob/handlers/archive/zip.py
@@ -93,7 +93,7 @@ def has_encrypted_files(
         end_of_central_directory: Instance,
     ) -> bool:
         file.seek(start_offset + end_of_central_directory.offset_of_cd, io.SEEK_SET)
-        for _ in range(0, end_of_central_directory.total_entries):
+        for _ in range(end_of_central_directory.total_entries):
             cd_header = self.cparser_le.cd_file_header_t(file)
             if cd_header.flags & self.ENCRYPTED_FLAG:
                 return True
4 changes: 2 additions & 2 deletions unblob/handlers/compression/lz4.py
@@ -15,10 +15,10 @@
 
 logger = get_logger()
 
-SKIPPABLE_FRAMES_MAGIC = [0x184D2A50 + i for i in range(0, 16)]
+SKIPPABLE_FRAMES_MAGIC = [0x184D2A50 + i for i in range(16)]
 FRAME_MAGIC = 0x184D2204
 LEGACY_FRAME_MAGIC = 0x184C2102
-FRAME_MAGICS = [*SKIPPABLE_FRAMES_MAGIC, FRAME_MAGIC] + [LEGACY_FRAME_MAGIC]
+FRAME_MAGICS = [*SKIPPABLE_FRAMES_MAGIC, FRAME_MAGIC, LEGACY_FRAME_MAGIC]
 
 _1BIT = 0x01
 _2BITS = 0x03
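The `lz4.py` change is behaviour-preserving: merging the concatenation into a single literal yields the same list. A small check reusing the constants from the diff:

```python
SKIPPABLE_FRAMES_MAGIC = [0x184D2A50 + i for i in range(16)]
FRAME_MAGIC = 0x184D2204
LEGACY_FRAME_MAGIC = 0x184C2102

# Old spelling: unpack, then concatenate; new spelling: one literal with unpacking.
old = [*SKIPPABLE_FRAMES_MAGIC, FRAME_MAGIC] + [LEGACY_FRAME_MAGIC]
new = [*SKIPPABLE_FRAMES_MAGIC, FRAME_MAGIC, LEGACY_FRAME_MAGIC]
assert old == new  # 18 magics either way
```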
4 changes: 1 addition & 3 deletions unblob/handlers/compression/lzma.py
@@ -77,9 +77,7 @@ def calculate_chunk(self, file: File, start_offset: int) -> Optional[ValidChunk]
             data = file.read(DEFAULT_BUFSIZE)
             if not data:
                 if read_size < (uncompressed_size * MIN_READ_RATIO):
-                    raise InvalidInputFormat(  # noqa: TRY301
-                        "Very early truncated LZMA stream"
-                    )
+                    raise InvalidInputFormat("Very early truncated LZMA stream")
 
                 logger.debug(
                     "LZMA stream is truncated.",
2 changes: 1 addition & 1 deletion unblob/handlers/compression/xz.py
@@ -107,7 +107,7 @@ def get_stream_size(footer_offset: int, file: File) -> int:
 
     # read Record 'Unpadded Size' and 'Uncompressed Size' for every Record
     blocks_size = 0
-    for _ in range(0, num_records):
+    for _ in range(num_records):
         size, unpadded_size = read_multibyte_int(file)
         index_size += size
 
2 changes: 1 addition & 1 deletion unblob/processing.py
@@ -738,7 +738,7 @@ def calculate_block_size(
     block_size = file_size // chunk_count
     block_size = max(min_limit, block_size)
     block_size = min(block_size, max_limit)
-    return block_size
+    return block_size  # noqa: RET504


 def format_entropy_plot(percentages: List[float], block_size: int):
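RET504 flags assigning to a variable and immediately returning it; the `# noqa` keeps the readable clamp-style sequence of assignments. For comparison, an equivalent condensed form the rule nudges toward (hypothetical function name, same arithmetic as the diff):

```python
def calculate_block_size_inline(
    file_size: int, chunk_count: int, min_limit: int, max_limit: int
) -> int:
    # Clamp file_size // chunk_count into [min_limit, max_limit] and return it directly.
    return min(max(min_limit, file_size // chunk_count), max_limit)
```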
6 changes: 5 additions & 1 deletion unblob/testing.py
@@ -24,8 +24,12 @@ def configure_logging(tmp_path_factory):  # noqa: PT004
 
 
 def gather_integration_tests(test_data_path: Path):
+    # Path.glob() trips on some invalid files
     test_input_dirs = [
-        Path(p) for p in glob.iglob(f"{test_data_path}/**/__input__", recursive=True)
+        Path(p)
+        for p in glob.iglob(  # noqa: PTH207
+            f"{test_data_path}/**/__input__", recursive=True
+        )
     ]
     test_case_dirs = [p.parent for p in test_input_dirs]
     test_output_dirs = [p / "__output__" for p in test_case_dirs]
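PTH207 would prefer `Path.glob()` over the `glob` module; the comment added in the diff records why the stdlib function is kept. A rough comparison of the two spellings, with a made-up test-data directory:

```python
import glob
from pathlib import Path

test_data_path = Path("tests/integration")  # hypothetical test-data root

# What PTH207 would suggest:
via_pathlib = sorted(test_data_path.glob("**/__input__"))

# The variant kept in the diff; per the comment, Path.glob() trips on some invalid files.
via_glob = sorted(
    Path(p) for p in glob.iglob(f"{test_data_path}/**/__input__", recursive=True)
)
```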
