Skip to content

Commit

Permalink
Fix type ignore comments
Browse files Browse the repository at this point in the history
Signed-off-by: Samuel Giffard <[email protected]>
  • Loading branch information
Samuel Giffard committed Oct 11, 2023
1 parent a37601a commit 7b575b8
Show file tree
Hide file tree
Showing 14 changed files with 27 additions and 27 deletions.
2 changes: 1 addition & 1 deletion rohmu/atomic_opener.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal # type: ignore [assignment]
from typing_extensions import Literal # type: ignore[assignment]

import errno
import os
Expand Down
2 changes: 1 addition & 1 deletion rohmu/encryptor.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def writable(self) -> bool:
self._check_not_closed()
return True

def write(self, data: BinaryData) -> int: # type: ignore [override]
def write(self, data: BinaryData) -> int: # type: ignore[override]
"""Encrypt and write the given bytes"""
self._check_not_closed()
if not data:
Expand Down
2 changes: 1 addition & 1 deletion rohmu/filewrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ def writable(self) -> bool:
self._check_not_closed()
return False

def write(self, data: BinaryData) -> int: # type: ignore [override]
def write(self, data: BinaryData) -> int: # type: ignore[override]
"""Encrypt and write the given bytes"""
self._check_not_closed()
raise io.UnsupportedOperation("Write not supported")
Expand Down
8 changes: 4 additions & 4 deletions rohmu/object_storage/azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ def _stream_blob(
allows reading entire blob into memory at once or returning data from random offsets"""
file_size = None
start_range = byte_range[0] if byte_range else 0
chunk_size = self.conn._config.max_chunk_get_size # type: ignore [attr-defined] # pylint: disable=protected-access
chunk_size = self.conn._config.max_chunk_get_size # type: ignore[attr-defined] # pylint: disable=protected-access
end_range = chunk_size - 1
blob = self.conn.get_blob_client(self.container_name, key)
while True:
Expand Down Expand Up @@ -362,13 +362,13 @@ def progress_callback(pipeline_response: Any) -> None:
seekable = hasattr(fd, "seekable") and fd.seekable()
if not seekable:
original_tell = getattr(fd, "tell", None)
fd.tell = lambda: None # type: ignore [assignment,method-assign,return-value]
fd.tell = lambda: None # type: ignore[assignment,method-assign,return-value]
sanitized_metadata = self.sanitize_metadata(metadata, replace_hyphen_with="_")
try:
blob_client = self.conn.get_blob_client(self.container_name, path)
blob_client.upload_blob(
fd,
blob_type=BlobType.BlockBlob, # type: ignore [arg-type]
blob_type=BlobType.BlockBlob, # type: ignore[arg-type]
content_settings=content_settings,
metadata=sanitized_metadata,
raw_response_hook=progress_callback,
Expand All @@ -378,7 +378,7 @@ def progress_callback(pipeline_response: Any) -> None:
finally:
if not seekable:
if original_tell is not None:
fd.tell = original_tell # type: ignore [method-assign]
fd.tell = original_tell # type: ignore[method-assign]
else:
delattr(fd, "tell")

Expand Down
16 changes: 8 additions & 8 deletions rohmu/object_storage/google.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,9 +200,9 @@ def _init_google_client(self) -> Resource:
http = build_http()
if self.proxy_info:
if self.proxy_info.get("type") == "socks5":
proxy_type = httplib2.socks.PROXY_TYPE_SOCKS5 # type: ignore [attr-defined]
proxy_type = httplib2.socks.PROXY_TYPE_SOCKS5 # type: ignore[attr-defined]
else:
proxy_type = httplib2.socks.PROXY_TYPE_HTTP # type: ignore [attr-defined]
proxy_type = httplib2.socks.PROXY_TYPE_HTTP # type: ignore[attr-defined]

http.proxy_info = httplib2.ProxyInfo(
proxy_type,
Expand Down Expand Up @@ -235,7 +235,7 @@ def _object_client(self, *, not_found: Optional[str] = None) -> Iterator[Any]:
if self.gs is None:
self.gs = self._init_google_client()
# https://googleapis.github.io/google-api-python-client/docs/dyn/storage_v1.objects.html
self.gs_object_client = self.gs.objects() # type: ignore [attr-defined] # pylint: disable=no-member
self.gs_object_client = self.gs.objects() # type: ignore[attr-defined] # pylint: disable=no-member
try:
yield self.gs_object_client
except HttpError as ex:
Expand Down Expand Up @@ -589,7 +589,7 @@ def get_or_create_bucket(self, bucket_name: str) -> str:
invalid bucket names ("Invalid bucket name") as well as for invalid
project ("Invalid argument"), try to handle both gracefully."""
start_time = time.time()
gs_buckets = self.gs.buckets() # type: ignore [union-attr] # pylint: disable=no-member
gs_buckets = self.gs.buckets() # type: ignore[union-attr] # pylint: disable=no-member
try:
request = gs_buckets.get(bucket=bucket_name)
reporter = Reporter(StorageOperation.head_request)
Expand Down Expand Up @@ -638,13 +638,13 @@ def __init__(self, fd: BinaryIO, *, chunk_size: int, mime_type: str, name: str)
self._name = name
self._position: Optional[int] = None

def chunksize(self) -> int: # type: ignore [override]
def chunksize(self) -> int: # type: ignore[override]
return self._chunk_size

def mimetype(self) -> str:
return self._mime_type

def size(self) -> Optional[int]: # type: ignore [override]
def size(self) -> Optional[int]: # type: ignore[override]
self.peek()
if len(self._next_chunk) < self.peeksize:
# The total file size should be returned if we have hit the final chunk.
Expand All @@ -666,7 +666,7 @@ def peek(self) -> None:
self._next_chunk = self._read_bytes(self.peeksize - len(self._next_chunk), initial_data=self._next_chunk)

# second parameter is length but baseclass incorrectly names it end
def getbytes(self, begin: int, length: int) -> bytes: # type: ignore [override] # pylint: disable=arguments-renamed
def getbytes(self, begin: int, length: int) -> bytes: # type: ignore[override] # pylint: disable=arguments-renamed
if begin < (self._position or 0):
msg = f"Requested position {begin} for {repr(self._name)} precedes already fulfilled position {self._position}"
raise IndexError(msg)
Expand Down Expand Up @@ -703,7 +703,7 @@ def getbytes(self, begin: int, length: int) -> bytes: # type: ignore [override]
def has_stream(self) -> bool:
return False

def stream(self) -> BinaryIO: # type: ignore [override]
def stream(self) -> BinaryIO: # type: ignore[override]
raise NotImplementedError

def _read_bytes(self, length: int, *, initial_data: Optional[bytes] = None) -> bytes:
Expand Down
2 changes: 1 addition & 1 deletion rohmu/object_storage/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -451,7 +451,7 @@ def multipart_upload_file_object(
bytes_sent += len(data)
if progress_fn:
# TODO: change this to incremental progress. Size parameter is currently unused.
progress_fn(bytes_sent, size) # type: ignore [arg-type]
progress_fn(bytes_sent, size) # type: ignore[arg-type]
break

self.stats.operation(StorageOperation.multipart_complete)
Expand Down
6 changes: 3 additions & 3 deletions rohmu/object_storage/sftp.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def iter_key(
metadata = None

last_modified = datetime.datetime.fromtimestamp(
attr.st_mtime, tz=datetime.timezone.utc # type: ignore [arg-type]
attr.st_mtime, tz=datetime.timezone.utc # type: ignore[arg-type]
)
yield IterKeyItem(
type=KEY_TYPE_OBJECT,
Expand All @@ -160,7 +160,7 @@ def iter_key(
continue

file_key = os.path.join(key.strip("/"), attr.filename)
if S_ISDIR(attr.st_mode): # type: ignore [arg-type]
if S_ISDIR(attr.st_mode): # type: ignore[arg-type]
if deep:
yield from self.iter_key(file_key, with_metadata=with_metadata, deep=True)
else:
Expand All @@ -175,7 +175,7 @@ def iter_key(
metadata = None

last_modified = datetime.datetime.fromtimestamp(
attr.st_mtime, tz=datetime.timezone.utc # type: ignore [arg-type]
attr.st_mtime, tz=datetime.timezone.utc # type: ignore[arg-type]
)
yield IterKeyItem(
type=KEY_TYPE_OBJECT,
Expand Down
2 changes: 1 addition & 1 deletion rohmu/rohmufile.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ def _callback_wrapper(progress_callback: IncrementalProgressCallbackType) -> Inc
return None
sig = signature(progress_callback)
if len(sig.parameters) == 0:
return lambda f: progress_callback() # type: ignore [misc,call-arg]
return lambda f: progress_callback() # type: ignore[misc,call-arg]
return progress_callback


Expand Down
2 changes: 1 addition & 1 deletion rohmu/snappyfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def close(self) -> None:
self.next_fp.flush()
super().close()

def write(self, data: BinaryData) -> int: # type: ignore [override]
def write(self, data: BinaryData) -> int: # type: ignore[override]
self._check_not_closed()
if self.encr is None:
raise io.UnsupportedOperation("file not open for writing")
Expand Down
2 changes: 1 addition & 1 deletion rohmu/typing.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
# Remove when dropping support for Python 3.7
from pickle import PickleBuffer
except ImportError:
PickleBuffer = bytes # type: ignore [misc,assignment]
PickleBuffer = bytes # type: ignore[misc,assignment]
import mmap

if TYPE_CHECKING:
Expand Down
2 changes: 1 addition & 1 deletion rohmu/zstdfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def close(self) -> None:
self.next_fp.flush()
super().close()

def write(self, data: BinaryData) -> int: # type: ignore [override]
def write(self, data: BinaryData) -> int: # type: ignore[override]
self._check_not_closed()
data_as_bytes = bytes(data)
compressed_data = self._zstd.compress(data_as_bytes)
Expand Down
4 changes: 2 additions & 2 deletions test/test_atomic_opener.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def test_error_thrown_if_final_path_parent_doesnt_exist(tmp_path: Path) -> None:

def test_error_mode_doesnt_contain_write(tmp_path: Path) -> None:
with pytest.raises(ValueError):
with atomic_opener(tmp_path, mode="r"): # type: ignore [call-overload]
with atomic_opener(tmp_path, mode="r"): # type: ignore[call-overload]
pass


Expand Down Expand Up @@ -138,7 +138,7 @@ def test_no_fd_leak_if_fdopen_fails_because_of_unknown_mode(tmp_path: Path) -> N
try:
with atomic_opener(
final_path, mode="somethingrandomw", encoding="ascii", _fd_spy=opened_fd.append
): # type: ignore [call-overload]
): # type: ignore[call-overload]
pass
pytest.fail("should fail, mode is wrong")
except ValueError:
Expand Down
2 changes: 1 addition & 1 deletion test/test_encryptor.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"""
from __future__ import annotations

from py.path import LocalPath # type: ignore [import] # pylint: disable=import-error
from py.path import LocalPath # type: ignore[import] # pylint: disable=import-error
from rohmu.common.constants import IO_BLOCK_SIZE
from rohmu.encryptor import Decryptor, DecryptorFile, Encryptor, EncryptorFile, EncryptorStream
from typing import cast, IO
Expand Down
2 changes: 1 addition & 1 deletion test/test_rohmufile.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

def test_fileobj_name(tmpdir: Any) -> None:
with NamedTemporaryFile(dir=tmpdir, suffix="foo") as raw_output_obj:
result = rohmufile._fileobj_name(raw_output_obj) # type: ignore # pylint: disable=protected-access
result = rohmufile._fileobj_name(raw_output_obj) # type: ignore # pylint: disable=protected-access
assert result.startswith("open file ")
assert "foo" in result

Expand Down

0 comments on commit 7b575b8

Please sign in to comment.