Merge pull request #622 from lazka/random-annotations
Add some type annotations
lazka committed Jul 30, 2023
2 parents c0f5457 + b5008ce commit fd71e21
Showing 9 changed files with 60 additions and 47 deletions.
14 changes: 8 additions & 6 deletions mutagen/_file.py
@@ -6,6 +6,7 @@
# (at your option) any later version.

import warnings
from typing import List

from mutagen._util import DictMixin, loadfile

@@ -83,7 +84,7 @@ def __delitem__(self, key):
else:
del self.tags[key]

def keys(self):
def keys(self) -> list:
"""Return a list of keys in the metadata tag.
If the file has no tags at all, an empty list is returned.
@@ -130,12 +131,13 @@ def save(self, filething=None, **kwargs):
if self.tags is not None:
return self.tags.save(filething, **kwargs)

def pprint(self):
def pprint(self) -> str:
"""
Returns:
text: stream information and comment key=value pairs.
"""

assert self.info is not None
stream = "%s (%s)" % (self.info.pprint(), self.mime[0])
try:
tags = self.tags.pprint()
@@ -144,7 +146,7 @@ def pprint(self):
else:
return stream + ((tags and "\n" + tags) or "")

def add_tags(self):
def add_tags(self) -> None:
"""Adds new tags to the file.
Raises:
@@ -155,7 +157,7 @@
raise NotImplementedError

@property
def mime(self):
def mime(self) -> List[str]:
"""A list of mime types (:class:`mutagen.text`)"""

mimes = []
@@ -166,7 +168,7 @@ def mime(self):
return mimes

@staticmethod
def score(filename, fileobj, header):
def score(filename, fileobj, header) -> int:
"""Returns a score for how likely the file can be parsed by this type.
Args:
@@ -194,7 +196,7 @@ class StreamInfo(object):

__module__ = "mutagen"

def pprint(self):
def pprint(self) -> str:
"""
Returns:
text: Print stream information
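
The annotations above stick to typing.List rather than the builtin list[...] generic, presumably so they stay importable on the pre-3.9 Pythons mutagen still supported at the time. As a rough sketch of how the annotated FileType surface reads from the caller's side (the path is a placeholder, and mutagen.File may return None for unrecognized input):

    from typing import List

    import mutagen

    def summarize(path: str) -> List[str]:
        # Hypothetical helper: exercise the newly annotated methods.
        f = mutagen.File(path)
        if f is None:
            return ["unrecognized file"]
        lines: List[str] = ["mime: " + ", ".join(f.mime)]   # mime -> List[str]
        lines.extend(str(key) for key in f.keys())           # keys() -> list
        lines.append(f.pprint())                              # pprint() -> str
        return lines
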
16 changes: 8 additions & 8 deletions mutagen/_iff.py
@@ -37,7 +37,7 @@ class EmptyChunk(InvalidChunk):
pass


def is_valid_chunk_id(id):
def is_valid_chunk_id(id: str) -> bool:
""" is_valid_chunk_id(FOURCC)
Arguments:
@@ -56,7 +56,7 @@ def is_valid_chunk_id(id):


# Assert FOURCC formatted valid
def assert_valid_chunk_id(id):
def assert_valid_chunk_id(id: str) -> None:
if not is_valid_chunk_id(id):
raise ValueError("IFF chunk ID must be four ASCII characters.")

@@ -126,13 +126,13 @@ def __repr__(self):
% (type(self).__name__, self.id, self.offset, self.size,
self.data_offset, self.data_size))

def read(self):
def read(self) -> bytes:
"""Read the chunks data"""

self._fileobj.seek(self.data_offset)
return self._fileobj.read(self.data_size)

def write(self, data):
def write(self, data: bytes) -> None:
"""Write the chunk data"""

if len(data) > self.data_size:
@@ -146,7 +146,7 @@ def write(self, data):
self._fileobj.seek(self.data_offset + self.data_size)
self._fileobj.write(b'\x00' * padding)

def delete(self):
def delete(self) -> None:
"""Removes the chunk from the file"""

delete_bytes(self._fileobj, self.size, self.offset)
@@ -172,7 +172,7 @@ def _calculate_size(self):
self.size = self.HEADER_SIZE + self.data_size + self.padding()
assert self.size % 2 == 0

def resize(self, new_data_size):
def resize(self, new_data_size: int) -> None:
"""Resize the file and update the chunk sizes"""

old_size = self._get_actual_data_size()
@@ -183,14 +183,14 @@ def resize(self, new_data_size):
self._update_size(size_diff)
self._fileobj.flush()

def padding(self):
def padding(self) -> int:
"""Returns the number of padding bytes (0 or 1).
IFF chunks are required to be an even number in total length. If
data_size is odd a padding byte will be added at the end.
"""
return self.data_size % 2

def _get_actual_data_size(self):
def _get_actual_data_size(self) -> int:
"""Returns the data size that is actually possible.
Some files have chunks that are truncated and their reported size
would be outside of the file's actual size."""
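
The padding rule documented above is simple enough to state directly; a standalone sketch (not mutagen code) of how an IFF chunk's on-disk size comes together, assuming the usual 8-byte FOURCC-plus-size header:

    def iff_chunk_size(data_size: int, header_size: int = 8) -> int:
        # Header + data + one pad byte when the data length is odd.
        return header_size + data_size + (data_size % 2)

    assert iff_chunk_size(10) == 18    # even payload, no padding
    assert iff_chunk_size(11) == 20    # odd payload gets a pad byte
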
4 changes: 2 additions & 2 deletions mutagen/_tags.py
@@ -36,11 +36,11 @@ def my_callback(info: PaddingInfo):
size (`int`): The amount of data following the padding
"""

def __init__(self, padding, size):
def __init__(self, padding: int, size: int):
self.padding = padding
self.size = size

def get_default_padding(self):
def get_default_padding(self) -> int:
"""The default implementation which tries to select a reasonable
amount of padding and which might change in future versions.
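
The PaddingInfo constructor and get_default_padding annotated here are what a user-supplied padding callback works with; a minimal sketch of such a callback (the 1 KiB floor is only an example policy):

    from mutagen._tags import PaddingInfo

    def my_padding(info: PaddingInfo) -> int:
        # info.padding is the padding currently available (it may be negative),
        # info.size is the amount of data that follows it.
        return max(info.get_default_padding(), 1024)

    # Typically handed to a tag save, e.g. audio.save(padding=my_padding),
    # assuming audio is a loaded FileType whose format supports padding.
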
44 changes: 25 additions & 19 deletions mutagen/_util.py
@@ -17,6 +17,7 @@
import errno
import decimal
from io import BytesIO
from typing import Tuple, List

from collections import namedtuple
from contextlib import contextmanager
@@ -50,7 +51,7 @@ def iterbytes(b):
return (bytes([v]) for v in b)


def intround(value):
def intround(value: float) -> int:
"""Given a float returns a rounded int. Should give the same result on
both Py2/3
"""
@@ -59,7 +60,7 @@ def intround(value):
value).to_integral_value(decimal.ROUND_HALF_EVEN))
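
A quick aside on intround's behaviour: it rounds half-to-even via decimal, so ties go to the even neighbour rather than always up.

    from mutagen._util import intround

    assert intround(1.5) == 2
    assert intround(2.5) == 2     # tie goes to the even neighbour, not up to 3
    assert intround(-3.5) == -4
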


def is_fileobj(fileobj):
def is_fileobj(fileobj) -> bool:
"""Returns:
bool: if an argument passed to mutagen should be treated as a
file object
@@ -614,7 +615,7 @@ class cdata(object):
_fill_cdata(cdata)


def get_size(fileobj):
def get_size(fileobj) -> int:
"""Returns the size of the file.
The position when passed in will be preserved if no error occurs.
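
The documented contract is easy to demonstrate with an in-memory file object:

    from io import BytesIO
    from mutagen._util import get_size

    f = BytesIO(b"12345")
    f.seek(2)
    assert get_size(f) == 5    # size of the whole stream
    assert f.tell() == 2       # original position is restored
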
@@ -634,7 +635,7 @@ def get_size(fileobj):
fileobj.seek(old_pos, 0)


def read_full(fileobj, size):
def read_full(fileobj, size: int) -> None:
"""Like fileobj.read but raises IOError if not all requested data is
returned.
@@ -657,7 +658,7 @@ def read_full(fileobj, size):
return data


def seek_end(fileobj, offset):
def seek_end(fileobj, offset: int) -> None:
"""Like fileobj.seek(-offset, 2), but will not try to go beyond the start
Needed since file objects from BytesIO will not raise IOError and
@@ -682,7 +683,7 @@ def seek_end(fileobj, offset):
fileobj.seek(-offset, 2)


def resize_file(fobj, diff, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
def resize_file(fobj, diff: int, BUFFER_SIZE: int = _DEFAULT_BUFFER_SIZE) -> None:
"""Resize a file by `diff`.
New space will be filled with zeros.
@@ -719,7 +720,8 @@ def resize_file(fobj, diff, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
raise


def move_bytes(fobj, dest, src, count, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
def move_bytes(fobj, dest: int, src: int, count: int,
BUFFER_SIZE: int = _DEFAULT_BUFFER_SIZE) -> None:
"""Moves data around using read()/write().
Args:
@@ -762,7 +764,8 @@ def move_bytes(fobj, dest, src, count, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
fobj.flush()


def insert_bytes(fobj, size, offset, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
def insert_bytes(fobj, size: int, offset: int,
BUFFER_SIZE: int = _DEFAULT_BUFFER_SIZE) -> None:
"""Insert size bytes of empty space starting at offset.
fobj must be an open file object, open rb+ or
@@ -790,7 +793,8 @@ def insert_bytes(fobj, size, offset, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
move_bytes(fobj, offset + size, offset, movesize, BUFFER_SIZE)


def delete_bytes(fobj, size, offset, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
def delete_bytes(fobj, size: int, offset: int,
BUFFER_SIZE: int = _DEFAULT_BUFFER_SIZE) -> None:
"""Delete size bytes of empty space starting at offset.
fobj must be an open file object, open rb+ or
@@ -818,7 +822,7 @@ def delete_bytes(fobj, size, offset, BUFFER_SIZE=_DEFAULT_BUFFER_SIZE):
resize_file(fobj, -size, BUFFER_SIZE)


def resize_bytes(fobj, old_size, new_size, offset):
def resize_bytes(fobj, old_size: int, new_size: int, offset: int) -> None:
"""Resize an area in a file adding and deleting at the end of it.
Does nothing if no resizing is needed.
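
For a feel of how the space-management helpers annotated here behave, a small round trip with BytesIO standing in for the rb+ file object they expect (the payload bytes are arbitrary):

    from io import BytesIO
    from mutagen._util import insert_bytes, delete_bytes

    f = BytesIO(b"HEADERBODY")
    insert_bytes(f, 4, 6)           # make room for 4 bytes at offset 6
    f.seek(6)
    f.write(b"TAG!")                # fill the gap ourselves
    assert f.getvalue() == b"HEADERTAG!BODY"

    delete_bytes(f, 4, 6)           # shrink it back
    assert f.getvalue() == b"HEADERBODY"
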
@@ -864,7 +868,8 @@ def dict_match(d, key, default=None):
return default


def encode_endian(text, encoding, errors="strict", le=True):
def encode_endian(text: str, encoding: str,
errors: str = "strict", le: bool = True) -> bytes:
"""Like text.encode(encoding) but always returns little endian/big endian
BOMs instead of the system one.
@@ -896,7 +901,8 @@ def encode_endian(text, encoding, errors="strict", le=True):
return text.encode(encoding, errors)


def decode_terminated(data, encoding, strict=True):
def decode_terminated(data: bytes, encoding: str,
strict: bool = True) -> Tuple[str, bytes]:
"""Returns the decoded data until the first NULL terminator
and all data after it.
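
A short round trip through the two text helpers touched here: encode_endian pins the BOM to the requested byte order, and decode_terminated splits a NULL-terminated string from whatever follows it.

    from mutagen._util import encode_endian, decode_terminated

    data = encode_endian(u"abc", "utf-16", le=True)
    assert data == b"\xff\xfea\x00b\x00c\x00"       # little-endian BOM, always

    text, rest = decode_terminated(data + b"\x00\x00" + b"rest", "utf-16")
    assert (text, rest) == (u"abc", b"rest")
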
@@ -936,7 +942,7 @@ def decode_terminated(data, encoding, strict=True):

# slow path
decoder = codec_info.incrementaldecoder()
r = []
r: List[str] = []
for i, b in enumerate(iterbytes(data)):
c = decoder.decode(b)
if c == u"\x00":
@@ -962,7 +968,7 @@ def __init__(self, fileobj):
self._bits = 0
self._pos = fileobj.tell()

def bits(self, count):
def bits(self, count: int) -> int:
"""Reads `count` bits and returns an uint, MSB read first.
May raise BitReaderError if not enough data could be read or
@@ -987,7 +993,7 @@ def bits(self, count):
assert self._bits < 8
return value

def bytes(self, count):
def bytes(self, count: int) -> bytes:
"""Returns a bytearray of length `count`. Works unaligned."""

if count < 0:
@@ -1002,7 +1008,7 @@ def bytes(self, count):

return bytes(bytearray(self.bits(8) for _ in range(count)))

def skip(self, count):
def skip(self, count: int) -> None:
"""Skip `count` bits.
Might raise BitReaderError if there wasn't enough data to skip,
@@ -1021,20 +1027,20 @@ def skip(self, count):
count -= n_bytes * 8
self.bits(count)

def get_position(self):
def get_position(self) -> int:
"""Returns the amount of bits read or skipped so far"""

return (self._fileobj.tell() - self._pos) * 8 - self._bits

def align(self):
def align(self) -> int:
"""Align to the next byte, returns the amount of bits skipped"""

bits = self._bits
self._buffer = 0
self._bits = 0
return bits

def is_aligned(self):
def is_aligned(self) -> bool:
"""If we are currently aligned to bytes and nothing is buffered"""

return self._bits == 0
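
The BitReader methods annotated above read an MSB-first bit stream on top of any file object; a compact sketch of the API:

    from io import BytesIO
    from mutagen._util import BitReader

    r = BitReader(BytesIO(b"\xab\xcd"))    # bits: 1010 1011 1100 1101
    assert r.bits(4) == 0b1010             # most significant bits come first
    assert not r.is_aligned()              # 4 bits still buffered
    assert r.bits(12) == 0b101111001101
    assert r.get_position() == 16          # all 16 bits consumed
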
6 changes: 3 additions & 3 deletions mutagen/_vorbis.py
@@ -22,7 +22,7 @@
from mutagen._util import DictMixin, cdata, MutagenError, reraise


def is_valid_key(key):
def is_valid_key(key: str) -> bool:
"""Return true if a string is a valid Vorbis comment key.
Valid Vorbis comment keys are printable ASCII between 0x20 (space)
@@ -158,7 +158,7 @@ def validate(self):

return True

def clear(self):
def clear(self) -> None:
"""Clear all keys from the comment."""

for i in list(self):
@@ -196,7 +196,7 @@ def _encode(value):
f.write(b"\x01")
return f.getvalue()

def pprint(self):
def pprint(self) -> str:

def _decode(value):
if not isinstance(value, str):
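
The key-validation rule annotated at the top of this file is easy to exercise directly (the sample keys are arbitrary):

    from mutagen._vorbis import is_valid_key

    assert is_valid_key("TITLE")
    assert is_valid_key("replaygain_track_gain")
    assert not is_valid_key("bad=key")    # '=' separates key and value
    assert not is_valid_key("")           # empty keys are rejected
    assert not is_valid_key("caf\xe9")    # non-ASCII is rejected
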
7 changes: 4 additions & 3 deletions mutagen/asf/_util.py
@@ -7,6 +7,7 @@
# (at your option) any later version.

import struct
from typing import List

from mutagen._util import MutagenError

@@ -23,7 +24,7 @@ class ASFHeaderError(error):
pass


def guid2bytes(s):
def guid2bytes(s: str) -> bytes:
"""Converts a GUID to the serialized bytes representation"""

assert isinstance(s, str)
@@ -37,13 +38,13 @@ def guid2bytes(s):
])


def bytes2guid(s):
def bytes2guid(s: bytes) -> str:
"""Converts a serialized GUID to a text GUID"""

assert isinstance(s, bytes)

u = struct.unpack
v = []
v: List[int] = []
v.extend(u("<IHH", s[:8]))
v.extend(u(">HQ", s[8:10] + b"\x00\x00" + s[10:]))
return "%08X-%04X-%04X-%04X-%012X" % tuple(v)