def _parse_tag(self, fh):
    self.__tag_parsed = True
    guid = fh.read(16)  # 128 bit GUID
    if guid != b'0&\xb2u\x8ef\xcf\x11\xa6\xd9\x00\xaa\x00b\xcel':
        return  # not a valid ASF container! see: http://www.garykessler.net/library/file_sigs.html
    struct.unpack('Q', fh.read(8))[0]  # size
    struct.unpack('I', fh.read(4))[0]  # obj_count
    if fh.read(2) != b'\x01\x02':
        # http://web.archive.org/web/20131203084402/http://msdn.microsoft.com/en-us/library/bb643323.aspx#_Toc521913958
        return  # not a valid asf header!
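    # iterate over the top-level ASF objects: each one starts with a 16 byte
    # GUID followed by a 64 bit little-endian size that covers the whole
    # object, including this 24 byte header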
    while True:
        object_id = fh.read(16)
        object_size = _bytes_to_int_le(fh.read(8))
        if object_size == 0 or object_size > self.filesize:
            break  # invalid object, stop parsing.
        if object_id == Wma.ASF_CONTENT_DESCRIPTION_OBJECT:
            len_blocks = self.read_blocks(fh, [
                ('title_length', 2, True),
                ('author_length', 2, True),
                ('copyright_length', 2, True),
                ('description_length', 2, True),
                ('rating_length', 2, True),
            ])
            data_blocks = self.read_blocks(fh, [
                ('title', len_blocks['title_length'], False),
                ('artist', len_blocks['author_length'], False),
                ('', len_blocks['copyright_length'], True),
                ('comment', len_blocks['description_length'], False),
                ('', len_blocks['rating_length'], True),
            ])
            for field_name, bytestring in data_blocks.items():
                if field_name:
                    self._set_field(field_name, bytestring, self.__decode_string)
        elif object_id == Wma.ASF_EXTENDED_CONTENT_DESCRIPTION_OBJECT:
            mapping = {
                'WM/TrackNumber': 'track',
                'WM/PartOfSet': 'disc',
                'WM/Year': 'year',
                'WM/AlbumArtist': 'albumartist',
                'WM/Genre': 'genre',
                'WM/AlbumTitle': 'album',
                'WM/Composer': 'composer',
            }
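            # each descriptor is stored as name length, name, value type and
            # value length, followed by the value itself; __decode_ext_desc
            # interprets the raw value bytes according to value_type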
            # see: http://web.archive.org/web/20131203084402/http://msdn.microsoft.com/en-us/library/bb643323.aspx#_Toc509555195
            descriptor_count = _bytes_to_int_le(fh.read(2))
            for _ in range(descriptor_count):
                name_len = _bytes_to_int_le(fh.read(2))
                name = self.__decode_string(fh.read(name_len))
                value_type = _bytes_to_int_le(fh.read(2))
                value_len = _bytes_to_int_le(fh.read(2))
                value = fh.read(value_len)
                field_name = mapping.get(name)
                if field_name:
                    field_value = self.__decode_ext_desc(value_type, value)
                    self._set_field(field_name, field_value)
        elif object_id == Wma.ASF_FILE_PROPERTY_OBJECT:
            blocks = self.read_blocks(fh, [
                ('file_id', 16, False),
                ('file_size', 8, False),
                ('creation_date', 8, True),
                ('data_packets_count', 8, True),
                ('play_duration', 8, True),
                ('send_duration', 8, True),
                ('preroll', 8, True),
                ('flags', 4, False),
                ('minimum_data_packet_size', 4, True),
                ('maximum_data_packet_size', 4, True),
                ('maximum_bitrate', 4, False),
            ])
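            # play_duration is given in 100-nanosecond units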
            self.duration = blocks.get('play_duration') / float(10000000)
        elif object_id == Wma.ASF_STREAM_PROPERTIES_OBJECT:
            blocks = self.read_blocks(fh, [
                ('stream_type', 16, False),
                ('error_correction_type', 16, False),
                ('time_offset', 8, True),
                ('type_specific_data_length', 4, True),
                ('error_correction_data_length', 4, True),
                ('flags', 2, True),
                ('reserved', 4, False)
            ])
            already_read = 0
            if blocks['stream_type'] == Wma.STREAM_TYPE_ASF_AUDIO_MEDIA:
                stream_info = self.read_blocks(fh, [
                    ('codec_id_format_tag', 2, True),
                    ('number_of_channels', 2, True),
                    ('samples_per_second', 4, True),
                    ('avg_bytes_per_second', 4, True),
                    ('block_alignment', 2, True),
                    ('bits_per_sample', 2, True),
                ])
                self.samplerate = stream_info['samples_per_second']
                self.bitrate = stream_info['avg_bytes_per_second'] * 8 / float(1000)
                already_read = 16
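            # skip the remainder of the type specific data (16 bytes of it were
            # already consumed above for audio streams) plus the error
            # correction data, so the next iteration starts at a new object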
            fh.seek(blocks['type_specific_data_length'] - already_read, os.SEEK_CUR)
            fh.seek(blocks['error_correction_data_length'], os.SEEK_CUR)
        else:
            fh.seek(object_size - 24, os.SEEK_CUR)  # read over unknown object ids
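
# The snippet above relies on a module-level helper, _bytes_to_int_le, that is
# not shown here. A minimal sketch of such a helper, assuming the fields are
# unsigned little-endian integers (which is how ASF stores object sizes and
# descriptor lengths); this is not necessarily tinytag's exact implementation:
def _bytes_to_int_le(b):
    # interpret the raw bytes as an unsigned little-endian integer (Python 3)
    return int.from_bytes(b, byteorder='little')
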
def _get_parser_for_filename(cls, filename, exception=False):
    mapping = {
        ('.mp3',): ID3,
        ('.oga', '.ogg', '.opus'): Ogg,
        ('.wav',): Wave,
        ('.flac',): Flac,
        ('.wma',): Wma,
        ('.m4b', '.m4a', '.mp4'): MP4,
    }
    for fileextension, tagclass in mapping.items():
        if filename.lower().endswith(fileextension):
            return tagclass
    if exception:
        raise TinyTagException('No tag reader found to support filetype! ')
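
# _get_parser_for_filename takes cls as its first argument, which suggests it
# is used as a classmethod on the TinyTag base class; it maps a filename
# suffix to the parser class for that format. A rough, hypothetical usage
# sketch:
#
#     parser_class = TinyTag._get_parser_for_filename('album/track01.wma')
#     # parser_class would be the Wma parser from the snippet above
#     TinyTag._get_parser_for_filename('notes.txt', exception=True)
#     # would raise TinyTagException, since no parser handles '.txt'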