if dataset_name in CUSTOM_TEST_TRACKS:
trackid = CUSTOM_TEST_TRACKS[dataset_name]
else:
trackid = dataset.track_ids()[0]
track_default = dataset.Track(trackid)
assert track_default._data_home == os.path.join(
DEFAULT_DATA_HOME, dataset.DATASET_DIR
)
# test data home specified
data_home = os.path.join(data_home_dir, dataset.DATASET_DIR)
track_test = dataset.Track(trackid, data_home=data_home)
assert isinstance(track_test, track.Track)
assert hasattr(track_test, 'to_jams')
# Validate JSON schema
jam = track_test.to_jams()
assert jam.validate()
# will fail if something goes wrong with __repr__
print(track_test)
with pytest.raises(ValueError):
dataset.Track('~faketrackid~?!')
track_custom = dataset.Track(trackid, data_home='casa/de/data')
assert track_custom._data_home == 'casa/de/data'
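
# A minimal sketch (not the repository's actual test) of how the generic Track
# checks above could be driven across all loaders with pytest. It assumes
# `mirdata.__all__` lists the loader module names and that CUSTOM_TEST_TRACKS
# is the same mapping used above; both names come from the surrounding test
# module, not from this sketch.
import importlib

import pytest

import mirdata


@pytest.mark.parametrize('dataset_name', mirdata.__all__)
def test_track_smoke(dataset_name):
    dataset = importlib.import_module('mirdata.{}'.format(dataset_name))
    # Fall back to the first indexed track when no custom test track is listed.
    trackid = CUSTOM_TEST_TRACKS.get(dataset_name, dataset.track_ids()[0])
    track_test = dataset.Track(trackid)
    assert track_test.track_id == trackid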
url='https://zenodo.org/record/3371780/files/audio_mono-pickup_mix.zip?download=1',
checksum='aecce79f425a44e2055e46f680e10f6a',
destination_dir='audio_mono-pickup_mix',
)
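
# Hedged reconstruction of the (truncated) remote-file descriptor above,
# assuming mirdata.download_utils.RemoteFileMetadata with the fields
# filename/url/checksum/destination_dir. The variable name and the filename
# field are illustrative guesses; the url, checksum and destination_dir values
# are the ones shown above.
from mirdata import download_utils

AUDIO_MIX_REMOTE = download_utils.RemoteFileMetadata(
    filename='audio_mono-pickup_mix.zip',
    url='https://zenodo.org/record/3371780/files/audio_mono-pickup_mix.zip?download=1',
    checksum='aecce79f425a44e2055e46f680e10f6a',
    destination_dir='audio_mono-pickup_mix',
)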
_STYLE_DICT = {
'Jazz': 'Jazz',
'BN': 'Bossa Nova',
'Rock': 'Rock',
'SS': 'Singer-Songwriter',
'Funk': 'Funk',
}
_GUITAR_STRINGS = ['E', 'A', 'D', 'G', 'B', 'e']
DATA = utils.LargeData('guitarset_index.json')
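
# Hypothetical helper (not part of the module) showing how _STYLE_DICT resolves
# a full style name from a GuitarSet track id such as '00_BN1-129-Eb_comp',
# where the token before the first '-' ends with the style abbreviation plus a
# progression number. The id format is assumed from the GuitarSet naming
# convention.
def _style_from_track_id(track_id):
    style_token = track_id.split('_')[1].split('-')[0]    # e.g. 'BN1'
    return _STYLE_DICT[style_token.rstrip('0123456789')]  # 'BN' -> 'Bossa Nova'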
class Track(track.Track):
"""guitarset Track class
Args:
track_id (str): track id of the track
data_home (str): Local path where the dataset is stored. default=None
If `None`, looks for the data in the default directory, `~/mir_datasets`
Attributes:
        audio_hex_cln_path (str): path to the bleed-removed (cleaned) hexaphonic pickup wave file
        audio_hex_path (str): path to the original hexaphonic pickup wave file
        audio_mic_path (str): path to the mono wave file recorded with a microphone
        audio_mix_path (str): path to the mono wave file obtained by downmixing the hex pickup
jams_path (str): path to the jams file
mode (str): one of ['solo', 'comp']
            For each excerpt, players were asked to first record a 'comp' (accompaniment)
            take, and then a 'solo' take on top of the already recorded comp.
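
# Usage sketch for the guitarset loader (the docstring above is truncated in
# this excerpt). Assumes GuitarSet has been downloaded to the default data
# home, ~/mir_datasets, and uses only names shown elsewhere in this file
# (Track, track_ids, to_jams):
from mirdata import guitarset

example_track = guitarset.Track(guitarset.track_ids()[0])
print(example_track.mode)       # 'solo' or 'comp'
jam = example_track.to_jams()   # JAMS object, as exercised in the generic test above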
"Dynamics ID": int(row[10]),
"Instance ID": int(row[11]),
"Resampled": (row[13] == "TRUE"),
}
if len(row[12]) > 0:
metadata_index[key]["String ID"] = int(float(row[12]))
metadata_index["data_home"] = data_home
return metadata_index
DATA = utils.LargeData("tinysol_index.json", _load_metadata)
class Track(track.Track):
"""tinysol Track class
Args:
track_id (str): track id of the track
data_home (str): Local path where the dataset is stored. default=None
If `None`, looks for the data in the default directory, `~/mir_datasets`
Attributes:
audio_path (str): path of the audio file
dynamics (str): dynamics abbreviation. Ex: pp, mf, ff, etc.
dynamics_id (int): pp=0, p=1, mf=2, f=3, ff=4
family (str): instrument family encoded by its English name
        instance_id (int): instance ID, one of 0, 1, 2, or 3
instrument_abbr (str): instrument abbreviation
instrument_full (str): instrument encoded by its English name
        is_resampled (bool): True if this sample was pitch-shifted from a neighboring recording; False if it was recorded directly.
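
# Usage sketch for the tinysol loader (the docstring above is truncated in this
# excerpt). Assumes TinySOL has been downloaded locally and uses only
# attributes documented above:
from mirdata import tinysol

example_track = tinysol.Track(tinysol.track_ids()[0])
print(example_track.instrument_full, example_track.dynamics, example_track.is_resampled)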
metadata_index[str(track_id)] = {
"subset": str(subset),
"instrument": str(instrument_str),
"instrument_id": int(instrument_id),
"song_id": int(song_id),
}
metadata_index["data_home"] = data_home
return metadata_index
DATA = utils.LargeData("medley_solos_db_index.json", _load_metadata)
class Track(track.Track):
"""medley_solos_db Track class
Args:
track_id (str): track id of the track
data_home (str): Local path where the dataset is stored. default=None
If `None`, looks for the data in the default directory, `~/mir_datasets`
Attributes:
audio_path (str): path to the track's audio file
instrument (str): instrument encoded by its English name
instrument_id (int): instrument encoded as an integer
song_id (int): song encoded as an integer
        subset (str): one of 'train', 'validation', or 'test'
track_id (str): track id
"""
if not os.path.exists(metadata_path):
logging.info('Metadata file {} not found.'.format(metadata_path))
return None
with open(metadata_path, 'r') as fhandle:
metadata = json.load(fhandle)
metadata['data_home'] = data_home
return metadata
DATA = utils.LargeData('medleydb_pitch_index.json', _load_metadata)
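
# Hedged sketch of how a Track would pull its entry from the JSON metadata
# loaded above (assuming the file is keyed by track id with fields matching
# the attributes documented below; the path is a placeholder):
example_id = list(DATA.index.keys())[0]
metadata = DATA.metadata('/path/to/MedleyDB-Pitch')
entry = metadata.get(example_id, {})
artist, instrument = entry.get('artist'), entry.get('instrument')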
class Track(track.Track):
"""medleydb_pitch Track class
Args:
track_id (str): track id of the track
data_home (str): Local path where the dataset is stored. default=None
If `None`, looks for the data in the default directory, `~/mir_datasets`
Attributes:
artist (str): artist
audio_path (str): path to the audio file
genre (str): genre
instrument (str): instrument of the track
pitch_path (str): path to the pitch annotation file
title (str): title
track_id (str): track id
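
# Usage sketch for the medleydb_pitch loader (the docstring above is truncated
# in this excerpt). Assumes MedleyDB-Pitch is available locally and uses only
# attributes documented above:
from mirdata import medleydb_pitch

example_track = medleydb_pitch.Track(medleydb_pitch.track_ids()[0])
print(example_track.instrument, example_track.pitch_path)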