def test_g2p(self):

    for word, ipa in G2P_TESTS:
        sq_ipa = sequitur_gen_ipa (MODELFN, word)
        self.assertEqual (sq_ipa, ipa)
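# --- hedged usage sketch (not part of the original snippets) ---
# sequitur_gen_ipa takes the path to a trained Sequitur G2P model and a word
# and returns the generated IPA string; the model path and word below are
# placeholders only, and sequitur_gen_ipa is assumed to be imported as in the
# surrounding code.
ipa = sequitur_gen_ipa('path/to/sequitur-model', u'beispiel')
print(ipa)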
# select the MaryTTS voice used for playback
tts.engine = 'mary'
tts.voice = 'bits3'

lex_token = token
lex_base = token.split('_')[0]

# look the token up in the lexicon; generate an IPA entry if it is missing
if lex_token in lex:
    lex_entry = lex[lex_token]
else:
    if DEFAULT_MARY:
        ipas = tts.gen_ipa (lex_base)
    else:
        ipas = sequitur_gen_ipa (SEQUITUR_MODEL, lex_base)

    lex_entry = {'ipa': ipas}
    lex[lex_token] = lex_entry

ipas = lex_entry['ipa']

# speak the entry, logging (rather than crashing on) any TTS error
try:
    tts.say_ipa(ipas)
except:
    logging.error('EXCEPTION CAUGHT %s' % traceback.format_exc())
def lex_gen_ipa (lex_base, locale, engine, voice, speak=False):

    global tts

    ipas = u''

    try:
        if engine == 'sequitur':
            ipas = sequitur_gen_ipa (SEQUITUR_MODEL, lex_base)
        else:
            tts.locale = locale
            tts.engine = engine
            tts.voice = voice

            ipas = tts.gen_ipa (lex_base)

        if speak:
            # playback always uses the German MaryTTS HSMM voice
            tts.locale = 'de'
            tts.engine = 'mary'
            tts.voice = 'dfki-pavoque-neutral-hsmm'

            # NOTE: this keyword argument stems from the Python 2 version of
            # the code; 'async' is a reserved word from Python 3.7 on
            tts.say_ipa(ipas, async=True)

    except:
        logging.error('EXCEPTION CAUGHT %s' % traceback.format_exc())
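# --- hedged usage sketch (not part of the original snippets) ---
# The word below is a placeholder; SEQUITUR_MODEL and the global tts object
# are assumed to be set up as elsewhere in these snippets. This variant does
# not return the IPA string; with speak=True it plays it back via the German
# MaryTTS voice configured above.
lex_gen_ipa(u'beispiel', 'de', 'sequitur', None, speak=True)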
# command dispatch from what appears to be an interactive lexicon editor;
# c holds the single-character command entered by the user

# generate fr-mary
elif c == 'k':
    tts.locale = 'fr'
    tts.engine = 'mary'
    tts.voice = 'upmc-pierre-hsmm'

    ipas = tts.gen_ipa (lex_base)
    tts.say_ipa(ipas)

    lex_entry['ipa'] = ipas

# generate de-sequitur
elif c == 'j':
    ipas = sequitur_gen_ipa (SEQUITUR_MODEL, lex_base)

    tts.locale = 'de'
    tts.engine = 'mary'
    tts.voice = 'bits3'
    tts.say_ipa(ipas)

    lex_entry['ipa'] = ipas

# speak de mary unitsel
elif c == 'p':
    if len(lex_entry['ipa']) == 0:
        continue

    ipas = lex_entry['ipa']

    tts.locale = 'de'
    tts.engine = 'mary'
# variant that returns the generated IPA string instead of only speaking it
def lex_gen_ipa (lex_base, locale, engine, voice, speak=False):

    global tts

    if engine == 'sequitur':
        ipas = sequitur_gen_ipa (SEQUITUR_MODEL, lex_base)
    else:
        tts.locale = locale
        tts.engine = engine
        tts.voice = voice

        ipas = tts.gen_ipa (lex_base)

    if speak:
        say_ipa ('de', 'mary', 'dfki-pavoque-neutral-hsmm', ipas)

    return ipas
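# --- hedged usage sketch (not part of the original snippets) ---
# With the returning variant above, the generated IPA can be stored directly
# in a lexicon entry; 'beispiel' is a placeholder word and lex is assumed to
# behave like a dict, as in the other snippets.
word = u'beispiel'
lex[word] = {'ipa': lex_gen_ipa(word, 'de', 'sequitur', None)}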
# inside the loop over transcript entries (ts): count words that are
# missing from the lexicon
if ts['quality'] > 0:
    continue

for word in tokenize(ts['prompt']):
    if word in lex:
        continue
    if word in missing:
        missing[word] += 1
    else:
        missing[word] = 1

# after that loop: add the missing words to the lexicon, most frequent first
cnt = 0
for item in reversed(sorted(missing.items(), key=lambda x: x[1])):
    lex_base = item[0]
    ipas = sequitur_gen_ipa(sequitur_model_path, lex_base)

    logging.info(u"%5d/%5d Adding missing word : %s [ %s ]" % (
        cnt, len(missing), item[0], ipas))

    lex_entry = {'ipa': ipas}
    lex[lex_base] = lex_entry

    cnt += 1

return lex
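# --- hedged alternative sketch (not part of the original snippets) ---
# The word counting above can also be written with collections.Counter;
# ts_list is an assumed name for the iterable of transcript entries, while
# tokenize and lex come from the surrounding code.
from collections import Counter

def count_missing_words(ts_list, lex):
    missing = Counter()
    for ts in ts_list:
        if ts['quality'] > 0:
            continue
        missing.update(w for w in tokenize(ts['prompt']) if w not in lex)
    return missing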
# inside the loop over missing words (item = (word, count)): append the word
# to the output file and optionally generate phonemes for it
with codecs.open(options.output_file, 'a', 'utf8') as outf:
    outf.write(u'%s\n' % item[0])

if options.generate:
    if wiktionary:
        if not item[0] in wiktionary:
            logging.info(u"%4d/%4d not generating phonemes for entry %s because it is not covered by wiktionary" % (cnt, options.num_words, item[0]))
            continue

    if options.max_occurences:
        if item[1] > options.max_occurences:
            logging.info(u"%4d/%4d not generating phonemes for entry %s because it is too common" % (cnt, options.num_words, item[0]))
            continue

    ipas = sequitur_gen_ipa (sequitur_model, item[0])
    logging.info(u"%4d/%4d generated lex entry: %s -> %s" % (cnt, options.num_words, item[0], ipas))
    lex[item[0]] = {'ipa': ipas}

# after the loop: report statistics and save the lexicon if entries were generated
logging.info("%d missing words total. %d submissions lack at least one word, %d are covered fully by the lexicon." % (len(missing), num_ts_lacking, num_ts_complete))

if options.generate:
    logging.info('saving lexicon...')
    lex.save()
    logging.info('saving lexicon...done.')