fix ja bert path
@@ -566,7 +566,7 @@ def distribute_phone(n_phone, n_word):
 # tokenizer = AutoTokenizer.from_pretrained('cl-tohoku/bert-base-japanese-v3')
-model_id = 'cl-tohoku/bert-base-japanese-v3'
+model_id = 'tohoku-nlp/bert-base-japanese-v3'
 tokenizer = AutoTokenizer.from_pretrained(model_id)

 def g2p(norm_text):
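
For context, a minimal sketch of loading the tokenizer under the renamed Hugging Face ID (the cl-tohoku organization appears to have been renamed to tohoku-nlp, which is what this path fix tracks). The sample text and the dependency note in the comments are assumptions, not part of this commit:

# Minimal sketch, not part of the diff: check that the renamed repo ID resolves.
# Assumes `transformers` is installed; bert-base-japanese-v3 typically also needs
# the `fugashi` and `unidic-lite` packages for its MeCab-based tokenizer.
from transformers import AutoTokenizer

model_id = 'tohoku-nlp/bert-base-japanese-v3'
tokenizer = AutoTokenizer.from_pretrained(model_id)
print(tokenizer.tokenize('こんにちは、世界'))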
@@ -3,13 +3,9 @@ from transformers import AutoTokenizer, AutoModelForMaskedLM
 import sys

-# model = None
-# model_id = 'cl-tohoku/bert-base-japanese-v3'
-# tokenizer = AutoTokenizer.from_pretrained(model_id)
 models = {}
 tokenizers = {}
-def get_bert_feature(text, word2ph, device=None, model_id='cl-tohoku/bert-base-japanese-v3'):
+def get_bert_feature(text, word2ph, device=None, model_id='tohoku-nlp/bert-base-japanese-v3'):
     global model
     global tokenizer
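
This hunk only shows the changed default and the module-level caches, so as a rough illustration of how such a per-model_id cache is usually wired up: the function body below, including the hidden-layer choice and the word2ph expansion, is an assumption for illustration and not code from this commit.

# Illustrative sketch of the caching pattern implied by the `models`/`tokenizers`
# dicts above; details of feature extraction are assumptions, not this repo's code.
import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

models = {}
tokenizers = {}

def get_bert_feature(text, word2ph, device=None, model_id='tohoku-nlp/bert-base-japanese-v3'):
    device = device or ('cuda' if torch.cuda.is_available() else 'cpu')
    if model_id not in models:
        # Load once per model_id and reuse on subsequent calls.
        tokenizers[model_id] = AutoTokenizer.from_pretrained(model_id)
        models[model_id] = AutoModelForMaskedLM.from_pretrained(model_id).to(device)
    tokenizer, model = tokenizers[model_id], models[model_id]
    with torch.no_grad():
        inputs = tokenizer(text, return_tensors='pt').to(device)
        res = model(**inputs, output_hidden_states=True)
        # Take a late hidden layer as token-level features (layer choice is assumed).
        hidden = torch.cat(res.hidden_states[-3:-2], dim=-1)[0].cpu()
    # Repeat each token's feature word2ph[i] times to get phone-level features.
    assert len(word2ph) == hidden.shape[0], 'word2ph must align with the token sequence'
    phone_level = torch.cat(
        [hidden[i].repeat(word2ph[i], 1) for i in range(len(word2ph))], dim=0
    )
    return phone_level.T

With the dict caches keyed by model_id, the `global model` / `global tokenizer` lines kept as context look vestigial, but this commit leaves them untouched.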