fix ja bert path
@@ -3,13 +3,9 @@ from transformers import AutoTokenizer, AutoModelForMaskedLM
 import sys
 # model = None
 # model_id = 'cl-tohoku/bert-base-japanese-v3'
 # tokenizer = AutoTokenizer.from_pretrained(model_id)
 models = {}
 tokenizers = {}
-def get_bert_feature(text, word2ph, device=None, model_id='cl-tohoku/bert-base-japanese-v3'):
+def get_bert_feature(text, word2ph, device=None, model_id='tohoku-nlp/bert-base-japanese-v3'):
     global model
     global tokenizer
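For context, a minimal sketch of what a per-model-id cache around the corrected tohoku-nlp/bert-base-japanese-v3 path could look like. This is an illustration, not the repository's exact implementation: the hidden-state layer choice, device handling, and the word2ph-based expansion are assumptions about the calling convention.

# Illustrative sketch only; assumes torch and transformers are installed and
# that len(word2ph) matches the tokenized length of `text` (an assumed
# Bert-VITS2-style calling convention).
import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

models = {}
tokenizers = {}

def get_bert_feature(text, word2ph, device=None, model_id="tohoku-nlp/bert-base-japanese-v3"):
    device = device or ("cuda" if torch.cuda.is_available() else "cpu")
    # Load the tokenizer/model once per model_id and reuse them on later calls.
    if model_id not in models:
        tokenizers[model_id] = AutoTokenizer.from_pretrained(model_id)
        models[model_id] = AutoModelForMaskedLM.from_pretrained(model_id).to(device)
    tokenizer, model = tokenizers[model_id], models[model_id]

    with torch.no_grad():
        inputs = tokenizer(text, return_tensors="pt").to(device)
        out = model(**inputs, output_hidden_states=True)
        # Use the third-to-last hidden layer (an arbitrary but common choice).
        hidden = out.hidden_states[-3][0].cpu()  # (num_tokens, hidden_size)

    # Repeat each token's vector word2ph[i] times so the output aligns with
    # the phoneme sequence.
    phone_level = [hidden[i].repeat(word2ph[i], 1) for i in range(len(word2ph))]
    return torch.cat(phone_level, dim=0).T  # (hidden_size, num_phones)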