fix: Tokenizer dependency (#30)
@@ -135,14 +135,14 @@ class LLMEvaluator {

     enum LoadState {
         case idle
-        case loaded(LLMModel, LLM.Tokenizer)
+        case loaded(LLMModel, Tokenizers.Tokenizer)
     }

     var loadState = LoadState.idle

     /// load and return the model -- can be called multiple times, subsequent calls will
     /// just return the loaded model
-    func load() async throws -> (LLMModel, LLM.Tokenizer) {
+    func load() async throws -> (LLMModel, Tokenizers.Tokenizer) {
         switch loadState {
         case .idle:
             // limit the buffer cache
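For context, the change replaces the old LLM.Tokenizer reference with Tokenizers.Tokenizer, so the evaluator names the tokenizer type from the Tokenizers module directly after the dependency update. Below is a minimal, self-contained sketch of the same load-once pattern the diff touches; StubModel and StubTokenizer are hypothetical stand-ins for LLMModel and Tokenizers.Tokenizer, which in the real project come from external packages.

// Sketch of the load-once caching pattern shown in the diff.
// StubModel and StubTokenizer are placeholders, not the real types.
struct StubModel {}
struct StubTokenizer {}

final class EvaluatorSketch {
    enum LoadState {
        case idle
        case loaded(StubModel, StubTokenizer)
    }

    private var loadState = LoadState.idle

    /// Load and return the model; subsequent calls return the cached pair.
    func load() async throws -> (StubModel, StubTokenizer) {
        switch loadState {
        case .idle:
            // The expensive work (downloading weights, building the tokenizer)
            // would happen here in the real evaluator.
            let model = StubModel()
            let tokenizer = StubTokenizer()
            loadState = .loaded(model, tokenizer)
            return (model, tokenizer)
        case .loaded(let model, let tokenizer):
            return (model, tokenizer)
        }
    }
}

Because the loaded pair is stored in loadState, repeated calls to load() skip the setup work and simply return the cached model and tokenizer.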