Add cpu_threads support (#2)
goldyfruit authored May 30, 2023
1 parent 4251e8e commit 15e4d3d
Showing 2 changed files with 5 additions and 4 deletions.
README.md (5 changes: 2 additions & 3 deletions)
@@ -24,10 +24,10 @@ To use Whisper as STT
     "model": "large-v2",
     "use_cuda": true,
     "compute_type": "float16",
-    "beam_size": 5
+    "beam_size": 5,
+    "cpu_threads": 4
   }
 }
-
 ```

 To use Whisper for lang detection (ovos-dinkum-listener only)
@@ -41,7 +41,6 @@ To use Whisper for lang detection (ovos-dinkum-listener only)
     }
   }
 }
-
 ```

 ## Models
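Note that `cpu_threads` should only matter when the model runs on the CPU; with `"use_cuda": true` inference happens on the GPU. As a minimal sketch, the updated config could be passed to the plugin like this; the class name `FasterWhisperSTT` is an assumption based on the package name and is not shown in this diff:

```python
# Sketch only: FasterWhisperSTT is assumed from the package name; the diff does
# not show the class definition. The config keys mirror the README example above.
from ovos_stt_plugin_fasterwhisper import FasterWhisperSTT

config = {
    "model": "large-v2",
    "use_cuda": True,
    "compute_type": "float16",
    "beam_size": 5,
    "cpu_threads": 4,  # new in this commit; only relevant for CPU inference
}

stt = FasterWhisperSTT(config=config)  # the WhisperModel is created in __init__ (see diff below)
```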
ovos_stt_plugin_fasterwhisper/__init__.py (4 changes: 3 additions & 1 deletion)
@@ -21,6 +21,7 @@ def __init__(self, config=None):
         self.compute_type = self.config.get("compute_type", "int8")
         self.use_cuda = self.config.get("use_cuda", False)
         self.beam_size = self.config.get("beam_size", 5)
+        self.cpu_threads = self.config.get("cpu_threads", 4)

         if self.use_cuda:
             device = "cuda"
@@ -187,12 +188,13 @@ def __init__(self, *args, **kwargs):
         self.beam_size = self.config.get("beam_size", 5)
         self.compute_type = self.config.get("compute_type", "int8")
         self.use_cuda = self.config.get("use_cuda", False)
+        self.cpu_threads = self.config.get("cpu_threads", 4)

         if self.use_cuda:
             device = "cuda"
         else:
             device = "cpu"
-        self.engine = WhisperModel(model, device=device, compute_type=self.compute_type)
+        self.engine = WhisperModel(model, device=device, compute_type=self.compute_type, cpu_threads=self.cpu_threads)

     @staticmethod
     def audiodata2array(audio_data):
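The new setting is passed straight through to faster-whisper's `WhisperModel`, which uses it as the number of threads for CPU inference. For illustration, a self-contained sketch of the equivalent direct call; the model name, audio path, and values are placeholders, not taken from the plugin:

```python
from faster_whisper import WhisperModel

# Equivalent direct use of faster-whisper with the options the plugin exposes.
model = WhisperModel("large-v2", device="cpu", compute_type="int8", cpu_threads=4)

# transcribe() returns a generator of segments plus a TranscriptionInfo object.
segments, info = model.transcribe("speech.wav", beam_size=5)
print(info.language, " ".join(segment.text for segment in segments))
```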
