Spaces:
Running
Running
Commit
•
b5dab78
1
Parent(s):
aa3b8d4
Update processing_whisper.py
Browse files — processing_whisper.py (+4 −4)
processing_whisper.py
CHANGED
@@ -74,13 +74,13 @@ class WhisperPrePostProcessor(WhisperProcessor):
|
|
74 |
ratio = 1
|
75 |
|
76 |
if not isinstance(inputs, np.ndarray):
|
77 |
-
raise ValueError(f"We expect a numpy ndarray as input, got `{type(inputs)}`")
|
78 |
if len(inputs.shape) != 1:
|
79 |
-
raise ValueError("We expect a single channel audio input for the Flax Whisper API")
|
80 |
|
81 |
if stride is not None:
|
82 |
if stride[0] + stride[1] > inputs.shape[0]:
|
83 |
-
raise ValueError("Stride is too large for input")
|
84 |
|
85 |
# Stride needs to get the chunk length here, it's going to get
|
86 |
# swallowed by the `feature_extractor` later, and then batching
|
@@ -100,7 +100,7 @@ class WhisperPrePostProcessor(WhisperProcessor):
|
|
100 |
stride_right = round(stride_length_s[1] * self.feature_extractor.sampling_rate)
|
101 |
|
102 |
if chunk_len < stride_left + stride_right:
|
103 |
-
raise ValueError("Chunk length must be superior to stride length")
|
104 |
|
105 |
for item in self.chunk_iter_with_batch(
|
106 |
inputs,
|
|
|
74 |
ratio = 1
|
75 |
|
76 |
if not isinstance(inputs, np.ndarray):
|
77 |
+
raise ValueError(f"We expect a numpy ndarray as input, got `{type(inputs)}`.")
|
78 |
if len(inputs.shape) != 1:
|
79 |
+
raise ValueError(f"We expect a single channel audio input for the Flax Whisper API, got {len(inputs.shape)} channels.")
|
80 |
|
81 |
if stride is not None:
|
82 |
if stride[0] + stride[1] > inputs.shape[0]:
|
83 |
+
raise ValueError("Stride is too large for input.")
|
84 |
|
85 |
# Stride needs to get the chunk length here, it's going to get
|
86 |
# swallowed by the `feature_extractor` later, and then batching
|
|
|
100 |
stride_right = round(stride_length_s[1] * self.feature_extractor.sampling_rate)
|
101 |
|
102 |
if chunk_len < stride_left + stride_right:
|
103 |
+
raise ValueError("Chunk length must be superior to stride length.")
|
104 |
|
105 |
for item in self.chunk_iter_with_batch(
|
106 |
inputs,
|