@@ -277,11 +277,11 @@ def predict(
277277 :type transcription_output: `bool`
278278 :return: Probability (if transcription_output is None or False) or transcription (if transcription_output is
279279 True) predictions:
280- - Probability return is a tuple of (probs, sizes), where:
281- - probs is the probability of characters of shape (nb_samples, seq_length, nb_classes).
282- - sizes is the real sequence length of shape (nb_samples,).
283- - Transcription return is a numpy array of characters. A possible example of a transcription return
284- is `np.array(['SIXTY ONE', 'HELLO'])`.
280+ - Probability return is a tuple of (probs, sizes), where `probs` is the probability of characters of
281+ shape (nb_samples, seq_length, nb_classes) and `sizes` is the real sequence length of shape
282+ (nb_samples,).
283+ - Transcription return is a numpy array of characters. A possible example of a transcription return
284+ is `np.array(['SIXTY ONE', 'HELLO'])`.
285285 """
286286 import torch # lgtm [py/repeated-import]
287287
@@ -529,11 +529,11 @@ def transform_model_input(
529529 :param real_lengths: Real lengths of original sequences.
530530 :return: A tuple of inputs and targets in the model space with the original index
531531 `(inputs, targets, input_percentages, target_sizes, batch_idx)`, where:
532- - inputs: model inputs of shape (nb_samples, nb_frequencies, seq_length).
533- - targets: ground truth targets of shape (sum over nb_samples of real seq_lengths).
534- - input_percentages: percentages of real inputs in inputs.
535- - target_sizes: list of real seq_lengths.
536- - batch_idx: original index of inputs.
532+ - inputs: model inputs of shape (nb_samples, nb_frequencies, seq_length).
533+ - targets: ground truth targets of shape (sum over nb_samples of real seq_lengths).
534+ - input_percentages: percentages of real inputs in inputs.
535+ - target_sizes: list of real seq_lengths.
536+ - batch_idx: original index of inputs.
537537 """
538538 import torch # lgtm [py/repeated-import]
539539 import torchaudio
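A hedged sketch of the five-element tuple documented in the `transform_model_input` docstring above, with dummy torch tensors so the shapes are concrete. All sizes and values are illustrative assumptions, and the call at the end is commented out because only the `real_lengths` parameter is visible in this hunk:

```python
import torch

nb_samples, nb_frequencies, seq_length = 2, 161, 300  # arbitrary illustration sizes

inputs = torch.zeros(nb_samples, nb_frequencies, seq_length)  # padded model inputs
target_sizes = torch.tensor([9, 5])                           # real target length per sample
targets = torch.randint(1, 29, (int(target_sizes.sum()),))    # all labels concatenated: (sum of real seq_lengths,)
input_percentages = torch.tensor([1.0, 0.6])                  # fraction of each padded row holding real data
batch_idx = torch.tensor([1, 0])                              # original index of each input in the incoming batch

assert inputs.shape == (nb_samples, nb_frequencies, seq_length)
assert targets.shape[0] == int(target_sizes.sum())
assert input_percentages.shape == (nb_samples,) and batch_idx.shape == (nb_samples,)

# A caller would unpack the return value like (argument names other than
# real_lengths are assumptions):
# inputs, targets, input_percentages, target_sizes, batch_idx = transform_model_input(
#     x, y, real_lengths=real_lengths
# )
```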