decoders.py

```
# Copyright (c) 2019, Myrtle Software Limited. All rights reserved.
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
import torch.nn.functional as F

from model_rnnt import label_collate


class TransducerDecoder:
    """Decoder base class.

    Args:
        blank_index: Index of the blank symbol in the output alphabet.
        model: RNN-T model to use for prediction.
    """

    def __init__(self, blank_index, model):
        self._model = model
        self._SOS = -1  # start of sequence
        self._blank_id = blank_index

    def _pred_step(self, label, hidden, device):
        # At the start of a sequence, run the prediction network with no input label.
        if label == self._SOS:
            return self._model.predict(None, hidden, add_sos=False)
            # return self._model.prediction(None, hidden, add_sos=False)
        # Shift indices above the blank down by one before feeding the prediction network.
        if label > self._blank_id:
            label -= 1
        label = label_collate([[label]]).to(device)
        return self._model.predict(label, hidden, add_sos=False)
        # return self._model.prediction(label, hidden, add_sos=False)

    def _joint_step(self, enc, pred, log_normalize=False):
        # Joint network output is 4-D; drop the singleton time/label dims to get (B, vocab).
        logits = self._model.joint(enc, pred)[:, 0, 0, :]
        if not log_normalize:
            return logits

        probs = F.log_softmax(logits, dim=len(logits.shape) - 1)

        return probs

    def _get_last_symb(self, labels):
        # Most recently emitted label, or SOS if nothing has been emitted yet.
        return self._SOS if labels == [] else labels[-1]
```
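For reference, here is roughly how a greedy decode loop would sit on top of this base class; it is only a sketch to show how `_pred_step`, `_joint_step`, and `_get_last_symb` fit together. The `GreedyTransducerDecoder` name, the `max_symbols` cap, the `(time, batch, features)` layout of the encoder output, and the assumption that `model.predict` returns an `(output, hidden)` pair are all mine, not taken from the post:

```
# Illustrative sketch only: a greedy decode loop on top of TransducerDecoder.
# The class/method names below, the `max_symbols` cap, the (time, batch, feat)
# encoder-output layout, and the assumption that `model.predict` returns an
# (output, hidden) pair are guesses for illustration, not from the post.
import torch

from decoders import TransducerDecoder  # assumes the file above is decoders.py


class GreedyTransducerDecoder(TransducerDecoder):
    def decode(self, f, f_len, max_symbols=30):
        """Greedily decode one utterance.

        f: encoder output, assumed shape (time, 1, enc_dim)
        f_len: number of valid frames in `f`
        """
        device = f.device
        hidden = None
        labels = []
        with torch.no_grad():
            for t in range(int(f_len)):
                f_t = f[t, :, :].unsqueeze(0)  # one encoder frame for the joint network
                emitted = 0
                while emitted < max_symbols:
                    g, new_hidden = self._pred_step(
                        self._get_last_symb(labels), hidden, device)
                    logits = self._joint_step(f_t, g, log_normalize=False)
                    k = int(logits.argmax(dim=-1))
                    if k == self._blank_id:
                        break  # blank: move on to the next encoder frame
                    labels.append(k)
                    hidden = new_hidden
                    emitted += 1
        return labels
```

The inner while loop is data-dependent, so when exporting for TVM it is usually this decode loop, rather than the encoder/prediction/joint networks themselves, that needs special treatment.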