# --- Optional vocabulary / tokenizer resources ------------------------------
# Each resource is loaded only when the corresponding CLI argument exists and
# is not None. (NOTE(review): the loaded locals — symbol_table, seg_dict,
# punc_dict, bpe_tokenizer — are not stored on self within this span;
# presumably they are used further down — confirm against the full method.)
symbol_table = read_symbol_table(args.token_list)
if hasattr(args, "seg_dict_file") and args.seg_dict_file is not None:
    seg_dict = load_seg_dict(args.seg_dict_file)
if hasattr(args, "punc_dict_file") and args.punc_dict_file is not None:
    punc_dict = read_symbol_table(args.punc_dict_file)
# NOTE: when both punc_dict_file and punc_list are supplied, punc_list wins —
# it overwrites the table loaded just above. This mirrors the original code.
if hasattr(args, "punc_list") and args.punc_list is not None:
    punc_dict = read_symbol_table(args.punc_list)
if hasattr(args, "bpemodel") and args.bpemodel is not None:
    bpe_tokenizer = SentencepiecesTokenizer(args.bpemodel)

# --- Dataloader configuration ----------------------------------------------
# dataset_conf is treated as mandatory (no hasattr guard, and it is read
# again below via .get()).
self.dataset_conf = args.dataset_conf
# BUG FIX: the original read args.frontend_conf unconditionally and only
# afterwards checked `"frontend_conf" not in args`, so a missing attribute
# raised AttributeError before the guard could run. Guard the access first,
# like the other optional arguments. (`in` works on argparse.Namespace via
# its __contains__ — assumes args is a Namespace; TODO confirm.)
self.frontend_conf = args.frontend_conf if "frontend_conf" in args else None
self.speed_perturb = args.speed_perturb if hasattr(args, "speed_perturb") else None

logging.info("dataloader config: {}".format(self.dataset_conf))
# batch_mode controls how samples are grouped; defaults to "padding".
batch_mode = self.dataset_conf.get("batch_mode", "padding")