dataset.py

# Copyright (c) Meta Platforms, Inc. and affiliates
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

import argparse
import dataclasses
import json
import logging
import os
from argparse import Namespace
from pathlib import Path

from stopes.hub import load_config
from stopes.speech.tokenizers import SpeechTokenizer, SpeechTokenizerConfig

from seamless_communication.datasets.hugginface import (
    Speech2SpeechFleursDatasetBuilder,
)

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s -- %(name)s: %(message)s",
)

logger = logging.getLogger("dataset")

# List of FLEURS langcodes is available at https://huggingface.co/datasets/google/fleurs
# List of M4T langcodes is available in yaml: src/seamless_communication/assets/cards/unity_nllb-100.yaml
UNITY_TO_FLEURS_LANG_MAPPING = {
    "eng": "en_us",
    "ita": "it_it",
    "kor": "ko_kr",
}


def download_fleurs_dataset(
    source_lang: str,
    target_lang: str,
    split: str,
    unit_extractor_config: str,
    save_directory: str,
) -> str:
    tokenizer_conf: SpeechTokenizerConfig = load_config(
        unit_extractor_config, namespace=""
    )
    tokenizer: SpeechTokenizer = SpeechTokenizer.build(tokenizer_conf)
    dataset_iterator = Speech2SpeechFleursDatasetBuilder(
        source_lang=UNITY_TO_FLEURS_LANG_MAPPING[source_lang],
        target_lang=UNITY_TO_FLEURS_LANG_MAPPING[target_lang],
        dataset_cache_dir=save_directory,
        speech_tokenizer=tokenizer,
        skip_source_audio=True,  # don't extract units from source audio
        skip_target_audio=False,
        split=split,
    )
    manifest_path: str = os.path.join(save_directory, f"{split}_manifest.json")
    with open(manifest_path, "w") as fp_out:
        for idx, sample in enumerate(dataset_iterator, start=1):
            # correction: the FLEURS dataset builder returns FLEURS lang codes,
            # so restore the M4T lang codes in the manifest
            sample.source.lang = source_lang
            sample.target.lang = target_lang
            sample.target.waveform = None  # units are already extracted
            fp_out.write(json.dumps(dataclasses.asdict(sample)) + "\n")
    logger.info(f"Saved {idx} samples for split={split} to {manifest_path}")
    return manifest_path


def init_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(
        description=(
            "Helper script to download training/evaluation dataset (FLEURS), "
            "extract units from target audio and save the dataset as a manifest "
            "consumable by `finetune.py`."
        )
    )
    parser.add_argument(
        "--source_lang",
        type=str,
        required=True,
        help="M4T langcode of the dataset SOURCE language",
    )
    parser.add_argument(
        "--target_lang",
        type=str,
        required=True,
        help="M4T langcode of the dataset TARGET language",
    )
    parser.add_argument(
        "--split",
        type=str,
        required=True,
        help="Dataset split/shard to download (`train`, `test`)",
    )
    parser.add_argument(
        "--save_dir",
        type=Path,
        required=True,
        help="Directory where the dataset will be stored together with the HuggingFace datasets cache files",
    )
    return parser


def main(args: Namespace) -> None:
    manifest_path = download_fleurs_dataset(
        source_lang=args.source_lang,
        target_lang=args.target_lang,
        # TODO: remove hardcoded path
        unit_extractor_config="/checkpoint/krs/unit_extraction/xlsr1b/lang41_10k_xlsr_lyr35.yaml",
        split=args.split,
        save_directory=str(args.save_dir),
    )
    logger.info(f"Manifest saved to: {manifest_path}")


if __name__ == "__main__":
    args = init_parser().parse_args()
    main(args)
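
# Example invocation (illustrative only; the language codes must be keys of
# UNITY_TO_FLEURS_LANG_MAPPING above, and the save directory is an arbitrary
# local path chosen for this example):
#
#   python dataset.py \
#       --source_lang eng \
#       --target_lang kor \
#       --split train \
#       --save_dir /tmp/fleurs_m4t
#
# This would write /tmp/fleurs_m4t/train_manifest.json, the manifest consumed
# by `finetune.py`.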