Update libritts-aligned.py
libritts-aligned.py CHANGED (+8 -3)
@@ -24,6 +24,7 @@ _PHONESET = "arpabet"
 _VERBOSE = os.environ.get("LIBRITTS_VERBOSE", True)
 _MAX_WORKERS = os.environ.get("LIBRITTS_MAX_WORKERS", cpu_count())
 _MAX_WORKERS = int(_MAX_WORKERS)
+_NO_MP = _MAX_WORKERS <= 1
 _MAX_PHONES = os.environ.get("LIBRITTS_MAX_PHONES", 512)
 _PATH = os.environ.get("LIBRITTS_PATH", os.environ.get("HF_DATASETS_CACHE", None))
 _DOWNLOAD_SPLITS = os.environ.get(
@@ -262,9 +263,10 @@ class LibriTTSAlign(datasets.GeneratorBasedBuilder):
                     open(os.path.join(_PATH, f"{hashes[i]}-entries.pkl"), "rb")
                 )
             else:
-                add_entries = [
-                    entry
-                    for entry in process_map(
+                if _NO_MP:
+                    _entries = [self._create_entry(x) for x in tqdm(zip([i] * len(ds), np.arange(len(ds))), desc=f"processing dataset {hashes[i]}")]
+                else:
+                    _entries = process_map(
                         self._create_entry,
                         zip([i] * len(ds), np.arange(len(ds))),
                         chunksize=100,
@@ -272,6 +274,9 @@ class LibriTTSAlign(datasets.GeneratorBasedBuilder):
                         desc=f"processing dataset {hashes[i]}",
                         tqdm_class=tqdm,
                     )
+                add_entries = [
+                    entry
+                    for entry in _entries
                     if entry is not None
                 ]
                 pickle.dump(
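
For context, a minimal, self-contained sketch of the pattern this commit introduces: LIBRITTS_MAX_WORKERS is read from the environment, and when it is 1 or less the entries are built in a plain tqdm loop instead of going through tqdm.contrib.concurrent.process_map. The names create_entry and build_entries and the toy item list below are hypothetical stand-ins for illustration; only the LIBRITTS_MAX_WORKERS / _NO_MP handling and the None filtering mirror the change above.

# Minimal sketch, not the dataset script itself.
import os
from multiprocessing import cpu_count

from tqdm.auto import tqdm
from tqdm.contrib.concurrent import process_map

_MAX_WORKERS = int(os.environ.get("LIBRITTS_MAX_WORKERS", cpu_count()))
_NO_MP = _MAX_WORKERS <= 1


def create_entry(item):
    # Placeholder for the real per-row work; returning None marks a row
    # that should be dropped, as the script does with `if entry is not None`.
    i, row = item
    return None if row % 7 == 0 else {"dataset": i, "row": int(row)}


def build_entries(items):
    if _NO_MP:
        # One worker: skip the process pool and just loop with a progress bar.
        entries = [create_entry(x) for x in tqdm(items, desc="processing dataset")]
    else:
        # Several workers: tqdm's process_map fans the work out over processes.
        entries = process_map(
            create_entry,
            items,
            chunksize=100,
            max_workers=_MAX_WORKERS,
            desc="processing dataset",
            tqdm_class=tqdm,
        )
    # Keep only the rows that produced an entry.
    return [entry for entry in entries if entry is not None]


if __name__ == "__main__":
    # e.g. LIBRITTS_MAX_WORKERS=1 takes the serial path, no multiprocessing.
    print(len(build_entries([(0, n) for n in range(1000)])))

One practical reason for the serial path is that it also serves environments where spawning a process pool is undesirable or unavailable, while leaving the default multi-worker behaviour unchanged.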
|