import json
import os

from tqdm import tqdm
from datasets import Dataset, load_dataset, Image


def load_jsonl(file_path):
    """Load a JSONL file and return a list of dictionaries.

    Each dictionary corresponds to one JSON object parsed from a line of
    the file; lines that fail to parse are reported and skipped rather
    than aborting the whole load.
    """
    data = []
    with open(file_path, 'r', encoding='utf-8') as f:
        for line in f:
            try:
                # Parse each line as a JSON object
                json_object = json.loads(line.strip())
                data.append(json_object)
            except json.JSONDecodeError as e:
                print(f"Error decoding JSON on line: {line.strip()} - {e}")
    return data


def main():
    """Build and save a Hugging Face image dataset for one COCO split.

    Merges the official COCO caption annotations with the Localized
    Narratives captions, then writes the combined records to disk with
    `Dataset.save_to_disk`.
    """
    dset = "val"
    workdir = "./coco"

    # Load annotations from the official COCO captions file
    with open(
        os.path.join(workdir, "annotations", f"captions_{dset}2017.json"),
        "r", encoding="utf-8"
    ) as reader:
        data = json.load(reader)

    # Index one record per image, keyed by image id
    images = {}
    for item in tqdm(data["images"]):
        _idx = item["id"]
        images[_idx] = {
            "file_name": item["file_name"],
            "height": item["height"],
            "width": item["width"],
            "id": _idx,
            "image": os.path.join(workdir, f"{dset}2017", item["file_name"]),
            "captions": [],
            "narratives": []
        }

    # Attach the official caption annotations. Guard against annotations
    # that reference an image id missing from this split: the original
    # unguarded indexing would raise KeyError and abort the entire build.
    for item in tqdm(data["annotations"]):
        _idx = item["image_id"]
        if _idx in images:
            images[_idx]["captions"].append(item["caption"])
        else:
            print(f"Skipping caption for unknown image_id {_idx}")

    # Attach the Localized Narratives captions (image ids are strings in
    # the JSONL, hence the int() conversion), with the same guard.
    data_narr = load_jsonl(
        os.path.join(workdir, "localized_narratives", f"coco_{dset}_captions.jsonl")
    )
    for item in tqdm(data_narr):
        _idx = int(item["image_id"])
        if _idx in images:
            images[_idx]["narratives"].append(item["caption"])
        else:
            print(f"Skipping narrative for unknown image_id {_idx}")

    def gen():
        # Yield one record per image; iterate values directly since the
        # dict key duplicates each record's "id" field.
        yield from images.values()

    ds = Dataset.from_generator(gen)
    # Cast the path column to Image so files are decoded lazily on access.
    ds = ds.cast_column("image", Image())
    ds.save_to_disk(f"coco/datasets/data/{dset}", max_shard_size="400MB")


def test_coco():
    """Smoke-test: reload the saved dataset and print its metadata."""
    ds = load_dataset("./coco/datasets", split="val")
    print(ds.info)


if __name__ == "__main__":
    # main()
    test_coco()