Optimize the inspection of the dataset #1
opened by albertvillanova (HF Staff)

gqa-lxmert.py (+32 -21) CHANGED
@@ -88,20 +88,47 @@ class GqaLxmert(datasets.GeneratorBasedBuilder):
     def _split_generators(self, dl_manager):
         """Returns SplitGenerators."""
         dl_dir = dl_manager.download_and_extract(_URLS)
-
-        self.id2features = self._load_features(os.path.join(dl_dir["feat"], _FEAT_PATH))
-
+        features_path = os.path.join(dl_dir["feat"], _FEAT_PATH)
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
-                gen_kwargs={
+                gen_kwargs={
+                    "filepath": dl_dir["train"],
+                    "ans2label_path": dl_dir["ans2label"],
+                    "features_path": features_path,
+                },
             ),
             datasets.SplitGenerator(
                 name=datasets.Split.VALIDATION,
-                gen_kwargs={
+                gen_kwargs={
+                    "filepath": dl_dir["dev"],
+                    "ans2label_path": dl_dir["ans2label"],
+                    "features_path": features_path,
+                },
             ),
         ]
 
+    def _generate_examples(self, filepath, ans2label_path, features_path):
+        """ Yields examples as (key, example) tuples."""
+        if not hasattr(self, "ans2label"):
+            with open(ans2label_path, encoding="utf-8") as f:
+                self.ans2label = json.load(f)
+        if not hasattr(self, "id2features"):
+            self.id2features = self._load_features(features_path)
+        with open(filepath, encoding="utf-8") as f:
+            gqa = json.load(f)
+            for id_, d in enumerate(gqa):
+                img_features = self.id2features[d["img_id"]]
+                label = self.ans2label[next(iter(d["label"]))]
+                yield id_, {
+                    "question": d["sent"],
+                    "question_id": d["question_id"],
+                    "image_id": d["img_id"],
+                    "features": img_features["features"],
+                    "normalized_boxes": img_features["normalized_boxes"],
+                    "label": label,
+                }
+
     def _load_features(self, filepath):
         """Returns a dictionary mapping an image id to the corresponding image's objects features."""
         id2features = {}
@@ -126,19 +153,3 @@ class GqaLxmert(datasets.GeneratorBasedBuilder):
         normalized_boxes[:, (0, 2)] /= img_w
         normalized_boxes[:, (1, 3)] /= img_h
         return normalized_boxes
-
-    def _generate_examples(self, filepath):
-        """ Yields examples as (key, example) tuples."""
-        with open(filepath, encoding="utf-8") as f:
-            gqa = json.load(f)
-            for id_, d in enumerate(gqa):
-                img_features = self.id2features[d["img_id"]]
-                label = self.ans2label[next(iter(d["label"]))]
-                yield id_, {
-                    "question": d["sent"],
-                    "question_id": d["question_id"],
-                    "image_id": d["img_id"],
-                    "features": img_features["features"],
-                    "normalized_boxes": img_features["normalized_boxes"],
-                    "label": label,
-                }
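For context, a minimal usage sketch of the reworked script (not part of the diff): after this change `_split_generators` only records the downloaded paths in `gen_kwargs`, and the large feature file plus the ans2label mapping are read lazily, once, the first time `_generate_examples` runs (the `hasattr` guards cache them on the builder). The local script path and the `trust_remote_code` flag below are assumptions about how the script is loaded, not something this PR adds.

```python
# Minimal sketch, assuming the dataset script is available locally as ./gqa-lxmert.py
# (hypothetical path) and the files referenced by _URLS are reachable.
import datasets

# Building the split generators is now cheap: only paths are wired into gen_kwargs.
# Features and the ans2label mapping are parsed lazily, on first example generation.
dataset = datasets.load_dataset(
    "./gqa-lxmert.py",        # hypothetical local path to this script
    split="validation",
    trust_remote_code=True,   # recent `datasets` versions require this for script-based datasets
)
print(dataset[0]["question"], dataset[0]["label"])
```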