isaaccorley committed on
Commit
8eb8b81
·
verified ·
1 Parent(s): 6e1a90b

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -302
README.md CHANGED
@@ -2,305 +2,4 @@
2
  license: mit
3
  ---
4
 
5
- Download the AEF GTI
6
-
7
- ```bash
8
- wget https://data.source.coop/tge-labs/aef/v1/annual/aef_index.parquet
9
- ```
10
-
11
- Convert using the following code:
12
-
13
- ```python
14
- import polars as pl
15
- import shapely
16
- from shapely import wkb
17
- import pyarrow as pa
18
- import pyarrow.parquet as pq
19
- import json
20
-
21
- pl.Config.set_verbose(True)
22
-
23
- path = "aef_index.parquet"
24
- table = pq.read_table(path)
25
-
26
- new_cols = []
27
- for field, col in zip(table.schema, table.columns):
28
- if isinstance(field.type, pa.ExtensionType):
29
- new_cols.append(col.cast(field.type.storage_type))
30
- else:
31
- new_cols.append(col)
32
-
33
- clean_table = pa.Table.from_arrays(
34
- new_cols,
35
- schema=pa.schema([
36
- pa.field(
37
- field.name,
38
- field.type.storage_type if isinstance(field.type, pa.ExtensionType) else field.type
39
- )
40
- for field in table.schema
41
- ])
42
- )
43
-
44
- df = pl.from_arrow(clean_table)
45
-
46
- def make_geom(row: dict) -> bytes:
47
- geom = shapely.box(
48
- row["wgs84_west"],
49
- row["wgs84_south"],
50
- row["wgs84_east"],
51
- row["wgs84_north"],
52
- )
53
- return wkb.dumps(geom, hex=False)
54
-
55
- df = df.with_columns([
56
- pl.struct([
57
- pl.col("wgs84_west").alias("xmin"),
58
- pl.col("wgs84_south").alias("ymin"),
59
- pl.col("wgs84_east").alias("xmax"),
60
- pl.col("wgs84_north").alias("ymax"),
61
- ]).alias("bbox"),
62
- pl.struct(["wgs84_west", "wgs84_south", "wgs84_east", "wgs84_north"])
63
- .map_elements(make_geom, return_dtype=pl.Binary)
64
- .alias("geometry"),
65
- ])
66
-
67
- def make_assets(row: dict) -> str:
68
- assets = {
69
- "data": {
70
- "href": row["path"],
71
- "type": "image/tiff; application=geotiff",
72
- "roles": ["data"]
73
- }
74
- }
75
- return json.dumps(assets)
76
-
77
- df_lazy = df.lazy()
78
-
79
- df_stac_lazy = df_lazy.with_columns([
80
- pl.col("fid").cast(pl.String).alias("id"),
81
- pl.lit("Feature").alias("type"),
82
- pl.col("bbox"),
83
- pl.col("geometry"),
84
- pl.concat_str([
85
- pl.col("year").cast(pl.String),
86
- pl.lit("-01-01T00:00:00Z"),
87
- ]).alias("datetime"),
88
- pl.col("crs").alias("proj:epsg"),
89
- pl.struct(["path"]).map_elements(make_assets, return_dtype=pl.String).alias("assets"),
90
- ])
91
-
92
- df_stac = df_stac_lazy.collect()
93
-
94
- df_stac = df_stac.with_columns([
95
- pl.col("geometry").cast(pl.Binary),
96
- ])
97
-
98
- table = df_stac.to_arrow()
99
-
100
- new_arrays = []
101
- new_fields = []
102
- for i, field in enumerate(table.schema):
103
- col = table.column(i)
104
- if field.name == "bbox":
105
- if not pa.types.is_struct(field.type):
106
- bbox_data = col.to_pylist()
107
- bbox_structs = [
108
- {"xmin": float(row[0]), "ymin": float(row[1]), "xmax": float(row[2]), "ymax": float(row[3])}
109
- if isinstance(row, (list, tuple)) and len(row) == 4
110
- else row
111
- for row in bbox_data
112
- ]
113
- col = pa.array(bbox_structs, type=pa.struct([
114
- pa.field("xmin", pa.float64()),
115
- pa.field("ymin", pa.float64()),
116
- pa.field("xmax", pa.float64()),
117
- pa.field("ymax", pa.float64()),
118
- ]))
119
- new_fields.append(pa.field("bbox", pa.struct([
120
- pa.field("xmin", pa.float64()),
121
- pa.field("ymin", pa.float64()),
122
- pa.field("xmax", pa.float64()),
123
- pa.field("ymax", pa.float64()),
124
- ])))
125
- new_arrays.append(col)
126
- elif field.name == "geometry":
127
- if not pa.types.is_binary(field.type):
128
- col = col.cast(pa.binary())
129
- new_fields.append(pa.field("geometry", pa.binary()))
130
- new_arrays.append(col)
131
- else:
132
- new_fields.append(field)
133
- new_arrays.append(col)
134
-
135
- table = pa.Table.from_arrays(new_arrays, schema=pa.schema(new_fields))
136
-
137
- metadata = {
138
- b"geo": b"""{
139
- "version": "1.1.0",
140
- "primary_column": "geometry",
141
- "columns": {
142
- "geometry": {
143
- "encoding": "WKB",
144
- "geometry_types": ["Polygon"]
145
- }
146
- }
147
- }""",
148
- b"stac_version": b"1.0.0"
149
- }
150
-
151
- pq.write_table(
152
- table.replace_schema_metadata(metadata),
153
- "aef-index-stac-geoparquet.parquet",
154
- compression="zstd"
155
- )
156
-
157
- assert len(df_stac) == len(table)
158
- ```
159
-
160
- Validated using the following code:
161
-
162
- ```python
163
- import pyarrow.parquet as pq
164
- import pyarrow as pa
165
- import json
166
- from shapely import wkb
167
- import geopandas as gpd
168
- import numpy as np
169
- from typing import Any
170
-
171
- def validate_stac_geoparquet(filepath: str) -> dict[str, Any]:
172
- results: dict[str, Any] = {}
173
-
174
- try:
175
- table = pq.read_table(filepath)
176
- results["file_readable"] = True
177
- except Exception as e:
178
- results["file_readable"] = False
179
- results["error"] = str(e)
180
- return results
181
-
182
- required_stac_fields = {"id", "type", "geometry", "bbox", "datetime", "assets"}
183
- schema_fields = {field.name for field in table.schema}
184
- missing_fields = required_stac_fields - schema_fields
185
-
186
- results["has_required_fields"] = len(missing_fields) == 0
187
- if missing_fields:
188
- results["missing_fields"] = list(missing_fields)
189
-
190
- metadata = table.schema.metadata
191
- if metadata:
192
- results["has_metadata"] = True
193
-
194
- geo_meta = metadata.get(b"geo")
195
- stac_meta = metadata.get(b"stac_version")
196
-
197
- results["has_geo_metadata"] = geo_meta is not None
198
- results["has_stac_version"] = stac_meta is not None
199
-
200
- if geo_meta:
201
- try:
202
- geo_json = json.loads(geo_meta.decode())
203
- results["geo_version"] = geo_json.get("version")
204
- results["primary_column"] = geo_json.get("primary_column")
205
- results["geometry_encoding"] = geo_json.get("columns", {}).get("geometry", {}).get("encoding")
206
- results["geometry_types"] = geo_json.get("columns", {}).get("geometry", {}).get("geometry_types")
207
- except Exception as e:
208
- results["geo_metadata_parse_error"] = str(e)
209
-
210
- if stac_meta:
211
- results["stac_version"] = stac_meta.decode()
212
- else:
213
- results["has_metadata"] = False
214
-
215
- geometry_field = table.schema.field("geometry")
216
- results["geometry_type_is_binary"] = pa.types.is_binary(geometry_field.type)
217
-
218
- bbox_field = table.schema.field("bbox")
219
- results["bbox_type_is_struct"] = pa.types.is_struct(bbox_field.type)
220
- if pa.types.is_struct(bbox_field.type):
221
- struct_fields = {f.name for f in bbox_field.type}
222
- results["bbox_has_required_fields"] = {"xmin", "ymin", "xmax", "ymax"}.issubset(struct_fields)
223
-
224
- try:
225
- gdf = gpd.read_parquet(filepath)
226
- results["geopandas_readable"] = True
227
- results["row_count"] = len(gdf)
228
-
229
- sample_row = gdf.iloc[0]
230
- results["sample_geometry_valid"] = sample_row.geometry.is_valid
231
- results["sample_geometry_type"] = sample_row.geometry.geom_type
232
-
233
- if "bbox" in gdf.columns:
234
- bbox = sample_row["bbox"]
235
- results["sample_bbox_type"] = str(type(bbox))
236
- results["sample_bbox_value"] = str(bbox)
237
-
238
- is_dict = isinstance(bbox, dict)
239
- results["sample_bbox_is_dict"] = is_dict
240
-
241
- if is_dict:
242
- has_fields = all(k in bbox for k in ["xmin", "ymin", "xmax", "ymax"])
243
- results["sample_bbox_has_fields"] = has_fields
244
- if has_fields:
245
- results["sample_bbox_valid"] = (
246
- float(bbox["xmin"]) < float(bbox["xmax"]) and
247
- float(bbox["ymin"]) < float(bbox["ymax"])
248
- )
249
- else:
250
- results["sample_bbox_valid"] = False
251
- else:
252
- results["sample_bbox_valid"] = False
253
-
254
- sample_bbox_pyarrow = table.column("bbox")[0].as_py()
255
- results["pyarrow_bbox_type"] = str(type(sample_bbox_pyarrow))
256
- results["pyarrow_bbox_value"] = str(sample_bbox_pyarrow)
257
- results["pyarrow_bbox_valid"] = (
258
- isinstance(sample_bbox_pyarrow, dict) and
259
- all(k in sample_bbox_pyarrow for k in ["xmin", "ymin", "xmax", "ymax"]) and
260
- sample_bbox_pyarrow["xmin"] < sample_bbox_pyarrow["xmax"] and
261
- sample_bbox_pyarrow["ymin"] < sample_bbox_pyarrow["ymax"]
262
- )
263
-
264
- if "assets" in gdf.columns:
265
- try:
266
- assets = json.loads(sample_row["assets"])
267
- results["sample_assets_valid_json"] = True
268
- results["sample_assets_is_dict"] = isinstance(assets, dict)
269
- except Exception:
270
- results["sample_assets_valid_json"] = False
271
-
272
- if "datetime" in gdf.columns:
273
- results["sample_datetime"] = str(sample_row["datetime"])
274
-
275
- if "id" in gdf.columns:
276
- results["sample_id"] = str(sample_row["id"])
277
-
278
- except Exception as e:
279
- results["geopandas_readable"] = False
280
- results["geopandas_error"] = str(e)
281
-
282
- all_valid = all(
283
- v is True
284
- for k, v in results.items()
285
- if isinstance(v, bool) and k not in {"sample_geometry_valid", "sample_bbox_valid", "sample_assets_valid_json", "sample_assets_is_dict", "sample_bbox_is_dict", "sample_bbox_has_fields"}
286
- )
287
- results["overall_valid"] = all_valid
288
-
289
- return results
290
-
291
- validation_results = validate_stac_geoparquet("aef-index-stac-geoparquet.parquet")
292
-
293
- print("STAC GeoParquet Validation Results:")
294
- print("=" * 50)
295
- for key, value in sorted(validation_results.items()):
296
- if isinstance(value, bool):
297
- status = "✓" if value else "✗"
298
- print(f"{status} {key}: {value}")
299
- else:
300
- print(f" {key}: {value}")
301
-
302
- if validation_results.get("overall_valid"):
303
- print("\n✓ File is valid STAC GeoParquet!")
304
- else:
305
- print("\n✗ File has validation issues. See details above.")
306
- ```
 
2
  license: mit
3
  ---
4
 
5
+ Code for the conversion is available here: https://gist.github.com/isaaccorley/3cf87bfe9a8f41fff429aafb45decdab