Commit a7ac9c1 by mspitzna: Update hf_hub_download calls in physicsgen.py to specify repo_type for parquet file retrieval
import os

import datasets
import pyarrow.parquet as pq
from huggingface_hub import hf_hub_download
# Define configurations for each flavor.
BUILDER_CONFIGS = [
    datasets.BuilderConfig(
        name="sound_baseline",
        description="Physical dataset: baseline variant",
        data_dir="sound_baseline"
    ),
    datasets.BuilderConfig(
        name="sound_reflection",
        description="Physical dataset: reflection variant",
        data_dir="sound_reflection"
    ),
    datasets.BuilderConfig(
        name="sound_diffraction",
        description="Physical dataset: diffraction variant",
        data_dir="sound_diffraction"
    ),
    datasets.BuilderConfig(
        name="sound_combined",
        description="Physical dataset: combined variant",
        data_dir="sound_combined"
    ),
    datasets.BuilderConfig(
        name="lens_p1",
        description="Distortion dataset variant",
        data_dir="lens_p1"
    ),
    datasets.BuilderConfig(
        name="lens_p2",
        description="Distortion dataset variant",
        data_dir="lens_p2"
    ),
    datasets.BuilderConfig(
        name="ball_roll",
        description="Double image dataset variant",
        data_dir="ball_roll"
    ),
    datasets.BuilderConfig(
        name="ball_bounce",
        description="Double image dataset variant",
        data_dir="ball_bounce"
    ),
]


class MyPhysicalDataset(datasets.GeneratorBasedBuilder):
    BUILDER_CONFIGS = BUILDER_CONFIGS
    VERSION = datasets.Version("1.1.0")

    def _info(self):
        if self.config.name in ["sound_baseline", "sound_reflection", "sound_diffraction", "sound_combined"]:
            features = datasets.Features({
                "lat": datasets.Value("float"),
                "long": datasets.Value("float"),
                "db": datasets.Value("string"),
                "soundmap": datasets.Image(),  # Expects a dict: {"bytes": ...}
                "osm": datasets.Image(),
                "temperature": datasets.Value("int32"),
                "humidity": datasets.Value("int32"),
                "yaw": datasets.Value("float"),
                "sample_id": datasets.Value("int32"),
                "soundmap_512": datasets.Image(),
            })
        elif self.config.name in ["lens_p1", "lens_p2"]:
            features = datasets.Features({
                "label_path": datasets.Value("string"),
                "fx": datasets.Value("float"),
                "k1": datasets.Value("float"),
                "k2": datasets.Value("float"),
                "k3": datasets.Value("float"),
                "p1": datasets.Value("float"),
                "p2": datasets.Value("float"),
                "cx": datasets.Value("float"),
            })
        elif self.config.name in ["ball_roll", "ball_bounce"]:
            features = datasets.Features({
                "ImgName": datasets.Value("string"),
                "StartHeight": datasets.Value("int32"),
                "GroundIncli": datasets.Value("float"),
                "InputTime": datasets.Value("int32"),
                "TargetTime": datasets.Value("int32"),
                "input_image": datasets.Image(),  # Expects {"bytes": ...}
                "target_image": datasets.Image(),
            })
        else:
            raise ValueError(f"Unknown config name: {self.config.name}")
        return datasets.DatasetInfo(
            description="Multiple variant physical tasks dataset stored as parquet files.",
            features=features,
        )

    def _split_generators(self, dl_manager):
        # Use hf_hub_download to fetch the parquet files directly from the Hub.
        repo_id = "mspitzna/physicsgen"
        train_file = hf_hub_download(repo_id=repo_id, filename=f"{self.config.data_dir}/train.parquet", repo_type="dataset")
        test_file = hf_hub_download(repo_id=repo_id, filename=f"{self.config.data_dir}/test.parquet", repo_type="dataset")
        eval_file = hf_hub_download(repo_id=repo_id, filename=f"{self.config.data_dir}/eval.parquet", repo_type="dataset")
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"parquet_file": train_file},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={"parquet_file": test_file},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={"parquet_file": eval_file},
            ),
        ]

    def _generate_examples(self, parquet_file):
        table = pq.read_table(parquet_file)
        examples = table.to_pylist()
        # Wrap raw image bytes into the format expected by datasets.Image.
        if self.config.name in ["sound_baseline", "sound_reflection", "sound_diffraction", "sound_combined"]:
            for example in examples:
                for key in ["soundmap", "osm", "soundmap_512"]:
                    if example.get(key) is not None and isinstance(example[key], bytes):
                        example[key] = {"bytes": example[key]}
        elif self.config.name in ["ball_roll", "ball_bounce"]:
            for example in examples:
                for key in ["input_image", "target_image"]:
                    if example.get(key) is not None and isinstance(example[key], bytes):
                        example[key] = {"bytes": example[key]}
        for idx, row in enumerate(examples):
            yield idx, row