harpreetsahota commited on
Commit
93e7e1d
·
verified ·
1 Parent(s): 0ac4641

Upload parkseg-to-fo.py

Browse files
Files changed (1) hide show
  1. parkseg-to-fo.py +107 -0
parkseg-to-fo.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Simple script to create a FiftyOne dataset from the parkseg12k dataset with NDVI calculation.
3
+ """
4
+
5
+ # Import necessary libraries
6
+ import fiftyone as fo
7
+ import os
8
+ import numpy as np
9
+ from PIL import Image
10
+ from datasets import load_dataset
11
+
12
def main():
    """Build a FiftyOne dataset from the parkseg12k train split with NDVI heatmaps.

    Saves each sample's RGB, mask, and NIR images plus a computed NDVI array
    to local directories, then creates one FiftyOne sample per image with
    segmentation, NIR-heatmap, NDVI-heatmap, and NDVI summary-statistic fields.

    Returns:
        fo.Dataset: the populated, persistent FiftyOne dataset
    """
    # Create a new (persistent) FiftyOne dataset, replacing any prior one
    name = "parkseg12k_train"
    dataset = fo.Dataset(name, overwrite=True, persistent=True)

    # Load the Hugging Face dataset, preferring a local copy when present.
    # NOTE: was a bare `except:`, which also swallows KeyboardInterrupt and
    # SystemExit; narrowed to Exception so interrupts still propagate.
    try:
        hf_dataset = load_dataset("file://" + os.path.join(os.getcwd(), "parkseg12k_dataset"))
        print("Loaded dataset from local storage")
    except Exception:
        print("Loading from HuggingFace")
        hf_dataset = load_dataset("UTEL-UIUC/parkseg12k")

    # Create directories for storing images and NDVI arrays
    images_dir = os.path.join(os.getcwd(), "parkseg12k_images_train")
    ndvi_dir = os.path.join(os.getcwd(), "parkseg12k_ndvi")
    os.makedirs(images_dir, exist_ok=True)
    os.makedirs(ndvi_dir, exist_ok=True)

    # Only process the train split
    split = "train"
    print(f"Processing {split} split...")
    samples = []
    n_total = len(hf_dataset[split])  # hoisted: loop-invariant

    # Process each sample
    for i, sample in enumerate(hf_dataset[split]):
        if i % 100 == 0:
            print(f"Processing sample {i}/{n_total}")

        # Paths for this sample's on-disk artifacts
        rgb_path = os.path.join(images_dir, f"{i}_rgb.png")
        mask_path = os.path.join(images_dir, f"{i}_mask.png")
        nir_path = os.path.join(images_dir, f"{i}_nir.png")
        ndvi_path = os.path.join(ndvi_dir, f"{i}_ndvi.npy")

        # Save images to disk (PIL Image objects from the HF dataset)
        sample['rgb'].save(rgb_path)
        sample['mask'].save(mask_path)
        sample['nir'].save(nir_path)

        # Extract red and NIR channels, normalized to [0, 1].
        # Assumes 8-bit imagery (pixel values in 0..255) — TODO confirm
        rgb_array = np.array(sample['rgb'])
        nir_array = np.array(sample['nir'])
        red = rgb_array[:, :, 0].astype(np.float32) / 255.0
        nir = nir_array.astype(np.float32) / 255.0

        # NDVI = (NIR - Red) / (NIR + Red). Use np.divide's `where=` mask so
        # the division is never evaluated at zero denominators; the original
        # np.where(...) form computed numerator/denominator everywhere first,
        # emitting RuntimeWarnings on 0/0 pixels before masking.
        numerator = nir - red
        denominator = nir + red
        ndvi = np.divide(
            numerator,
            denominator,
            out=np.zeros_like(numerator),
            where=denominator != 0,
        )
        ndvi = np.clip(ndvi, -1, 1)  # clamp to the valid NDVI range

        # Save NDVI array
        np.save(ndvi_path, ndvi)

        # Build the FiftyOne sample with all derived fields
        fo_sample = fo.Sample(filepath=rgb_path)
        fo_sample["segmentation"] = fo.Segmentation(mask_path=mask_path)
        fo_sample["nir"] = fo.Heatmap(map_path=nir_path, range=[0, 1])
        fo_sample["ndvi"] = fo.Heatmap(map=ndvi, range=[-1, 1])

        # Per-sample NDVI summary statistics
        fo_sample["ndvi_mean"] = float(np.mean(ndvi))
        fo_sample["ndvi_std"] = float(np.std(ndvi))
        fo_sample["ndvi_min"] = float(np.min(ndvi))
        fo_sample["ndvi_max"] = float(np.max(ndvi))

        samples.append(fo_sample)

    # Bulk-add is faster than adding samples one at a time
    print(f"Adding {len(samples)} samples to dataset...")
    dataset.add_samples(samples)

    # Populate image metadata and expose the custom fields dynamically
    dataset.compute_metadata()
    dataset.add_dynamic_sample_fields()

    return dataset
106
if __name__ == "__main__":
    # Build the dataset only when executed as a script, not on import
    dataset = main()