Skip to content

Commit 0c380be

Browse files
authored
Merge pull request #51 from Project-AgML/dev
Version 0.5.1
2 parents f09cabd + 8213539 commit 0c380be

16 files changed

+152
-12
lines changed

.github/workflows/update-datasets.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ on:
77
push:
88
paths:
99
- agml/_assets/public_datasources.json
10+
branches:
11+
- dev
1012

1113
permissions: write-all
1214

README.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -137,6 +137,7 @@ You're now ready to use AgML for training your own models!
137137
[riseholme_strawberry_classification_2021](https://github.com/Project-AgML/AgML/blob/main/docs/datasets/riseholme_strawberry_classification_2021.md) | Image Classification | 3520 |
138138
[ghai_broccoli_detection](https://github.com/Project-AgML/AgML/blob/main/docs/datasets/ghai_broccoli_detection.md) | Object Detection | 500 |
139139
[bean_synthetic_earlygrowth_aerial](https://github.com/Project-AgML/AgML/blob/main/docs/datasets/bean_synthetic_earlygrowth_aerial.md) | Semantic Segmentation | 2500 |
140+
[ghai_strawberry_fruit_detection](https://github.com/Project-AgML/AgML/blob/main/docs/datasets/ghai_strawberry_fruit_detection.md) | Object Detection | 500 |
140141

141142
## Usage Information
142143

@@ -167,4 +168,4 @@ a bug or feature that you would like to see implemented, please don't hesitate t
167168
See the [contributing guidelines](/CONTRIBUTING.md) for more information.
168169

169170
## Funding
170-
This project is partly funded by the [National AI Institute for Food Systems (AIFS)](https://aifs.ucdavis.edu
171+
This project is partly funded by the [National AI Institute for Food Systems (AIFS)](https://aifs.ucdavis.edu).

agml/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
__version__ = '0.5.0'
15+
__version__ = '0.5.1'
1616
__all__ = ['data', 'synthetic', 'backend', 'viz', 'io']
1717

1818

agml/_assets/public_datasources.json

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1230,5 +1230,45 @@
12301230
0.08992248773574829
12311231
]
12321232
}
1233+
},
1234+
"ghai_strawberry_fruit_detection": {
1235+
"classes": {
1236+
"1": "Bud",
1237+
"2": "Calyx",
1238+
"3": "Detached Fruit",
1239+
"4": "Flower",
1240+
"5": "Large green",
1241+
"6": "Leaf",
1242+
"7": "Ripe fruit",
1243+
"8": "Small Green",
1244+
"9": "Stem",
1245+
"10": "Unripe fruit"
1246+
},
1247+
"ml_task": "object_detection",
1248+
"ag_task": "crop_detection",
1249+
"location": {
1250+
"continent": "north_america",
1251+
"country": "usa"
1252+
},
1253+
"sensor_modality": "rgb",
1254+
"real_synthetic": "real",
1255+
"platform": "handheld/ground",
1256+
"input_data_format": "jpg",
1257+
"annotation_format": "coco_json",
1258+
"n_images": "500",
1259+
"docs_url": "https://github.com/AxisAg/GHAIDatasets/blob/main/datasets/strawberry.md",
1260+
"external_image_sources": [],
1261+
"stats": {
1262+
"mean": [
1263+
0.49159616231918335,
1264+
0.5238277316093445,
1265+
0.4485996663570404
1266+
],
1267+
"std": [
1268+
0.18163496255874634,
1269+
0.16137710213661194,
1270+
0.18042609095573425
1271+
]
1272+
}
12331273
}
12341274
}

agml/_assets/shape_info.pickle

167 Bytes
Binary file not shown.

agml/_assets/source_citations.json

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -138,5 +138,9 @@
138138
"bean_synthetic_earlygrowth_aerial": {
139139
"license": "MIT",
140140
"citation": "@ARTICLE{10.3389/fpls.2019.01185,\n \nAUTHOR={Bailey, Brian N.}, \n\t \nTITLE={Helios: A Scalable 3D Plant and Environmental Biophysical Modeling Framework}, \n\t\nJOURNAL={Frontiers in Plant Science}, \n\t\nVOLUME={10}, \n\t\nYEAR={2019}, \n\t \nURL={https://www.frontiersin.org/article/10.3389/fpls.2019.01185}, \n\t\nDOI={10.3389/fpls.2019.01185}, \n\t\nISSN={1664-462X}, \n \nABSTRACT={This article presents an overview of Helios, a new three-dimensional (3D) plant and environmental modeling framework. Helios is a model coupling framework designed to provide maximum flexibility in integrating and running arbitrary 3D environmental system models. Users interact with Helios through a well-documented open-source C++ API. Version 1.0 comes with model plug-ins for radiation transport, the surface energy balance, stomatal conductance, photosynthesis, solar position, and procedural tree generation. Additional plug-ins are also available for visualizing model geometry and data and for processing and integrating LiDAR scanning data. Many of the plug-ins perform calculations on the graphics processing unit, which allows for efficient simulation of very large domains with high detail. An example modeling study is presented in which leaf-level heterogeneity in water usage and photosynthesis of an orchard is examined to understand how this leaf-scale variability contributes to whole-tree and -canopy fluxes.}\n}"
141+
},
142+
"ghai_strawberry_fruit_detection": {
143+
"license": "CC BY-SA 4.0",
144+
"citation": ""
141145
}
142146
}

agml/_internal/preprocess.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1016,6 +1016,19 @@ def ghai_broccoli_detection(self, dataset_name):
10161016
shutil.move(os.path.join(original_dir, 'coco.json'),
10171017
os.path.join(processed_dir, 'annotations.json'))
10181018

1019+
def ghai_strawberry_fruit_detection(self, dataset_name):
    """Preprocess the `ghai_strawberry_fruit_detection` dataset.

    Moves every JPG image from the original dataset directory into a
    `images/` subdirectory of the processed dataset directory, and
    relocates the COCO annotation file (`coco.json`) to the processed
    directory under the name `annotations.json`.

    Args:
        dataset_name (str): The name of the dataset to preprocess.
    """
    # Resolve the source and destination directories for this dataset.
    source_dir = os.path.join(self.data_original_dir, dataset_name)
    target_dir = os.path.join(self.data_processed_dir, dataset_name)
    target_image_dir = os.path.join(target_dir, 'images')
    os.makedirs(target_image_dir, exist_ok = True)

    # Relocate each JPG image into the processed image directory.
    for image_path in tqdm(glob.glob(os.path.join(source_dir, '*.jpg'))):
        shutil.move(image_path, target_image_dir)

    # Relocate the COCO annotation file under its standardized name.
    shutil.move(os.path.join(source_dir, 'coco.json'),
                os.path.join(target_dir, 'annotations.json'))
1031+
10191032

10201033
if __name__ == '__main__':
10211034
# Initialize program arguments.

agml/data/loader.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import os
1616
import json
1717
import copy
18+
import glob
1819
from typing import Union
1920
from collections.abc import Sequence
2021
from decimal import getcontext, Decimal
@@ -296,6 +297,38 @@ def helios(cls, name, dataset_path = None):
296297
information which is provided in the `.metadata` directory of the Helios
297298
generated dataset, allowing it to contain potentially even more info.
298299
"""
300+
# Instantiate from a list of datasets.
301+
if isinstance(name, (list, tuple)):
302+
if dataset_path is None:
303+
dataset_path = [None] * len(name)
304+
elif isinstance(dataset_path, str):
305+
dataset_path = [dataset_path] * len(name)
306+
else:
307+
if not len(dataset_path) == len(name):
308+
raise ValueError("The number of dataset paths must be "
309+
"the same as the number of dataset names.")
310+
datasets = [cls.helios(n, dataset_path = dp)
311+
for n, dp in zip(name, dataset_path)]
312+
return cls.merge(*datasets)
313+
314+
# Instantiate from a wildcard pattern.
315+
if isinstance(name, str) and '*' in name:
316+
if dataset_path is None:
317+
dataset_path = os.path.abspath(synthetic_data_save_path())
318+
elif not os.path.exists(dataset_path):
319+
raise NotADirectoryError(
320+
f"Existing directory '{dataset_path}' for dataset of name "
321+
f"{name} not found, pass a custom path if you want to use "
322+
f"a custom dataset path for the dataset.")
323+
324+
# Get the list of datasets.
325+
possible_datasets = glob.glob(os.path.join(dataset_path, name))
326+
if len(possible_datasets) == 0:
327+
raise ValueError(f"No datasets found for pattern: {name}.")
328+
datasets = [cls.helios(os.path.basename(p), dataset_path = dataset_path)
329+
for p in sorted(possible_datasets)]
330+
return cls.merge(*datasets)
331+
299332
# Locate the path to the dataset, using synthetic semantics.
300333
if dataset_path is None:
301334
dataset_path = os.path.abspath(

agml/io.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,8 @@
1515
import random
1616
import inspect
1717

18+
import cv2
19+
1820
from agml.utils.io import (
1921
get_file_list as _get_file_list,
2022
get_dir_list as _get_dir_list,
@@ -112,5 +114,15 @@ def random_file(path, **kwargs):
112114
return random.choice(get_file_list(path, **kwargs))
113115

114116

117+
def read_image(path, **kwargs):
    """Load an image from a file on disk.

    Args:
        path (str): The path to the image file.
        **kwargs: Keyword arguments to pass to `cv2.imread`.

    Returns:
        numpy.ndarray: The image.
    """
    image = cv2.imread(path, **kwargs)
    return image
116128

agml/models/segmentation.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
from agml.data.public import source
3333
from agml.utils.general import resolve_list_value
3434
from agml.utils.image import resolve_image_size
35-
from agml.viz.masks import show_image_with_overlaid_mask, show_image_and_mask
35+
from agml.viz.masks import show_image_and_overlaid_mask, show_image_and_mask
3636

3737
# This is last since `agml.models.base` will check for PyTorch Lightning,
3838
# and PyTorch Lightning automatically installed torchmetrics with it.
@@ -250,7 +250,7 @@ def show_prediction(self, image, overlay = False, **kwargs):
250250
image = self._expand_input_images(image)[0]
251251
mask = self.predict(image, **kwargs)
252252
if overlay:
253-
return show_image_with_overlaid_mask(image, mask, **kwargs)
253+
return show_image_and_overlaid_mask(image, mask, **kwargs)
254254
return show_image_and_mask(image, mask, **kwargs)
255255

256256
def load_benchmark(self, dataset):

agml/synthetic/generator.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def _convert_options_to_xml(self):
155155

156156
# The `scan` tag is used for LiDAR generation. This must be added later
157157
# because there can be multiple origins and thus multiple `scan` tags.
158-
if self._generation_options.simulation_type == SimulationType.LiDAR:
158+
if self._generation_options.simulation_type == SimulationType.LiDAR or self._generation_options.simulation_type == SimulationType.Both:
159159
scan_tags = []
160160
if isinstance(parameters['lidar']['origin'][0], list):
161161
for origin in parameters['lidar']['origin']:
@@ -173,15 +173,15 @@ def _convert_options_to_xml(self):
173173
self._canopy + "Parameters": parameters['canopy'],
174174
'Ground': parameters['Ground']}
175175
xml_params = {'canopygenerator': canopy_parameters}
176-
if self._generation_options.simulation_type == SimulationType.RGB:
176+
if self._generation_options.simulation_type == SimulationType.RGB or self._generation_options.simulation_type == SimulationType.Both:
177177
xml_params[''] = parameters['camera']
178178

179179
# Convert all of the parameters to XML format.
180180
tree = ET.parse(io.StringIO(dict2xml({'helios': xml_params})))
181181
root = tree.getroot()
182182

183183
# Add the `scan` tags if necessary for LiDAR generation.
184-
if self._generation_options.simulation_type == SimulationType.LiDAR:
184+
if self._generation_options.simulation_type == SimulationType.LiDAR or self._generation_options.simulation_type == SimulationType.Both:
185185
for scan_tag in scan_tags: # noqa
186186
scan_tag_contents = ET.parse(
187187
io.StringIO(dict2xml({'scan': scan_tag}))).getroot()

agml/synthetic/options.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ class SimulationType(Enum):
3535
"""The simulation render (RGB vs. LiDAR) that is generated."""
3636
RGB: str = "rgb"
3737
LiDAR: str = "lidar"
38+
Both: str = "rgb lidar"
3839

3940

4041
NumberOrMaybeList = TypeVar('NumberOrMaybeList', Number, List[Number])

agml/synthetic/synthetic_data_generation/generate.cpp

Lines changed: 14 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ struct SyntheticAnnotationConfig {
1212
public:
1313
int num_images;
1414
vector<string> annotation_type;
15-
string simulation_type;
15+
vector<string> simulation_type;
1616
vector<string> labels;
1717
string xml_path;
1818
string output_path;
@@ -53,7 +53,14 @@ void SyntheticAnnotationConfig::load_config(const char* path) {
5353
}
5454
this->annotation_type.push_back(line);
5555
} else if (i == 2) {
56-
this->simulation_type = line;
56+
string delimeter = " "; size_t pos;
57+
vector<string> simulation_type;
58+
while ((pos = line.find(' ')) != string::npos)
59+
{
60+
this -> simulation_type.push_back(line.substr(0,pos));
61+
line.erase(0, pos + delimeter.length());
62+
}
63+
this->simulation_type.push_back(line);
5764
} else if (i == 3) {
5865
string delimeter = " "; size_t pos;
5966
vector<string> labels;
@@ -150,7 +157,7 @@ int main(int argc, char** argv) {
150157
SyntheticAnnotation annotation(&context);
151158

152159
// Choose either the LiDAR or RGB image simulation.
153-
if (config.simulation_type == "lidar") {
160+
if (!config.simulation_type.empty() && config.simulation_type[1] == "lidar") {
154161
// Get the UUID of all the elements on the scene
155162
vector<uint> UUID_trunk = cgen.getTrunkUUIDs();
156163
vector<uint> UUID_shoot = cgen.getBranchUUIDs();
@@ -190,7 +197,9 @@ int main(int argc, char** argv) {
190197
string cloud_export = this_image_dir + "/" + string("point_cloud_" + to_string(i) + ".xyz");
191198
std::cout << "Writing LiDAR Point cloud to " << cloud_export << " " << std::endl;
192199
lidarcloud.exportPointCloud(cloud_export.c_str());
193-
} else {
200+
}
201+
if (!config.simulation_type.empty() && config.simulation_type[0] == "rgb")
202+
{
194203
if (!config.annotation_type.empty() && config.annotation_type[0] != "none") {
195204
// Set the annotation type based on the configuration.
196205
vector<string> va = config.annotation_type;
@@ -207,7 +216,7 @@ int main(int argc, char** argv) {
207216
// Add labels according to whatever scheme we want.
208217
vector<string> vl = config.labels;
209218
for (int p = 0; p < cgen.getPlantCount(); p++) { // loop over vines
210-
if (config.simulation_type == "rgb") {
219+
if (!config.simulation_type.empty() && config.simulation_type[0] == "rgb") {
211220
if (contains(vl, "trunks")) {
212221
annotation.labelPrimitives(cgen.getTrunkUUIDs(p), "trunks");
213222
}

agml/viz/boxes.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,8 @@ def annotate_object_detection(image,
110110
"either `bbox` or `bboxes` for bounding boxes.")
111111
if bbox_format is not None:
112112
bboxes = convert_bbox_format(bboxes, bbox_format)
113+
if labels is None:
114+
labels = [0] * len(bboxes)
113115

114116
# Run a few final checks in order to ensure data is formatted properly.
115117
image = format_image(image, mask = False)
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
2+
# `ghai_strawberry_fruit_detection`
3+
4+
## Dataset Metadata
5+
6+
| Metadata | Value |
7+
| --- | --- |
8+
| **Classes** | Bud, Calyx, Detached Fruit, Flower, Large green, Leaf, Ripe fruit, Small Green, Stem, Unripe fruit |
9+
| **Machine Learning Task** | object_detection |
10+
| **Agricultural Task** | crop_detection |
11+
| **Location** | United States, North America |
12+
| **Sensor Modality** | RGB |
13+
| **Real or Synthetic** | real |
14+
| **Platform** | handheld/ground |
15+
| **Input Data Format** | JPG |
16+
| **Annotation Format** | coco_json |
17+
| **Number of Images** | 500 |
18+
| **Documentation** | https://github.com/AxisAg/GHAIDatasets/blob/main/datasets/strawberry.md |
19+
20+
21+
## Examples
22+
23+
![Example Images for ghai_strawberry_fruit_detection](https://github.com/Project-AgML/AgML/blob/main/docs/sample_images/ghai_strawberry_fruit_detection_examples.png)
Loading

0 commit comments

Comments
 (0)