//! `bcore/datamodel/_impl.rs` — HDF5 read implementations for simulation result files.

1use crate::error::ApiError;
2use crate::process::Histogram;
3
4use super::main_file::{MainFInal, MainInitial, MainRecords, Misc};
5use super::tallies::Tallies;
6use super::{Dim, ResultGroup};
7use hdf5::Group;
8use ndarray::{s, Array1, Array2, ArrayView1};
9use std::collections::HashMap;
10
/// Read a scalar dataset `$name` from `$group` as type `$t`.
///
/// Must be invoked inside a function returning `hdf5::Result` (it uses `?`).
/// The previous version duplicated one arm per supported type (f64 / usize /
/// u64); a `ty` fragment handles all of them (and any other readable scalar
/// type) with a single arm, backward compatibly.
macro_rules! read_scalar {
    ($group:expr, $name:expr, $t:ty) => {
        $group.dataset($name)?.read_scalar::<$t>()?
    };
}
23
/// Read a whole dataset `$name` from `$group` as a flat `Vec<$t>`.
///
/// Must be invoked inside a function returning `hdf5::Result` (it uses `?`).
/// Like `read_scalar!`, the per-type arms (f64 / usize / u64) are collapsed
/// into one generic `ty` arm, which is backward compatible.
macro_rules! read_vec {
    ($group:expr, $name:expr, $t:ty) => {
        $group.dataset($name)?.read_raw::<$t>()?
    };
}
36
37impl ResultGroup<Misc> for hdf5::Group {
38    fn read_g(&self) -> hdf5::Result<Misc> {
39        let mut fields: HashMap<String, u64> = HashMap::new();
40        self.iter_visit_default(&mut fields, |group, name, _link_info, fields| {
41            if let Ok(dataset) = group.dataset(name) {
42                if let Ok(value) = dataset.read_scalar::<u64>() {
43                    fields.insert(name.to_string(), value);
44                }
45            }
46            // Continue the iteration
47            true
48        })
49        .unwrap();
50
51        Ok(Misc {
52            n_node_thread: *fields.get("n_node_thread").unwrap_or(&0),
53            n_rank: *fields.get("n_rank").unwrap_or(&0),
54        })
55    }
56}
57
58pub fn read_number_particle(filename: &str) -> hdf5::Result<Vec<f64>> {
59    let file = hdf5::File::open_as(filename, hdf5::file::OpenMode::Read)?;
60    let rec = file.group("/records")?;
61    let v = read_vec!(rec, "number_particle", f64);
62    Ok(v)
63}
64
65pub fn read_spatial_model_properties(
66    key: &str,
67    files: &[String],
68    cx: &mut Array2<f64>,
69    n_export: usize,
70) -> Result<(), ApiError> {
71    for filename in files.iter() {
72        // Open the HDF5 file in read mode
73        let file = hdf5::File::open_as(filename, hdf5::file::OpenMode::Read)?;
74
75        // Access the "biological_model" group
76        let group = file.group("biological_model")?;
77        //TODO: use group.len() instead of n_export
78        for i_e in 0..n_export {
79            // Read the data for the current export index
80            let tmp: Vec<f64> = match group.dataset(&format!("{}/spatial/{}", i_e, key)) {
81                Ok(dataset) => dataset.read_raw::<f64>()?, // Read the data directly as Vec<f64>
82                Err(_) => continue,                        // Skip if the dataset doesn't exist
83            };
84
85            let tmp_array = ArrayView1::from_shape(tmp.len(), &tmp).map_err(|_| {
86                hdf5::Error::Internal("Shape mismatch while creating ArrayView1".to_string())
87            })?;
88
89            let slice_shape = cx.slice(s![i_e, ..]).len();
90            if tmp_array.len() == slice_shape {
91                cx.slice_mut(s![i_e, ..])
92                    .zip_mut_with(&tmp_array, |a, b| *a += b);
93            } else {
94                eprintln!(
95                    "Shape mismatch: cx[{}, ..].shape = {}, tmp.shape = {}",
96                    i_e,
97                    slice_shape,
98                    tmp_array.len()
99                );
100            }
101        }
102    }
103    Ok(())
104}
105
106pub fn get_probe_size(files: &[String])-> Result<usize,ApiError>
107{
108    let mut probe_size = 0;
109    for filename in files.iter() {
110        let file = hdf5::File::open_as(filename, hdf5::file::OpenMode::Read)?;
111        if let Ok(dataset) = file.dataset("probes")
112        {
113            probe_size += dataset.size();
114        }
115        else {
116            return Err(ApiError::Default("No Probes in dataset".to_string()));
117        }
118        
119    }
120    Ok(probe_size)
121}
122
123pub fn read_model_properties(
124    key: &str,
125    files: &[String],
126    i_export: usize,
127) -> hdf5::Result<Array1<f64>> {
128    let mut total_size = 0;
129    for filename in files.iter() {
130        // Open the HDF5 file in read mode
131        let file = hdf5::File::open_as(filename, hdf5::file::OpenMode::Read)?;
132
133        // Access the "biological_model" group
134        let group = file.group("biological_model")?;
135        // Not all nodes have th=e same number of export so it happens that i_export is >
136        // number_export for the current file
137        if (group.len() as usize) >= i_export {
138            let dataset = group.dataset(&format!("{}/{}", i_export, key))?;
139            total_size += dataset.size();
140        }
141    }
142
143    let mut result = Array1::zeros(total_size);
144    let mut offset = 0;
145    for filename in files.iter() {
146        // Open the HDF5 file in read mode
147        let file = hdf5::File::open_as(filename, hdf5::file::OpenMode::Read)?;
148
149        // Access the "biological_model" group
150        let group = file.group("biological_model")?;
151        if (group.len() as usize) >= i_export {
152            let dataset = group.dataset(&format!("{}/{}", i_export, key))?;
153
154            // Read the dataset into a temporary array
155            let temp_array: Vec<f64> = dataset.read_raw::<f64>()?;
156            let tmp_array =
157                ArrayView1::from_shape(temp_array.len(), &temp_array).map_err(|_| {
158                    hdf5::Error::Internal("Shape mismatch while creating ArrayView1".to_string())
159                })?;
160
161            // Copy the data into the result array
162            result
163                .slice_mut(s![offset..offset + temp_array.len()])
164                .assign(&tmp_array);
165
166            offset += temp_array.len();
167        }
168    }
169
170    Ok(result)
171}
172
173pub fn get_n_export_real(files: &[String]) -> hdf5::Result<usize> {
174    if files.is_empty() {
175        panic!("FIXME: not enough files")
176    }
177    let file = hdf5::File::open_as(&files[0], hdf5::file::OpenMode::Read)?;
178    let group = file.group("biological_model")?;
179    let group_size = group.len() as usize; //We export n_export times properties but if there is no
180                                           //particle we do not export. group_size <= n_export and for
181    Ok(group_size)
182}
183
184pub fn make_histogram(
185    files: &[String],
186    i_export: usize,
187    key: &str,
188    hist: &mut Histogram,
189) -> hdf5::Result<()> {
190    for filename in files.iter() {
191        // Open the HDF5 file in read mode
192        let file = hdf5::File::open_as(filename, hdf5::file::OpenMode::Read)?;
193        let group = file.group("biological_model")?;
194        if (group.len() as usize) >= i_export {
195            let dataset = group.dataset(&format!("{}/{}", i_export, key))?;
196            let temp_array: Vec<f64> = dataset.read_raw::<f64>()?;
197            hist.add(temp_array);
198        }
199    }
200
201    Ok(())
202}
203
204pub fn read_avg_model_properties(
205    key: &str,
206    files: &[String],
207    n_export: usize,
208) -> hdf5::Result<Array1<f64>> {
209    let mut result = Array1::zeros(n_export);
210    let mut tot_particle: Array1<f64> = Array1::zeros(n_export);
211
212    for filename in files {
213        let file = hdf5::File::open_as(filename, hdf5::file::OpenMode::Read)?;
214        let group = file.group("biological_model")?;
215        let group_size = group.len() as usize; //We export n_export times properties but if there is no
216                                               //particle we do not export. group_size <= n_export and for
217                                               //all i > group_size , value is set to 0
218        for i_e in 0..group_size {
219            let dataset = group.dataset(&format!("{}/{}", i_e, key))?;
220            let temp_array: Vec<f64> = dataset.read_raw::<f64>()?;
221            result[i_e] += temp_array.iter().sum::<f64>();
222            tot_particle[i_e] += temp_array.len() as f64;
223        }
224    }
225
226    Ok(result / tot_particle)
227}
228
/// Convenience wrapper: accumulate the spatial "mass" property of every file
/// into `cx`. See [`read_spatial_model_properties`] for the semantics.
pub fn read_model_mass(
    files: &[String],
    cx: &mut Array2<f64>,
    n_export: usize,
) -> Result<(), ApiError> {
    read_spatial_model_properties("mass", files, cx, n_export)
}
236
237impl ResultGroup<MainInitial> for Group {
238    fn read_g(&self) -> hdf5::Result<MainInitial> {
239        let delta_time = read_scalar!(self, "delta_time", f64);
240        let final_time = read_scalar!(self, "final_time", f64);
241        let initial_biomass_concentration =
242            read_scalar!(self, "initial_biomass_concentration", f64);
243        let initial_weight = read_scalar!(self, "initial_weight", f64);
244        let n_map = read_scalar!(self, "n_map", usize);
245        let number_compartment = read_scalar!(self, "number_compartment", usize);
246        let number_particles = read_scalar!(self, "number_particles", u64);
247        let t_per_flow_map = read_scalar!(self, "t_per_flow_map", f64);
248        // println!("{:?}",read_vec!(self,"particle_distribution",u64));
249
250        Ok(MainInitial {
251            delta_time,
252            final_time,
253            initial_biomass_concentration,
254            initial_weight,
255            n_map,
256            number_compartment,
257            number_particles,
258            t_per_flow_map,
259        })
260    }
261}
262
263impl ResultGroup<MainRecords> for Group {
264    fn read_g(&self) -> hdf5::Result<MainRecords> {
265        let concentration_liquid = read_vec!(self, "concentration_liquid", f64);
266        let volume_liquid = read_vec!(self, "volume_liquid", f64);
267        let (concentration_gas, volume_gas) = match (
268            self.dataset("concentration_gas"),
269            self.dataset("volume_gas"),
270        ) {
271            (Ok(cg), Ok(vg)) => (Some(cg.read_raw::<f64>()?), Some(vg.read_raw::<f64>()?)),
272            _ => (None, None),
273        };
274
275        let mtr = match self.dataset("mtr") {
276            Ok(_mtr) => Some(_mtr.read_raw::<f64>()?),
277            _ => None,
278        };
279
280        let tallies = match self.dataset("tallies") {
281            Ok(_t) => Some(Tallies(_t.read_raw::<f64>()?)),
282            _ => None,
283        };
284
285        let shape = self.dataset("concentration_liquid")?.shape();
286        let dim = Dim(shape[1], shape[2]);
287        let time = read_vec!(self, "time", f64);
288        Ok(MainRecords {
289            concentration_liquid,
290            volume_liquid,
291            concentration_gas,
292            volume_gas,
293            mtr,
294            tallies,
295            dim,
296            time,
297        })
298    }
299}
300
301impl ResultGroup<MainFInal> for Group {
302    fn read_g(&self) -> hdf5::Result<MainFInal> {
303        let number_particles = read_scalar!(self, "number_particles", u64);
304
305        let events = if let Ok(ds_events) = self.group("events") {
306            let mut events: HashMap<String, u64> = HashMap::new();
307            ds_events
308                .iter_visit_default(&mut events, |group, name, _link_info, fields| {
309                    if let Ok(dataset) = group.dataset(name) {
310                        if let Ok(value) = dataset.read_scalar::<u64>() {
311                            fields.insert(name.to_string(), value);
312                        }
313                    }
314                    true
315                })
316                .unwrap();
317            Some(events)
318        } else {
319            None
320        };
321
322        Ok(MainFInal {
323            events,
324            number_particles,
325        })
326    }
327}