TNO Intern

Commit b2a75d31 authored by Hen Brett's avatar Hen Brett 🐔
Browse files

Whoa! It works really well; current testing seems to show that the best...

Whoa! It works really well; current testing seems to show that the best number of simulations to chunk by is 100.
parent a50f7b93
Loading
Loading
Loading
Loading
+1 −3
Original line number Diff line number Diff line
@@ -21,9 +21,7 @@ def simulate_doublet(output_data: xr.Dataset, reservoir_properties: xr.Dataset,
                                        input_core_dims=[[], [], [], [], [], [], [], [], []],
                                        output_core_dims=[[], [], [], [], [], [], [], [], [], [], [], [], [], []],
                                        vectorize=True,
                                        dask="parallelized",
                                        dask_gufunc_kwargs={"allow_rechunk": True}
                                        )
                                        dask="parallelized")

    # Assign output DataArrays to the output_data object
    output_data["power"] = output_data_arrays[0]
+1 −1
Original line number Diff line number Diff line
@@ -34,7 +34,7 @@ class PyThermoGIS(TestCase):


    def read_input_grids(self):
        new_cellsize=5000 # in m
        new_cellsize=1000 # in m
        input_grids = resample_xarray_grid(read_grid(self.test_files_out_path / "ROSL_ROSLU__thick.zmap"), new_cellsize=new_cellsize).to_dataset(name="thickness_mean")
        input_grids["thickness_sd"] = resample_xarray_grid(read_grid(self.test_files_out_path / "ROSL_ROSLU__thick_sd.zmap"), new_cellsize=new_cellsize)
        input_grids["ntg"] = resample_xarray_grid(read_grid(self.test_files_out_path / "ROSL_ROSLU__ntg.zmap"), new_cellsize=new_cellsize)