// tstMaterialBuffer.cpp
//
// ./Core/io/tests/tstMaterialBuffer.cpp

// This test is not a part of the ORIGEN API in SCALE 6.2. It tests the ability
// to read/write HDF5 versions of the Origen::Material to disk. This was
// envisioned
// as a way to buffer the contents of the Origen::Material to disk, but was
// not fully implemented. For now, it is the Origen::Material user's
// responsibility. The following related files are copied and pasted here.
//
// ------------------------------------------------------------------------------
// FILE: Origen/Core/io/MaterialBuffer.h
//
// #ifndef Origen_MaterialBuffer_H
// #define Origen_MaterialBuffer_H
//
// #include "Origen/Core/dc/Material.h"
//
// namespace Origen {
//
// class MaterialBuffer {
// public:
// bool initialize_file(std::string file, const Material& mat, bool
// overwrite=false);
// bool dump_range(int pstart, int pend, std::string file, Material* mat);
// bool retrieve_range(int pstart, int pend, std::string file, Material*
// mat);
//
// virtual ~MaterialBuffer(){}
//
// private:
// virtual bool initialize_file_impl(std::string file, const Material& mat,
// bool overwrite);
// virtual bool dump_range_impl(int pstart, int pend, std::string file,
// Material* mat);
// virtual bool retrieve_range_impl(int pstart, int pend, std::string file,
// Material* mat);
// };
//
// typedef std::shared_ptr<MaterialBuffer> SP_MaterialBuffer;
//
// }
//
// #endif
//
// ------------------------------------------------------------------------------
// FILE: Origen/Core/io/MaterialBuffer.cpp
// #include "Origen/Core/io/MaterialBuffer.h"
// #include "ScaleUtils/IO/DB.h"
// #include <ostream>
//
// namespace Origen {
// bool MaterialBuffer::initialize_file(std::string file,
// const Material& mat,
// bool overwrite){
// return initialize_file_impl(file,mat,overwrite);
// }
// bool MaterialBuffer::dump_range(int pstart, int pend, std::string
// file, Material* mat){
// return dump_range_impl(pstart,pend,file,mat);
// }
// bool MaterialBuffer::retrieve_range(int pstart, int pend, std::string
// file, Material* mat){
// return retrieve_range_impl(pstart,pend,file,mat);
// }
//
// bool MaterialBuffer::initialize_file_impl(std::string file,
// const Material& mat,
// bool overwrite){
// ScaleUtils::IO::DB opts;
// std::ofstream ofs;
// bool status = opts.prepareBinaryFileWrite(ofs,file);
// return status;
// }
// bool MaterialBuffer::dump_range_impl(int pstart, int pend,
// std::string file, Material* mat){
// return false;
// }
// bool MaterialBuffer::retrieve_range_impl(int pstart, int pend,
// std::string file, Material* mat){
// return false;
// }
// }
//
// ------------------------------------------------------------------------------
// FILE: Origen/Core/io/MaterialBuffer_hdf5.h
// #include "Origen/Core/io/MaterialBuffer.h"
//
// namespace Origen {
//
// class MaterialBuffer_hdf5 : public MaterialBuffer {
// private:
// virtual bool initialize_file_impl(std::string file, const Material&
// mat, bool overwrite);
// virtual bool dump_range_impl(size_t pstart, size_t pend, std::string
// file, Material* mat);
// virtual bool retrieve_range_impl(size_t pstart, size_t pend,
// std::string file, Material* mat);
// };
//
// }
//
// ------------------------------------------------------------------------------
// FILE: Origen/Core/io/MaterialBuffer_hdf5.cpp
// #include "ScaleUtils/IO/DB.h"
// #include "Origen/Core/io/MaterialBuffer_hdf5.h"
//
// //#include <hdf5.h>
// //#include <hdf5_hl.h>
//
// namespace Origen {
//
// bool MaterialBuffer_hdf5::initialize_file_impl(std::string file,
// const Material& mat, bool overwrite){
// ScaleUtils::IO::DB opts;
// std::ofstream ofs;
// bool status = false;
// std::string path;
// if( file=="" ){
// path=mat.name()+".h5";
// }
//
// // hid_t result;
// jDebugLine("creating path={"<<path<<"}");
//
// // if( overwrite ){
// // result = H5Fopen(path.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT);
//
// // // scalar dimensions
// // hsize_t dims[1] = {1};
//
// // // write the data
// // int total_nuclides = mat.total_nuclides();
// // H5LTmake_dataset_int(result, "total_nuclides", 1, dims,
// &total_nuclides);
//
// // } else {
// // result = H5Fopen(path.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT);
// // }
// // result = H5Fcreate (path.c_str() , H5F_ACC_EXCL,
// // H5P_DEFAULT, H5P_DEFAULT);
//
// // H5Fclose(result);
// //status = opts.prepareTextFileWrite(ofs,path);
// //if( !status )return status;
// return status;
// }
// bool MaterialBuffer_hdf5::dump_range_impl(size_t pstart, size_t pend,
// std::string file, Material* mat){
// return false;
// }
// bool MaterialBuffer_hdf5::retrieve_range_impl(size_t pstart, size_t
// pend, std::string file, Material* mat){
// return false;
// }
//
//
// }
// ------------------------------------------------------------------------------
#include <algorithm>
#include <iostream>
#include <map>
#include <sstream>
#include <string>
#include <vector>
#include "Nemesis/gtest/nemesis_gtest.hh"
#include "Nemesis/harness/DBC.hh"
#include "Origen/Core/TestPaths.h"
using std::string;
typedef std::map<std::string, Origen::SP_Library> LibraryMap_t;
#include <hdf5.h>
#include <hdf5_hl.h>
//! Open the child group `name` under `parent`, creating it if absent.
//! Returns the group handle; the caller is responsible for H5Gclose().
hid_t h5_group_handle( hid_t parent, const char* name )
{
    Require( parent > 0 );
    // H5Lexists returns >0 if the link exists, 0 if absent, <0 on error.
    // The original `if( exists )` treated a negative (error) return as
    // "exists" and then tried to open a missing group; only a strictly
    // positive value means the group is really there.
    htri_t exists = H5Lexists( parent, name, H5P_DEFAULT );
    hid_t g;
    if( exists > 0 )
    {
        g = H5Gopen1( parent, name );
    }
    else
    {
        g = H5Gcreate1( parent, name, 0 );
    }
    return g;
}
//! Attach a fixed-width (32-character, space-padded) "units" string
//! attribute to `dataset` unless one is already present.
//! Returns the status of the last HDF5 call performed (0 if nothing done).
herr_t h5_write_units( hid_t dataset, const char* units )
{
    // space-pad the units string out to a fixed 32-character width
    const size_t UNITS_SIZE( 32 );
    std::string sunits( units );
    if( sunits.size() < UNITS_SIZE )
    {
        sunits.resize( UNITS_SIZE, ' ' );
    }
    // only create the attribute once; H5Aexists returns >0 when present,
    // 0 when absent, <0 on error (an error also skips the write here)
    htri_t exists = H5Aexists( dataset, "units" );
    herr_t status = 0;
    if( exists == 0 )
    {
        hid_t dataspace = H5Screate( H5S_SCALAR );
        // fixed-size, NUL-terminated C-string type (+1 for the terminator)
        hid_t type = H5Tcopy( H5T_C_S1 );
        status = H5Tset_size( type, UNITS_SIZE + 1 );
        Check( status >= 0 );
        status = H5Tset_strpad( type, H5T_STR_NULLTERM );
        Check( status >= 0 );
        hid_t att = H5Acreate2(
            dataset, "units", type, dataspace, H5P_DEFAULT, H5P_DEFAULT );
        status = H5Awrite( att, type, sunits.c_str() );
        H5Tclose( type );
        H5Aclose( att );
        H5Sclose( dataspace );
    }
    return status;
}
//! Create a new dataset of the given rank/dims in group `g`; the (unused)
//! `type_match` argument only selects the HDF5 datatype via template
//! specialization. Caller must H5Dclose() the returned handle.
//! NOTE(review): the function-header lines were truncated in the original
//! source (only "hid_t g, ..." argument lists remained); the name
//! h5_create_dataset is reconstructed from the call in h5_update_dataset.
template <typename T>
hid_t h5_create_dataset(
    hid_t g, const char* name, hsize_t rank, hsize_t* dims, T type_match );
// specialization for double data
template <>
hid_t h5_create_dataset(
    hid_t g, const char* name, hsize_t rank, hsize_t* dims, double type_match )
{
    hid_t dataspace = H5Screate_simple( rank, dims, nullptr );
    hid_t datatype = H5Tcopy( H5T_NATIVE_DOUBLE );
    hid_t dataset = H5Dcreate(
        g, name, datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
    H5Sclose( dataspace );
    H5Tclose( datatype );
    return dataset;
}
// specialization for float data
template <>
hid_t h5_create_dataset(
    hid_t g, const char* name, hsize_t rank, hsize_t* dims, float type_match )
{
    hid_t dataspace = H5Screate_simple( rank, dims, nullptr );
    hid_t datatype = H5Tcopy( H5T_NATIVE_FLOAT );
    hid_t dataset = H5Dcreate(
        g, name, datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
    H5Sclose( dataspace );
    H5Tclose( datatype );
    return dataset;
}
//! Write the raw values at `first` into an already-created dataset; the
//! element type selects the native HDF5 datatype via specialization.
template <typename T>
herr_t h5_write_dataset( hid_t dataset, const T* first );
// specialization: double data
template <>
herr_t h5_write_dataset( hid_t dataset, const double* first )
{
    const herr_t rc = H5Dwrite(
        dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, first );
    return rc;
}
// specialization: float data
template <>
herr_t h5_write_dataset( hid_t dataset, const float* first )
{
    const herr_t rc = H5Dwrite(
        dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, first );
    return rc;
}
//! Write `rank`/`dims` data starting at `first` into dataset `name` inside
//! group `g`, creating the dataset (with a `units` attribute) if it does
//! not exist yet. Returns the status of the final H5Dwrite.
template <typename T>
herr_t h5_update_dataset( hid_t g,
                          const char* name,
                          const char* units,
                          hsize_t rank,
                          hsize_t* dims,
                          T* first )
{
    // H5Lexists: >0 exists, 0 absent, <0 error. The original treated a
    // negative (error) return as "exists" and tried to open a missing
    // dataset; only a strictly positive value takes the open path.
    htri_t exists = H5Lexists( g, name, H5P_DEFAULT );
    hid_t dataset;
    if( exists > 0 )
    {
        // dataset already present: open it for update
        dataset = H5Dopen( g, name, H5P_DEFAULT );
    }
    else
    {
        jDebugLine( " creating new dataset" );
        // datatype is selected by the first element's type
        dataset = h5_create_dataset( g, name, rank, dims, first[0] );
        herr_t astatus = h5_write_units( dataset, units );
        Check( astatus >= 0 );
    }
    Check( dataset > 0 );
    herr_t status = h5_write_dataset( dataset, first );
    H5Dclose( dataset );
    return status;
}
//! Write the step transition matrix `trx` into a "TransitionMatrix" group
//! under `g_step`. Returns the (still open) group handle; caller closes it.
//! NOTE(review): four "status = h5_update_dataset(" call headers were
//! truncated in the original source (only their argument lists remained);
//! they are reconstructed here to match the intact calls below — confirm
//! against revision history.
hid_t h5_write_trx( const Origen::TransitionMatrixP& trx, hid_t g_step )
{
    jDebugLine( " write_trx" );
    // convenience
    herr_t status;
    int i;
    hsize_t dims_nd[] = {(hsize_t)trx.get_itot()};
    hsize_t dims[] = {1};
    // transition matrix over the step is a group
    hid_t g_trx = h5_group_handle( g_step, "TransitionMatrix" );
    std::cout << "g_trx=" << g_trx << std::endl;
    // number of lite nuclides
    i = trx.get_ilite();
    status = H5LTmake_dataset_int( g_trx, "num_lite_nuclides", 1, dims, &i );
    // number of actinides (also sizes the fission-related datasets)
    i = trx.get_iact();
    hsize_t dims_fisxs[] = {(hsize_t)i};
    status =
        H5LTmake_dataset_int( g_trx, "num_actinide_nuclides", 1, dims, &i );
    // number of fission products
    i = trx.get_ifp();
    status = H5LTmake_dataset_int(
        g_trx, "num_fission_product_nuclides", 1, dims, &i );
    // total capture
    status = h5_update_dataset(
        g_trx, "loxs", "barn", 1, dims_nd, &trx.get_tocap()->at( 0 ) );
    // fission
    status = h5_update_dataset(
        g_trx, "fisxs", "barn", 1, dims_fisxs, &trx.get_fiss()->at( 0 ) );
    // a-matrix
    hsize_t dims_t[] = {(hsize_t)trx.get_a()->size()};
    status = h5_update_dataset(
        g_trx, "coeff", "1/s", 1, dims_t, &trx.get_a()->at( 0 ) );
    // flux spectrum
    hsize_t dims_ng[] = {(hsize_t)trx.get_nfluxgrp()};
    status = h5_update_dataset( g_trx,
                                "spectrum",
                                "1/cm2-s",
                                1,
                                dims_ng,
                                &trx.get_spectrum()->at( 0 ) );
    // neutron yields
    hsize_t dims_n[] = {( hsize_t )( trx.get_iact() + trx.get_ilite() )};
    status = h5_update_dataset(
        g_trx, "neutron_yields", "#/s", 1, dims_n, &trx.get_genneu()->at( 0 ) );
    // kappa capture
    status = h5_update_dataset( g_trx,
                                "kappa_capture",
                                "MeV",
                                1,
                                dims_nd,
                                &trx.get_kappa_capture()->at( 0 ) );
    // kappa fission
    status = h5_update_dataset( g_trx,
                                "kappa_fission",
                                "MeV",
                                1,
                                dims_fisxs,
                                &trx.get_kappa_fission()->at( 0 ) );
    (void)status;
    return g_trx;
}
//! Write the state at time point `p` of `mat` into a "Step(p+1)" group
//! under `g_mat`; returns the (still open) step group handle.
// NOTE(review): this function has lost source lines — `vec` (presumably
// the begin-of-step amount vector) and `trx` (the step transition matrix)
// are used below but never declared; recover from revision history.
hid_t h5_write_step( const Origen::Material& mat, int p, hid_t g_mat )
{
jDebugLine( " write_step" );
// convenience
hsize_t dims_nd[] = {mat.total_nuclides()};
hsize_t dims[] = {1};
herr_t status;
std::string s;
double d;
// build the group name "Step(<p+1>)" (1-based step index)
{
std::stringstream ss;
ss << "Step(" << ( p + 1 ) << ")";
s = ss.str();
}
hid_t g_step = h5_group_handle( g_mat, s.c_str() );
std::cout << "g_step=" << g_step << std::endl;
// write amount at begin-of-step
// NOTE(review): the statement header is missing here — only the argument
// list of what was likely `status = h5_update_dataset(` remains, and
// `vec` is undeclared; confirm against revision history.
g_step, "bos_amount", "#-cm2/barn", 1, dims_nd, &( *vec )[0] );
// the final time point has no step data after it — stop here
if( p >= mat.nsteps() )
{
return g_step;
}
// time at begin-of-step
d = mat.time_at( p );
status = h5_update_dataset( g_step, "time_bos", "s", 1, dims, &d );
// flux over the step
d = mat.flux_over( p );
status = h5_update_dataset( g_step, "step_flux", "#/cm2-s", 1, dims, &d );
// power over the step
d = mat.power_over( p );
status = h5_update_dataset( g_step, "step_power", "W", 1, dims, &d );
// get transition matrix
// NOTE(review): the statement fetching `trx` from `mat` is missing here.
// write transition matrix
hid_t g_trx = h5_write_trx( *trx, g_step );
status = H5Gclose( g_trx );
(void)status;
return g_step;
}
//! Serialize one material into a new "Material(<id>)" group under `g_reg`,
//! including one "Step(...)" subgroup per time point. Returns the open
//! material group handle; the caller must H5Gclose() it.
hid_t h5_write_material( const Origen::Material& mat, hid_t g_reg )
{
    herr_t rc;
    std::string label;
    double dval;
    int ival;
    // dimensions for scalar datasets
    hsize_t scalar_dims[1] = {1};
    // group named "Material(<id>)"
    {
        std::stringstream ss;
        ss << "Material(" << mat.id() << ")";
        label = ss.str();
    }
    hid_t mat_group = H5Gcreate1( g_reg, label.c_str(), 0 );
    // library type
    label = mat.library_type();
    rc = H5LTmake_dataset_string( mat_group, "library_type", label.c_str() );
    // initial mass
    dval = mat.initial_mass();
    rc = h5_update_dataset(
        mat_group, "initial_mass", "g", 1, scalar_dims, &dval );
    // initial heavy metal mass
    dval = mat.initial_hm_mass();
    rc = h5_update_dataset(
        mat_group, "initial_hm_mass", "g", 1, scalar_dims, &dval );
    // material name
    label = mat.name();
    rc = H5LTmake_dataset_string( mat_group, "name", label.c_str() );
    // material id
    ival = mat.id();
    rc = H5LTmake_dataset_int( mat_group, "id", 1, scalar_dims, &ival );
    // cold volume
    dval = mat.cold_volume();
    rc = h5_update_dataset(
        mat_group, "cold_volume", "cm3", 1, scalar_dims, &dval );
    // one step group per time point
    for( int p = 0; p < mat.ntimes(); ++p )
    {
        hid_t g_step = h5_write_step( mat, p, mat_group );
        rc = H5Gclose( g_step );
    }
    (void)rc;
    return mat_group;
}
//! Write every material in `mat_map` under a new "MaterialMap" group in
//! `file`. Returns the open registry group handle (caller closes it).
//! NOTE(review): the signature line was truncated in the original source
//! (only "hid_t file )" remained); the function name comes from its call
//! site in the test and the map type is inferred from `mat_map[id] = mat`
//! there — confirm both against revision history.
hid_t h5_write_materialmap_registry(
    const std::map<int, Origen::SP_Material>& mat_map, hid_t file )
{
    herr_t status;
    hid_t g_matreg = H5Gcreate1( file, "MaterialMap", 0 );
    for( auto it = mat_map.begin(); it != mat_map.end(); ++it )
    {
        const Origen::SP_Material& mat = it->second;
        hid_t g_mat = h5_write_material( *mat, g_matreg );
        status = H5Gclose( g_mat );
    }
    (void)status;
    return g_matreg;
}
//! Write every library in `lib_map` under a new "LibraryMap" group in
//! `file`, one "Library(<key>)" subgroup each. Returns the open registry
//! group handle (caller closes it).
hid_t h5_write_library_registry( const LibraryMap_t& lib_map, hid_t file )
{
    herr_t rc;
    std::string label;
    hid_t g_libreg = H5Gcreate1( file, "LibraryMap", 0 );
    for( const auto& entry : lib_map )
    {
        // group named "Library(<map key>)"
        {
            std::stringstream ss;
            ss << "Library(" << entry.first << ")";
            label = ss.str();
        }
        hid_t g_lib = H5Gcreate1( g_libreg, label.c_str(), 0 );
        Origen::SP_Library lib = entry.second;
        hsize_t dims_nd[] = {(hsize_t)lib->definition().total_nuclides()};
        // decay constants
        rc = h5_update_dataset( g_lib,
                                "decay_constants",
                                "1/s",
                                1,
                                dims_nd,
                                &lib->decay_data().decay_constants[0] );
        // nuclide ids
        rc = H5LTmake_dataset_int(
            g_lib,
            "sizzzaaa_list",
            1,
            dims_nd,
            &lib->definition().nuclide_set().ids().at( 0 ) );
        // fissionable nuclides
        Origen::SP_Vec_Int fiss = lib->decay_data().fissionables();
        hsize_t dims_f[] = {(hsize_t)fiss->size()};
        rc = H5LTmake_dataset_int(
            g_lib, "fissionables", 1, dims_f, &fiss->at( 0 ) );
        // transition-structure locations
        const Origen::TransitionStructure ts = lib->transition_structure();
        rc = H5LTmake_dataset_int(
            g_lib, "num_parents", 1, dims_nd, &ts.num_parents().at( 0 ) );
        rc = H5LTmake_dataset_int( g_lib,
                                   "num_decay_parents",
                                   1,
                                   dims_nd,
                                   &ts.num_decay_parents().at( 0 ) );
        hsize_t dims_t[] = {(hsize_t)ts.parent_positions_size()};
        rc = H5LTmake_dataset_int( g_lib,
                                   "parent_positions",
                                   1,
                                   dims_t,
                                   &ts.parent_positions().at( 0 ) );
        rc = H5LTmake_dataset_int(
            g_lib, "transition_ids", 1, dims_t, &ts.transition_ids().at( 0 ) );
    }
    (void)rc;
    return g_libreg;
}
// Round-trip smoke test: build a material with several depletion steps,
// write it (plus its library registry) to "a.h5", then reopen the file
// and append one more step group in place.
// NOTE(review): this test body has lost several source lines (see inline
// notes) and cannot compile as-is — recover the missing declarations
// from revision history.
TEST( MaterialBuffer, hdf5 )
{
// library
// NOTE(review): the code that loads the library and sets `pass` is
// missing; `pass` and `lib` (used below) are never declared here.
{
ASSERT_TRUE( pass );
}
// initialize
int id = 1;
double vol = 1.2;
// NOTE(review): the declaration header is missing — presumably something
// like `Origen::SP_Material mat(` preceded this line.
new Origen::Material( "GENERAL", lib, "mix001", id, vol ) );
Origen::Vec_Int ref_ids;
Origen::Vec_Dbl ref_vals;
// reference nuclide ids / number densities for a UO2 test problem
Origen::FakeFactory::vera_uox_e360( ref_ids, ref_vals );
mat->set_numden_bos( ref_vals, ref_ids );
// add the material map
// NOTE(review): `mat_map` is used but its declaration (apparently a map
// of int id -> SP_Material) is missing above.
mat_map[id] = mat;
// transition matrix
// NOTE(review): the assignment target is missing — presumably
// `auto trx_m1 =`, since trx_m1 is used by set_transition_matrix below.
mat->library()->newsp_transition_matrix_at( 0 );
double flux_m1 = 3.91;
// add four steps, all with the same flux and transition matrix
mat->add_step( 123.456 ); // step 0
mat->set_flux( flux_m1 );
mat->set_transition_matrix( trx_m1 );
mat->add_step( 223.456 ); // step 1
mat->set_flux( flux_m1 );
mat->set_transition_matrix( trx_m1 );
mat->add_step( 323.456 ); // step 2
mat->set_flux( flux_m1 );
mat->set_transition_matrix( trx_m1 );
mat->add_step( 423.456 ); // step 3
mat->set_flux( flux_m1 );
mat->set_transition_matrix( trx_m1 );
// collect the library map, keyed by library type
LibraryMap_t lib_map;
for( auto it = mat_map.begin(); it != mat_map.end(); ++it )
{
const Origen::SP_Material& mat = it->second;
lib_map[mat->library_type()] = mat->library();
}
herr_t status;
/*
* Create a new file using the default properties.
*/
hid_t file = H5Fcreate( "a.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
{
hid_t g_matreg = h5_write_materialmap_registry( mat_map, file );
status = H5Gclose( g_matreg );
hid_t g_libreg = h5_write_library_registry( lib_map, file );
status = H5Gclose( g_libreg );
}
status = H5Fclose( file );
/*
* Add a new data point to the file
*/
mat->add_step( 523.456 ); // step 4
mat->set_flux( flux_m1 );
mat->set_transition_matrix( trx_m1 );
// reopen read-write and append the new step under the existing group
hid_t ofile = H5Fopen( "a.h5", H5F_ACC_RDWR, H5P_DEFAULT );
{
std::string s;
{
std::stringstream ss;
ss << "/MaterialMap/Material(" << mat->id() << ")";
s = ss.str();
}
hid_t g_mat = H5Gopen1( ofile, s.c_str() );
hid_t g_step = h5_write_step( *mat, 4, g_mat );
status = H5Gclose( g_step );
status = H5Gclose( g_mat );
}
status = H5Fclose( ofile );
(void)status;
}