generate_class.py

#!/usr/bin/env python3
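"""Generate a C++ header of collection/proxy classes from a YAML description.

The input YAML maps each object name to its attributes; the only keys read
below are 'treename_prefix' and 'fields' (each field carrying a 'name' and a
'type'). The concrete names in this sketch are purely illustrative:

    Track:
      treename_prefix: trk
      fields:
        - {name: pt,  type: float}
        - {name: eta, type: float}

Running ``python generate_class.py <definitions>.yaml`` writes the generated
header next to the input file, with the ``.yaml`` suffix replaced by ``.hpp``.
"""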


def generate_collection_class(obj_name, obj_attrs):
    """Emit the C++ source for one object's Collection class and proxy struct."""
    src = []

    # Forward-declare the proxy struct, then open the collection class with a
    # minimal iterator so it can be used in range-based for loops.
    src += f'''\
struct {obj_name};
class {obj_name}Collection {{
  public:
    class iter {{
      public:
        iter(const {obj_name}Collection* collection, size_t idx)
          :collection(collection), idx(idx) {{ }}
        iter operator++() {{ ++idx; return *this; }}
        bool operator!=(const iter & other) {{ return idx != other.idx; }}
        const {obj_name} operator*() const;
      private:
        const {obj_name}Collection* collection;
        size_t idx;
    }};

    TrackingDataSet* tds;  // set via init(), used by the lazy branch accessors
'''.splitlines()
    # One lazily loaded branch value plus a loaded-flag per field. Both are
    # assigned through a const pointer in the generated accessors, so they are
    # declared mutable, and the flag defaults to false.
    for field in obj_attrs['fields']:
        name = field['name']
        type_ = field['type']
        src.append(f'    mutable Value<vector<{type_}>>* val_{name};')
        src.append(f'    mutable bool {name}_loaded = false;')

    src.append(f'\n    {obj_name}Collection() {{ }}\n')
    src.append('    void init(TrackingDataSet* tds){')
    src.append('        this->tds = tds;')
    src.append('    }\n')

    # The first declared field defines the size of the collection.
    first_obj_name = obj_attrs['fields'][0]['name']
    src.append(f'    size_t size() const {{ return (*val_{first_obj_name})().size();}}\n')

    src.append(f'    const {obj_name} operator[](size_t) const;')
    src.append('    iter begin() const { return iter(this, 0); }')
    src.append('    iter end() const { return iter(this, size()); }')
    src.append('};')

    src += f'''
struct {obj_name} {{
    const {obj_name}Collection* collection;
    const size_t idx;
    {obj_name}(const {obj_name}Collection* collection, const size_t idx)
      :collection(collection), idx(idx) {{ }}\n
'''.splitlines()

    # One accessor per field: on first use, fetch the branch value object
    # (named "<treename_prefix>_<field>") from the data set, then index into
    # the loaded vector.
    for field in obj_attrs['fields']:
        name = field['name']
        type_ = field['type']
        prefix = obj_attrs['treename_prefix'] + '_'
        src.append(f'''\
    const {type_}& {name}() const {{
        if (!collection->{name}_loaded) {{
            collection->val_{name} = collection->tds->track_branch_obj<vector<{type_}>>("{prefix}{name}");
            collection->{name}_loaded = true;
        }}
        return (*collection->val_{name})().at(idx);}}
''')

    src.append('};')

    # Out-of-line definitions that need the complete proxy type.
    src.append(f'''
const {obj_name} {obj_name}Collection::iter::operator*() const {{
    return {{collection, idx}};
}}
const {obj_name} {obj_name}Collection::operator[](size_t idx) const {{
    return {{this, idx}};
}}
''')
    return '\n'.join(src)
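

# For illustration only: with the hypothetical 'Track' entry from the module
# docstring, generate_collection_class emits roughly
#
#     struct Track;
#     class TrackCollection {
#       public:
#         class iter { ... };
#         TrackingDataSet* tds;
#         mutable Value<vector<float>>* val_pt;
#         mutable bool pt_loaded = false;
#         ...
#     };
#     struct Track {
#         const TrackCollection* collection;
#         const size_t idx;
#         const float& pt() const;  // lazily loads branch "trk_pt"
#         ...
#     };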


def generate_header(input_filename, output_filename):
    from datetime import datetime
    return f'''\
/** {output_filename} created on {datetime.now()} by generate_class.py
 * AVOID EDITING THIS FILE BY HAND!! Instead edit {input_filename} and re-run
 * generate_class.py
 */
#include "filval.hpp"
#include "root_filval.hpp"
#include<cmath>

#include "TrackingNtuple.h"

using namespace std;
using namespace fv;
using namespace fv_root;

typedef TreeDataSet<TrackingNtuple> TrackingDataSet;
'''


if __name__ == '__main__':
    import argparse
    import yaml

    parser = argparse.ArgumentParser()
    add = parser.add_argument
    add('input_file', help='An input YAML file defining the objects to generate')
    args = parser.parse_args()

    # Generate one collection/proxy pair per top-level object in the YAML file.
    classes = []
    with open(args.input_file) as fi:
        for obj, attrs in yaml.safe_load(fi).items():
            classes.append(generate_collection_class(obj, attrs))

    # Write the header next to the input file, swapping .yaml for .hpp.
    output_filename = args.input_file.replace('.yaml', '.hpp')
    with open(output_filename, 'w') as fo:
        fo.write(generate_header(args.input_file, output_filename))
        for class_ in classes:
            fo.write(class_)