generate_projects.py

# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import glob
import multiprocessing
import os
import pickle
import shutil
import sys
import tempfile
from typing import Dict, List, Union

import _utils
import yaml

PROJECT_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..",
                            "..")
os.chdir(PROJECT_ROOT)
# TODO(lidiz) find a better way for plugins to reference each other
sys.path.append(os.path.join(PROJECT_ROOT, 'tools', 'buildgen', 'plugins'))

# from tools.run_tests.python_utils import jobset
jobset = _utils.import_python_module(
    os.path.join(PROJECT_ROOT, 'tools', 'run_tests', 'python_utils',
                 'jobset.py'))

PREPROCESSED_BUILD = '.preprocessed_build'
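# When the TEST environment variable is 'true', `test` maps each intended
# output path to a scratch file; main() renders into the scratch files and
# diffs them against the existing outputs instead of overwriting them.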
test = {} if os.environ.get('TEST', 'false') == 'true' else None

assert sys.argv[1:], 'run generate_projects.sh instead of this directly'

parser = argparse.ArgumentParser()
parser.add_argument('build_files',
                    nargs='+',
                    default=[],
                    help="build files describing build specs")
parser.add_argument('--templates',
                    nargs='+',
                    default=[],
                    help="mako template files to render")
parser.add_argument('--output_merged',
                    '-m',
                    default='',
                    type=str,
                    help="merge intermediate results to a file")
parser.add_argument('--jobs',
                    '-j',
                    default=multiprocessing.cpu_count(),
                    type=int,
                    help="maximum parallel jobs")
parser.add_argument('--base',
                    default='.',
                    type=str,
                    help="base path for generated files")
args = parser.parse_args()
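
# Example invocation (normally issued by generate_projects.sh; the build file
# and template names below are purely illustrative):
#   python tools/buildgen/generate_projects.py build.yaml \
#       --templates templates/Makefile.template \
#       --jobs 8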


def preprocess_build_files() -> _utils.Bunch:
    """Merges the build yaml files into one dictionary, then passes it to plugins."""
    build_spec = dict()
    for build_file in args.build_files:
        with open(build_file, 'r') as f:
            _utils.merge_json(build_spec,
                              yaml.load(f.read(), Loader=yaml.FullLoader))
    # Executes plugins. Plugins update the build spec in-place.
    for py_file in sorted(glob.glob('tools/buildgen/plugins/*.py')):
        plugin = _utils.import_python_module(py_file)
        plugin.mako_plugin(build_spec)
    if args.output_merged:
        with open(args.output_merged, 'w') as f:
            f.write(yaml.dump(build_spec))
    # Makes build_spec sort of immutable and dot-accessible
    return _utils.to_bunch(build_spec)
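
# A buildgen plugin is simply a module that exposes mako_plugin(dictionary) and
# mutates the merged build spec in place. A minimal sketch of such a plugin
# (hypothetical, not a file in this repo):
#
#   def mako_plugin(dictionary):
#       # Give every library entry a default 'language' if it lacks one.
#       for lib in dictionary.get('libs', []):
#           lib.setdefault('language', 'c')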


def generate_template_render_jobs(templates: List[str]) -> List[jobset.JobSpec]:
    """Generates a JobSpec for each template rendering task."""
    jobs = []
    base_cmd = [sys.executable, 'tools/buildgen/_mako_renderer.py']
    for template in sorted(templates, reverse=True):
        root, f = os.path.split(template)
        if os.path.splitext(f)[1] == '.template':
            out_dir = args.base + root[len('templates'):]
            out = os.path.join(out_dir, os.path.splitext(f)[0])
            if not os.path.exists(out_dir):
                os.makedirs(out_dir)
            cmd = base_cmd[:]
            cmd.append('-P')
            cmd.append(PREPROCESSED_BUILD)
            cmd.append('-o')
            if test is None:
                cmd.append(out)
            else:
                # Test mode: render into a temp file and remember it for the
                # diff pass in main().
                tf = tempfile.mkstemp()
                test[out] = tf[1]
                os.close(tf[0])
                cmd.append(test[out])
            cmd.append(args.base + '/' + root + '/' + f)
            jobs.append(jobset.JobSpec(cmd, shortname=out,
                                       timeout_seconds=None))
    return jobs
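
# Template paths map onto output paths by stripping the leading 'templates'
# directory and the trailing '.template' suffix, for example (illustrative
# paths):
#   templates/Makefile.template     -> <base>/Makefile
#   templates/src/foo.c.template    -> <base>/src/foo.c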


def main() -> None:
    templates = args.templates
    if not templates:
        for root, _, files in os.walk('templates'):
            for f in files:
                templates.append(os.path.join(root, f))

    build_spec = preprocess_build_files()
    with open(PREPROCESSED_BUILD, 'wb') as f:
        pickle.dump(build_spec, f)

    err_cnt, _ = jobset.run(generate_template_render_jobs(templates),
                            maxjobs=args.jobs)
    if err_cnt != 0:
        print('ERROR: %s error(s) found while generating projects.' % err_cnt,
              file=sys.stderr)
        sys.exit(1)

    if test is not None:
        for s, g in test.items():
            if os.path.isfile(g):
                assert 0 == os.system('diff %s %s' % (s, g)), s
                os.unlink(g)
            else:
                assert 0 == os.system('diff -r %s %s' % (s, g)), s
                shutil.rmtree(g, ignore_errors=True)


if __name__ == "__main__":
    main()