# result_uploader.py — uploads protobuf benchmark results to BigQuery.
from __future__ import absolute_import
from __future__ import print_function

import argparse
import calendar
import copy
import datetime
import os
import re
import time
import uuid

from util import big_query_utils
from util import result_parser
  13. _PROJECT_ID = 'grpc-testing'
  14. _DATASET = 'protobuf_benchmark_result'
  15. _TABLE = 'opensource_result_v2'
  16. _NOW = "%d%02d%02d" % (datetime.datetime.now().year,
  17. datetime.datetime.now().month,
  18. datetime.datetime.now().day)
  19. _INITIAL_TIME = calendar.timegm(time.gmtime())
  20. def get_metadata():
  21. build_number = os.getenv('BUILD_NUMBER')
  22. build_url = os.getenv('BUILD_URL')
  23. job_name = os.getenv('JOB_NAME')
  24. git_commit = os.getenv('GIT_COMMIT')
  25. # actual commit is the actual head of PR that is getting tested
  26. git_actual_commit = os.getenv('ghprbActualCommit')
  27. utc_timestamp = str(calendar.timegm(time.gmtime()))
  28. metadata = {'created': utc_timestamp}
  29. if build_number:
  30. metadata['buildNumber'] = build_number
  31. if build_url:
  32. metadata['buildUrl'] = build_url
  33. if job_name:
  34. metadata['jobName'] = job_name
  35. if git_commit:
  36. metadata['gitCommit'] = git_commit
  37. if git_actual_commit:
  38. metadata['gitActualCommit'] = git_actual_commit
  39. return metadata
  40. def upload_result(result_list, metadata):
  41. for result in result_list:
  42. new_result = {}
  43. new_result["metric"] = "throughput"
  44. new_result["value"] = result["throughput"]
  45. new_result["unit"] = "MB/s"
  46. new_result["test"] = "protobuf_benchmark"
  47. new_result["product_name"] = "protobuf"
  48. labels_string = ""
  49. for key in result:
  50. labels_string += ",|%s:%s|" % (key, result[key])
  51. new_result["labels"] = labels_string[1:]
  52. new_result["timestamp"] = _INITIAL_TIME
  53. print(labels_string)
  54. bq = big_query_utils.create_big_query()
  55. row = big_query_utils.make_row(str(uuid.uuid4()), new_result)
  56. if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET,
  57. _TABLE + "$" + _NOW,
  58. [row]):
  59. print('Error when uploading result', new_result)
  60. if __name__ == "__main__":
  61. parser = argparse.ArgumentParser()
  62. parser.add_argument("-cpp", "--cpp_input_file",
  63. help="The CPP benchmark result file's name",
  64. default="")
  65. parser.add_argument("-java", "--java_input_file",
  66. help="The Java benchmark result file's name",
  67. default="")
  68. parser.add_argument("-python", "--python_input_file",
  69. help="The Python benchmark result file's name",
  70. default="")
  71. parser.add_argument("-go", "--go_input_file",
  72. help="The golang benchmark result file's name",
  73. default="")
  74. parser.add_argument("-node", "--node_input_file",
  75. help="The node.js benchmark result file's name",
  76. default="")
  77. parser.add_argument("-php", "--php_input_file",
  78. help="The pure php benchmark result file's name",
  79. default="")
  80. parser.add_argument("-php_c", "--php_c_input_file",
  81. help="The php with c ext benchmark result file's name",
  82. default="")
  83. args = parser.parse_args()
  84. metadata = get_metadata()
  85. print("uploading results...")
  86. upload_result(result_parser.get_result_from_file(
  87. cpp_file=args.cpp_input_file,
  88. java_file=args.java_input_file,
  89. python_file=args.python_input_file,
  90. go_file=args.go_input_file,
  91. node_file=args.node_input_file,
  92. php_file=args.php_input_file,
  93. php_c_file=args.php_c_input_file,
  94. ), metadata)