# get_all_test_results.py
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
'''
Download artifacts from all test jobs

Resulting directory layout:

CI_PROJECT_DIR
└── TEST_RESULTS
    ├── <job_id>
    │   └── XUNIT_RESULT.xml
    ├── <job_id>
    │   └── XUNIT_RESULT.xml
    ├── <job_id>
    │   └── TEST_LOGS
'''
import argparse
import json
import os
import shutil

import gitlab_api

# NOTE(review): int('') raises ValueError, so these two lines fail fast when the
# CI_* variables are unset — presumably intentional, since this script only makes
# sense inside a GitLab CI pipeline; confirm.
CI_PROJECT_ID = int(os.getenv('CI_PROJECT_ID', ''))
CI_PIPELINE_ID = int(os.getenv('CI_PIPELINE_ID', ''))
# Fall back to the current directory when IDF_PATH is not exported.
IDF_PATH = os.getenv('IDF_PATH', '.')
# Default root under which the per-job artifact directories are collected.
TEST_RESULT_PATH = os.path.join(IDF_PATH, 'TEST_RESULTS')
  25. def download_all_test_results(result_path: str, include_retried: bool = False) -> None:
  26. if os.path.exists(result_path):
  27. shutil.rmtree(result_path)
  28. os.makedirs(result_path, exist_ok=True)
  29. gitlab_inst = gitlab_api.Gitlab(CI_PROJECT_ID)
  30. pipelines = gitlab_inst.project.pipelines.get(CI_PIPELINE_ID)
  31. _include_retried = 'true' if include_retried else 'false'
  32. jobs = pipelines.jobs.list(all=True, per_page=100, include_retried=_include_retried)
  33. job_info_list = []
  34. for job in jobs:
  35. if job.stage in ['target_test', 'host_test']:
  36. log_path = ''
  37. if job.status not in ['success', 'failed']:
  38. print('Job {}({}) is not finished'.format(job.id, job.name))
  39. elif not hasattr(job, 'artifacts_file'):
  40. print('Job {}({}) has no artifacts.'.format(job.id, job.name))
  41. else:
  42. log_path = os.path.join(result_path, 'job_{}'.format(job.id))
  43. print('Downloading artifacts from: {}'.format(job.name))
  44. os.makedirs(log_path, exist_ok=True)
  45. gitlab_inst.download_artifacts(job.id, log_path)
  46. job_info = {
  47. 'id': job.id,
  48. 'name': job.name,
  49. 'tag_list': job.tag_list,
  50. 'status': job.status,
  51. 'stage': job.stage,
  52. 'web_url': job.web_url,
  53. 'commit_url': job.commit['web_url'],
  54. 'log_path': log_path,
  55. }
  56. job_info_list.append(job_info)
  57. with open(os.path.join(result_path, 'index.json'), 'w') as f:
  58. f.write(json.dumps({'jobs': job_info_list}, indent=1, sort_keys=True))
  59. def main() -> None:
  60. parser = argparse.ArgumentParser()
  61. parser.add_argument('--path', '-p', default=TEST_RESULT_PATH, help='Path to save result files.')
  62. parser.add_argument('--include_retried', action='store_true', help='Including retried jobs.')
  63. args = parser.parse_args()
  64. download_all_test_results(args.path, args.include_retried)
  65. if __name__ == '__main__':
  66. main()