artifacts_handler.py 6.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203
  1. # SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
  2. # SPDX-License-Identifier: Apache-2.0
  3. import argparse
  4. import fnmatch
  5. import glob
  6. import os
  7. import typing as t
  8. import zipfile
  9. from enum import Enum
  10. from pathlib import Path
  11. from zipfile import ZipFile
  12. import urllib3
  13. from minio import Minio
class ArtifactType(str, Enum):
    """Categories of CI artifacts handled by this script.

    Each member's value is used both as the second path segment of the S3
    object name (``<pipeline_id>/<value>/<job_name>/<job_id>.zip``) and as an
    accepted choice for the ``--type`` CLI argument.
    """

    MAP_AND_ELF_FILES = 'map_and_elf_files'
    BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES = 'build_dir_without_map_and_elf_files'
    LOGS = 'logs'
    SIZE_REPORTS = 'size_reports'
    JUNIT_REPORTS = 'junit_reports'
# Glob patterns gathered for each artifact type when uploading.
# Patterns are matched with glob.glob(..., recursive=True), so '**' spans
# any number of directories below the current working directory.
TYPE_PATTERNS_DICT = {
    ArtifactType.MAP_AND_ELF_FILES: [
        '**/build*/bootloader/*.map',
        '**/build*/bootloader/*.elf',
        '**/build*/*.map',
        '**/build*/*.elf',
    ],
    # Everything needed to flash/inspect a build except the large map/elf files.
    ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
        '**/build*/build_log.txt',
        '**/build*/*.bin',
        '**/build*/bootloader/*.bin',
        '**/build*/partition_table/*.bin',
        '**/build*/flasher_args.json',
        '**/build*/flash_project_args',
        '**/build*/config/sdkconfig.json',
        '**/build*/project_description.json',
        'list_job_*.txt',
    ],
    ArtifactType.LOGS: [
        '**/build*/build_log.txt',
    ],
    ArtifactType.SIZE_REPORTS: [
        '**/build*/size.json',
        'size_info.txt',
    ],
    ArtifactType.JUNIT_REPORTS: [
        'XUNIT_RESULT.xml',
    ],
}
  49. def getenv(env_var: str) -> str:
  50. try:
  51. return os.environ[env_var]
  52. except KeyError as e:
  53. raise Exception(f'Environment variable {env_var} not set') from e
  54. def _download_files(
  55. pipeline_id: int,
  56. *,
  57. artifact_type: t.Optional[ArtifactType] = None,
  58. job_name: t.Optional[str] = None,
  59. job_id: t.Optional[int] = None,
  60. ) -> None:
  61. if artifact_type:
  62. prefix = f'{pipeline_id}/{artifact_type.value}/'
  63. else:
  64. prefix = f'{pipeline_id}/'
  65. for obj in client.list_objects(getenv('IDF_S3_BUCKET'), prefix=prefix, recursive=True):
  66. obj_name = obj.object_name
  67. obj_p = Path(obj_name)
  68. # <pipeline_id>/<action_type>/<job_name>/<job_id>.zip
  69. if len(obj_p.parts) != 4:
  70. print(f'Invalid object name: {obj_name}')
  71. continue
  72. if job_name:
  73. # could be a pattern
  74. if not fnmatch.fnmatch(obj_p.parts[2], job_name):
  75. print(f'Job name {job_name} does not match {obj_p.parts[2]}')
  76. continue
  77. if job_id:
  78. if obj_p.parts[3] != f'{job_id}.zip':
  79. print(f'Job ID {job_id} does not match {obj_p.parts[3]}')
  80. continue
  81. client.fget_object(getenv('IDF_S3_BUCKET'), obj_name, obj_name)
  82. print(f'Downloaded {obj_name}')
  83. if obj_name.endswith('.zip'):
  84. with ZipFile(obj_name, 'r') as zr:
  85. zr.extractall()
  86. print(f'Extracted {obj_name}')
  87. os.remove(obj_name)
  88. def _upload_files(
  89. pipeline_id: int,
  90. *,
  91. artifact_type: ArtifactType,
  92. job_name: str,
  93. job_id: str,
  94. ) -> None:
  95. has_file = False
  96. with ZipFile(
  97. f'{job_id}.zip',
  98. 'w',
  99. compression=zipfile.ZIP_DEFLATED,
  100. # 1 is the fastest compression level
  101. # the size differs not much between 1 and 9
  102. compresslevel=1,
  103. ) as zw:
  104. for pattern in TYPE_PATTERNS_DICT[artifact_type]:
  105. for file in glob.glob(pattern, recursive=True):
  106. zw.write(file)
  107. has_file = True
  108. try:
  109. if has_file:
  110. obj_name = f'{pipeline_id}/{artifact_type.value}/{job_name.split(" ")[0]}/{job_id}.zip'
  111. print(f'Created archive file: {job_id}.zip, uploading as {obj_name}')
  112. client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip')
  113. url = client.get_presigned_url('GET', getenv('IDF_S3_BUCKET'), obj_name)
  114. print(f'Please download the archive file which includes {artifact_type.value} from {url}')
  115. finally:
  116. os.remove(f'{job_id}.zip')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Download or upload files from/to S3, the object name would be '
        '[PIPELINE_ID]/[ACTION_TYPE]/[JOB_NAME]/[JOB_ID].zip.'
        '\n'
        'For example: 123456/binaries/build_pytest_examples_esp32/123456789.zip',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    # Options shared by both subcommands, injected via parents=[common_args].
    common_args = argparse.ArgumentParser(add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    common_args.add_argument('--pipeline-id', type=int, help='Pipeline ID')
    common_args.add_argument(
        '--type', type=str, nargs='+', choices=[a.value for a in ArtifactType], help='Types of files to download'
    )
    action = parser.add_subparsers(dest='action', help='Download or Upload')
    download = action.add_parser('download', help='Download files from S3', parents=[common_args])
    upload = action.add_parser('upload', help='Upload files to S3', parents=[common_args])
    download.add_argument('--job-name', type=str, help='Job name pattern')
    download.add_argument('--job-id', type=int, help='Job ID')
    upload.add_argument('--job-name', type=str, help='Job name')
    upload.add_argument('--job-id', type=int, help='Job ID')
    args = parser.parse_args()
    # Module-level S3 client used by _download_files/_upload_files; created
    # only when run as a script. The scheme is stripped because Minio()
    # expects a bare host[:port] endpoint.
    client = Minio(
        getenv('IDF_S3_SERVER').replace('https://', ''),
        access_key=getenv('IDF_S3_ACCESS_KEY'),
        secret_key=getenv('IDF_S3_SECRET_KEY'),
        http_client=urllib3.PoolManager(
            timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
            # Retry transient server-side errors with a short backoff.
            retries=urllib3.Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        ),
    )
    # CLI arguments win over the GitLab CI_* environment variables.
    ci_pipeline_id = args.pipeline_id or getenv('CI_PIPELINE_ID')  # required
    if args.action == 'download':
        method = _download_files
        ci_job_name = args.job_name  # optional
        ci_job_id = args.job_id  # optional
    else:
        method = _upload_files  # type: ignore
        ci_job_name = args.job_name or getenv('CI_JOB_NAME')  # required
        ci_job_id = args.job_id or getenv('CI_JOB_ID')  # required
    # No --type means: process every artifact type.
    if args.type:
        types = [ArtifactType(t) for t in args.type]
    else:
        types = list(ArtifactType)
    print(f'{"Pipeline ID":15}: {ci_pipeline_id}')
    if ci_job_name:
        print(f'{"Job name":15}: {ci_job_name}')
    if ci_job_id:
        print(f'{"Job ID":15}: {ci_job_id}')
    for _t in types:
        method(ci_pipeline_id, artifact_type=_t, job_name=ci_job_name, job_id=ci_job_id)  # type: ignore