artifacts_handler.py 6.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199
  1. # SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
  2. # SPDX-License-Identifier: Apache-2.0
  3. import argparse
  4. import fnmatch
  5. import glob
  6. import os
  7. import typing as t
  8. import zipfile
  9. from enum import Enum
  10. from pathlib import Path
  11. from zipfile import ZipFile
  12. import urllib3
  13. from minio import Minio
class ArtifactType(str, Enum):
    """Categories of CI artifacts handled by this script.

    The enum value is used as the second path component of the S3 object
    name: ``<pipeline_id>/<artifact_type>/<job_name>/<job_id>.zip``.
    """

    # linker .map files and .elf binaries from app and bootloader build dirs
    MAP_AND_ELF_FILES = 'map_and_elf_files'
    # flashable build output (.bin, flasher args, sdkconfig, ...) minus map/elf
    BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES = 'build_dir_without_map_and_elf_files'
    # build logs only
    LOGS = 'logs'
    # size.json / size_info.txt reports
    SIZE_REPORTS = 'size_reports'
# Glob patterns (matched with glob.glob(..., recursive=True) relative to the
# current working directory) that select which files go into each artifact
# type's archive on upload.  Note: '**/build*/build_log.txt' intentionally
# appears in both BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES and LOGS.
TYPE_PATTERNS_DICT = {
    ArtifactType.MAP_AND_ELF_FILES: [
        '**/build*/bootloader/*.map',
        '**/build*/bootloader/*.elf',
        '**/build*/*.map',
        '**/build*/*.elf',
    ],
    ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
        '**/build*/build_log.txt',
        '**/build*/*.bin',
        '**/build*/bootloader/*.bin',
        '**/build*/partition_table/*.bin',
        '**/build*/flasher_args.json',
        '**/build*/flash_project_args',
        '**/build*/config/sdkconfig.json',
        '**/build*/project_description.json',
        'list_job_*.txt',
    ],
    ArtifactType.LOGS: [
        '**/build*/build_log.txt',
    ],
    ArtifactType.SIZE_REPORTS: [
        '**/build*/size.json',
        'size_info.txt',
    ],
}
  45. def getenv(env_var: str) -> str:
  46. try:
  47. return os.environ[env_var]
  48. except KeyError as e:
  49. raise Exception(f'Environment variable {env_var} not set') from e
  50. def _download_files(
  51. pipeline_id: int,
  52. *,
  53. artifact_type: t.Optional[ArtifactType] = None,
  54. job_name: t.Optional[str] = None,
  55. job_id: t.Optional[int] = None,
  56. ) -> None:
  57. if artifact_type:
  58. prefix = f'{pipeline_id}/{artifact_type.value}/'
  59. else:
  60. prefix = f'{pipeline_id}/'
  61. for obj in client.list_objects(getenv('IDF_S3_BUCKET'), prefix=prefix, recursive=True):
  62. obj_name = obj.object_name
  63. obj_p = Path(obj_name)
  64. # <pipeline_id>/<action_type>/<job_name>/<job_id>.zip
  65. if len(obj_p.parts) != 4:
  66. print(f'Invalid object name: {obj_name}')
  67. continue
  68. if job_name:
  69. # could be a pattern
  70. if not fnmatch.fnmatch(obj_p.parts[2], job_name):
  71. print(f'Job name {job_name} does not match {obj_p.parts[2]}')
  72. continue
  73. if job_id:
  74. if obj_p.parts[3] != f'{job_id}.zip':
  75. print(f'Job ID {job_id} does not match {obj_p.parts[3]}')
  76. continue
  77. client.fget_object(getenv('IDF_S3_BUCKET'), obj_name, obj_name)
  78. print(f'Downloaded {obj_name}')
  79. if obj_name.endswith('.zip'):
  80. with ZipFile(obj_name, 'r') as zr:
  81. zr.extractall()
  82. print(f'Extracted {obj_name}')
  83. os.remove(obj_name)
def _upload_files(
    pipeline_id: int,
    *,
    artifact_type: ArtifactType,
    job_name: str,
    # __main__ passes either an int (--job-id) or a str (CI_JOB_ID env var);
    # the value is only ever interpolated into strings here.
    job_id: t.Union[str, int],
) -> None:
    """Zip every file matching *artifact_type*'s patterns and upload it to S3.

    The archive is written to ``<job_id>.zip`` in the current directory and
    uploaded as ``<pipeline_id>/<artifact_type>/<job_name>/<job_id>.zip``.
    If no file matched, nothing is uploaded.  The local archive is removed
    in every case, even when the upload fails.
    """
    has_file = False
    with ZipFile(
        f'{job_id}.zip',
        'w',
        compression=zipfile.ZIP_DEFLATED,
        # 1 is the fastest compression level
        # the size differs not much between 1 and 9
        compresslevel=1,
    ) as zw:
        for pattern in TYPE_PATTERNS_DICT[artifact_type]:
            for file in glob.glob(pattern, recursive=True):
                zw.write(file)
                has_file = True

    try:
        if has_file:
            # only the first whitespace-separated token of the job name is kept
            # (presumably strips a parallel-job suffix — verify against CI job names)
            obj_name = f'{pipeline_id}/{artifact_type.value}/{job_name.split(" ")[0]}/{job_id}.zip'
            print(f'Created archive file: {job_id}.zip, uploading as {obj_name}')
            client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip')
            url = client.get_presigned_url('GET', getenv('IDF_S3_BUCKET'), obj_name)
            print(f'Please download the archive file which includes {artifact_type.value} from {url}')
    finally:
        # always clean up the local archive, uploaded or not
        os.remove(f'{job_id}.zip')
  113. if __name__ == '__main__':
  114. parser = argparse.ArgumentParser(
  115. description='Download or upload files from/to S3, the object name would be '
  116. '[PIPELINE_ID]/[ACTION_TYPE]/[JOB_NAME]/[JOB_ID].zip.'
  117. '\n'
  118. 'For example: 123456/binaries/build_pytest_examples_esp32/123456789.zip',
  119. formatter_class=argparse.ArgumentDefaultsHelpFormatter,
  120. )
  121. common_args = argparse.ArgumentParser(add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  122. common_args.add_argument('--pipeline-id', type=int, help='Pipeline ID')
  123. common_args.add_argument(
  124. '--type', type=str, nargs='+', choices=[a.value for a in ArtifactType], help='Types of files to download'
  125. )
  126. action = parser.add_subparsers(dest='action', help='Download or Upload')
  127. download = action.add_parser('download', help='Download files from S3', parents=[common_args])
  128. upload = action.add_parser('upload', help='Upload files to S3', parents=[common_args])
  129. download.add_argument('--job-name', type=str, help='Job name pattern')
  130. download.add_argument('--job-id', type=int, help='Job ID')
  131. upload.add_argument('--job-name', type=str, help='Job name')
  132. upload.add_argument('--job-id', type=int, help='Job ID')
  133. args = parser.parse_args()
  134. client = Minio(
  135. getenv('IDF_S3_SERVER').replace('https://', ''),
  136. access_key=getenv('IDF_S3_ACCESS_KEY'),
  137. secret_key=getenv('IDF_S3_SECRET_KEY'),
  138. http_client=urllib3.PoolManager(
  139. timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
  140. retries=urllib3.Retry(
  141. total=5,
  142. backoff_factor=0.2,
  143. status_forcelist=[500, 502, 503, 504],
  144. ),
  145. ),
  146. )
  147. ci_pipeline_id = args.pipeline_id or getenv('CI_PIPELINE_ID') # required
  148. if args.action == 'download':
  149. method = _download_files
  150. ci_job_name = args.job_name # optional
  151. ci_job_id = args.job_id # optional
  152. else:
  153. method = _upload_files # type: ignore
  154. ci_job_name = args.job_name or getenv('CI_JOB_NAME') # required
  155. ci_job_id = args.job_id or getenv('CI_JOB_ID') # required
  156. if args.type:
  157. types = [ArtifactType(t) for t in args.type]
  158. else:
  159. types = list(ArtifactType)
  160. print(f'{"Pipeline ID":15}: {ci_pipeline_id}')
  161. if ci_job_name:
  162. print(f'{"Job name":15}: {ci_job_name}')
  163. if ci_job_id:
  164. print(f'{"Job ID":15}: {ci_job_id}')
  165. for _t in types:
  166. method(ci_pipeline_id, artifact_type=_t, job_name=ci_job_name, job_id=ci_job_id) # type: ignore