gitlab_api.py

# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import os
import re
import tarfile
import tempfile
import time
import zipfile
from functools import wraps
from typing import Any, Callable, Dict, List, Optional

import gitlab

TR = Callable[..., Any]


def retry(func: TR) -> TR:
    """
    Decorator that catches only the exception types associated with network
    issues and retries the whole function.
    """
    @wraps(func)
    def wrapper(self: 'Gitlab', *args: Any, **kwargs: Any) -> Any:
        retried = 0
        while True:
            try:
                res = func(self, *args, **kwargs)
            except (IOError, EOFError, gitlab.exceptions.GitlabError) as e:
                if isinstance(e, gitlab.exceptions.GitlabError):
                    if e.response_code == 500:
                        # retry on internal server errors
                        pass
                    elif e.response_code == 404 and os.environ.get('LOCAL_GITLAB_HTTPS_HOST', None):
                        # remove the environment variable "LOCAL_GITLAB_HTTPS_HOST" and retry
                        os.environ.pop('LOCAL_GITLAB_HTTPS_HOST', None)
                    else:
                        # other GitlabErrors are not retried
                        raise e
                retried += 1
                if retried > self.DOWNLOAD_ERROR_MAX_RETRIES:
                    raise e  # give up after the maximum number of retries
                else:
                    print('Network failure in {}, retrying ({})'.format(getattr(func, '__name__', '(unknown callable)'), retried))
                    time.sleep(2 ** retried)  # wait a bit longer after each retry
                    continue
            else:
                break
        return res

    return wrapper
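
# The decorator above implements exponential backoff: with the default
# DOWNLOAD_ERROR_MAX_RETRIES = 3, a persistently failing call sleeps for
# 2 s, 4 s and 8 s before the exception is finally re-raised. A minimal
# sketch of applying it to a new API helper (the method name below is
# hypothetical, not part of this module):
#
#     class MyGitlab(Gitlab):
#         @retry
#         def get_pipeline_status(self, pipeline_id: int) -> str:
#             return self.project.pipelines.get(pipeline_id).status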


class Gitlab(object):
    JOB_NAME_PATTERN = re.compile(r'(\w+)(\s+(\d+)/(\d+))?')

    DOWNLOAD_ERROR_MAX_RETRIES = 3

    def __init__(self, project_id: Optional[int] = None):
        config_data_from_env = os.getenv('PYTHON_GITLAB_CONFIG')
        if config_data_from_env:
            # prefer to load config from the environment variable
            with tempfile.NamedTemporaryFile('w', delete=False) as temp_file:
                temp_file.write(config_data_from_env)
                config_files = [temp_file.name]  # type: Optional[List[str]]
        else:
            # otherwise try to use a config file on the local filesystem
            config_files = None
        self._init_gitlab_inst(project_id, config_files)

    @retry
    def _init_gitlab_inst(self, project_id: Optional[int], config_files: Optional[List[str]]) -> None:
        gitlab_id = os.getenv('LOCAL_GITLAB_HTTPS_HOST')  # if None, the default gitlab server is used
        self.gitlab_inst = gitlab.Gitlab.from_config(gitlab_id=gitlab_id, config_files=config_files)
        self.gitlab_inst.auth()
        if project_id:
            self.project = self.gitlab_inst.projects.get(project_id)
        else:
            self.project = None

    @retry
    def get_project_id(self, name: str, namespace: Optional[str] = None) -> int:
        """
        Search project ID by name.

        :param name: project name
        :param namespace: namespace to match when there are multiple projects with the same name
        :return: project ID
        """
        projects = self.gitlab_inst.projects.list(search=name)
        res = []
        for project in projects:
            if namespace is None:
                if len(projects) == 1:
                    res.append(project.id)
                    break

            if project.namespace['path'] == namespace:
                if project.name == name:
                    res.insert(0, project.id)
                else:
                    res.append(project.id)

        if not res:
            raise ValueError("Can't find project")
        return int(res[0])
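
    # Example (hypothetical values; assumes the python-gitlab config grants
    # access to a project named "esp-idf" under the "espressif" namespace):
    #
    #     gitlab_inst = Gitlab()
    #     project_id = gitlab_inst.get_project_id('esp-idf', namespace='espressif')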

    @retry
    def download_artifacts(self, job_id: int, destination: str) -> None:
        """
        Download full job artifacts and extract them to destination.

        :param job_id: Gitlab CI job ID
        :param destination: extract artifacts to this path
        """
        job = self.project.jobs.get(job_id)

        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            job.artifacts(streamed=True, action=temp_file.write)

        with zipfile.ZipFile(temp_file.name, 'r') as archive_file:
            archive_file.extractall(destination)
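
    # A minimal usage sketch (the job ID and destination are placeholders):
    #
    #     gitlab_inst = Gitlab(project_id=123)
    #     gitlab_inst.download_artifacts(job_id=456, destination='artifacts_output')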

    @retry
    def download_artifact(self, job_id: int, artifact_path: List[str], destination: Optional[str] = None) -> List[bytes]:
        """
        Download specific paths of job artifacts and extract them to destination.

        :param job_id: Gitlab CI job ID
        :param artifact_path: list of paths in artifacts (relative to the artifact root path)
        :param destination: destination of the artifacts. Do not save to file if destination is None
        :return: a list of artifact files' raw data
        """
        job = self.project.jobs.get(job_id)

        raw_data_list = []

        for a_path in artifact_path:
            try:
                data = job.artifact(a_path)  # type: bytes
            except gitlab.GitlabGetError as e:
                print("Failed to download '{}' from job {}".format(a_path, job_id))
                raise e
            raw_data_list.append(data)
            if destination:
                file_path = os.path.join(destination, a_path)
                try:
                    os.makedirs(os.path.dirname(file_path))
                except OSError:
                    # directory already exists
                    pass
                with open(file_path, 'wb') as f:
                    f.write(data)

        return raw_data_list
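
    # Example (the paths are hypothetical; they must exist inside the job's
    # artifact archive):
    #
    #     raw = gitlab_inst.download_artifact(
    #         job_id=456,
    #         artifact_path=['build/flasher_args.json', 'build/bootloader/bootloader.bin'],
    #         destination='artifacts_output',
    #     )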

    @retry
    def find_job_id(self, job_name: str, pipeline_id: Optional[str] = None, job_status: str = 'success') -> List[Dict]:
        """
        Get job IDs from a job name of a specific pipeline.

        :param job_name: job name
        :param pipeline_id: if None, the pipeline ID is taken from the CI pre-defined variable
        :param job_status: status of the job. One pipeline could have multiple jobs with the same name
            after retrying; job_status is used to filter these jobs.
        :return: a list of job IDs (a parallel job will generate multiple jobs)
        """
        job_id_list = []
        if pipeline_id is None:
            pipeline_id = os.getenv('CI_PIPELINE_ID')
        pipeline = self.project.pipelines.get(pipeline_id)
        jobs = pipeline.jobs.list(all=True)
        for job in jobs:
            match = self.JOB_NAME_PATTERN.match(job.name)
            if match:
                if match.group(1) == job_name and job.status == job_status:
                    job_id_list.append({'id': job.id, 'parallel_num': match.group(3)})
        return job_id_list
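
    # Example (the job name is hypothetical; parallel_num is None for jobs
    # that are not split with GitLab's "parallel" keyword):
    #
    #     for entry in gitlab_inst.find_job_id('build_examples', pipeline_id='7890'):
    #         print(entry['id'], entry['parallel_num'])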

    @retry
    def download_archive(self, ref: str, destination: str, project_id: Optional[int] = None,
                         cache_dir: Optional[str] = None) -> str:
        """
        Download the archive of a certain commit of a repository and extract it to the destination path.

        :param ref: commit or branch name
        :param destination: destination path of the extracted archive file
        :param project_id: download the project of the current instance if project_id is None
        :param cache_dir: if set, look for a previously downloaded archive here and reuse it
        :return: root path name of the archive file
        """
        if project_id is None:
            project = self.project
        else:
            project = self.gitlab_inst.projects.get(project_id)

        if cache_dir:
            local_archive_file = os.path.join(cache_dir, f'{ref}.tar.gz')
            os.makedirs(os.path.dirname(local_archive_file), exist_ok=True)
            if os.path.isfile(local_archive_file):
                print('Use cached archive file. Skipping download...')
            else:
                with open(local_archive_file, 'wb') as fw:
                    try:
                        project.repository_archive(sha=ref, streamed=True, action=fw.write)
                    except gitlab.GitlabGetError as e:
                        print('Failed to archive from project {}'.format(project_id))
                        raise e
                print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(local_archive_file)) / (1024 * 1024)))
            return self.decompress_archive(local_archive_file, destination)

        # no cache
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            try:
                project.repository_archive(sha=ref, streamed=True, action=temp_file.write)
            except gitlab.GitlabGetError as e:
                print('Failed to archive from project {}'.format(project_id))
                raise e

        print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))

        return self.decompress_archive(temp_file.name, destination)

    @staticmethod
    def decompress_archive(path: str, destination: str) -> str:
        with tarfile.open(path, 'r') as archive_file:
            root_name = archive_file.getnames()[0]
            archive_file.extractall(destination)

        return os.path.join(os.path.realpath(destination), root_name)
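
    # Example (the paths are placeholders; assumes the ref is reachable in
    # the project and cache_dir is writable):
    #
    #     extracted_root = gitlab_inst.download_archive(
    #         ref='master',
    #         destination='downloaded_repo',
    #         cache_dir='archive_cache',
    #     )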

    def get_job_tags(self, job_id: int) -> str:
        """
        Get tags of a job

        :param job_id: job id
        :return: comma-separated tags of the job
        """
        job = self.project.jobs.get(job_id)
        return ','.join(job.tag_list)


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument('action')
    parser.add_argument('project_id', type=int)
    parser.add_argument('--pipeline_id', '-i', type=int, default=None)
    parser.add_argument('--ref', '-r', default='master')
    parser.add_argument('--job_id', '-j', type=int, default=None)
    parser.add_argument('--job_name', '-n', default=None)
    parser.add_argument('--project_name', '-m', default=None)
    parser.add_argument('--destination', '-d', default=None)
    parser.add_argument('--artifact_path', '-a', nargs='*', default=None)
    args = parser.parse_args()

    gitlab_inst = Gitlab(args.project_id)
    if args.action == 'download_artifacts':
        gitlab_inst.download_artifacts(args.job_id, args.destination)
    elif args.action == 'download_artifact':
        gitlab_inst.download_artifact(args.job_id, args.artifact_path, args.destination)
    elif args.action == 'find_job_id':
        job_ids = gitlab_inst.find_job_id(args.job_name, args.pipeline_id)
        print(';'.join([','.join([str(j['id']), j['parallel_num']]) for j in job_ids]))
    elif args.action == 'download_archive':
        gitlab_inst.download_archive(args.ref, args.destination)
    elif args.action == 'get_project_id':
        ret = gitlab_inst.get_project_id(args.project_name)
        print('project id: {}'.format(ret))
    elif args.action == 'get_job_tags':
        ret = gitlab_inst.get_job_tags(args.job_id)
        print(ret)


if __name__ == '__main__':
    main()
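
# Example CLI invocations (project ID, job ID, names and paths are placeholders):
#
#     python gitlab_api.py download_artifacts 123 --job_id 456 --destination output
#     python gitlab_api.py find_job_id 123 --job_name build_examples --pipeline_id 7890
#     python gitlab_api.py get_project_id 123 --project_name esp-idf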