import argparse
import os
import re
import tarfile
import tempfile
import zipfile
from functools import wraps

import gitlab


class Gitlab(object):
    JOB_NAME_PATTERN = re.compile(r"(\w+)(\s+(\d+)/(\d+))?")

    DOWNLOAD_ERROR_MAX_RETRIES = 3

    def __init__(self, project_id=None):
        config_data_from_env = os.getenv("PYTHON_GITLAB_CONFIG")
        if config_data_from_env:
            # prefer to load the config from the environment variable
            with tempfile.NamedTemporaryFile("w", delete=False) as temp_file:
                temp_file.write(config_data_from_env)
            config_files = [temp_file.name]
        else:
            # otherwise fall back to the config file on the local filesystem
            config_files = None
        self.gitlab_inst = gitlab.Gitlab.from_config(config_files=config_files)
        self.gitlab_inst.auth()
        if project_id:
            self.project = self.gitlab_inst.projects.get(project_id)
        else:
            self.project = None
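
    # Illustrative sketch only: PYTHON_GITLAB_CONFIG is expected to hold the text of a
    # python-gitlab configuration file. The server name, URL and token below are
    # placeholders, not values used by this module.
    #
    #     [global]
    #     default = example-server
    #
    #     [example-server]
    #     url = https://gitlab.example.com
    #     private_token = <personal access token>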

    def get_project_id(self, name, namespace=None):
        """
        Search for a project ID by name.

        :param name: project name
        :param namespace: namespace to match when multiple projects share the same name
        :return: project ID
        """
        projects = self.gitlab_inst.projects.list(search=name)
        for project in projects:
            if namespace is None:
                if len(projects) == 1:
                    project_id = project.id
                    break
            if project.namespace["path"] == namespace:
                project_id = project.id
                break
        else:
            raise ValueError("Can't find project {}".format(name))
        return project_id
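
    # Usage sketch (hypothetical project and namespace names, assuming a reachable
    # GitLab configuration):
    #
    #     gitlab_inst = Gitlab()
    #     project_id = gitlab_inst.get_project_id("example-project", namespace="example-group")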

    def download_artifacts(self, job_id, destination):
        """
        Download all artifacts of a job and extract them to the destination.

        :param job_id: GitLab CI job ID
        :param destination: path to extract the artifacts to
        """
        job = self.project.jobs.get(job_id)

        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            job.artifacts(streamed=True, action=temp_file.write)

        with zipfile.ZipFile(temp_file.name, "r") as archive_file:
            archive_file.extractall(destination)
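
    # Usage sketch (hypothetical project and job IDs):
    #
    #     gitlab_inst = Gitlab(project_id=123)
    #     gitlab_inst.download_artifacts(job_id=100000, destination="downloaded_artifacts")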

    def retry_download(func):
        """
        Decorator that catches IOError/EOFError and retries the whole function.

        Only use it on download functions that are atomic, i.e. whose read() calls
        and side effects are safe to re-run from the beginning.
        """
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            retried = 0
            while True:
                try:
                    res = func(self, *args, **kwargs)
                except (IOError, EOFError) as e:
                    retried += 1
                    if retried > self.DOWNLOAD_ERROR_MAX_RETRIES:
                        raise e  # give up and propagate the error
                    else:
                        print("Download failed, retry {} of {}".format(retried, self.DOWNLOAD_ERROR_MAX_RETRIES))
                        continue
                else:
                    break
            return res
        return wrapper
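
    # retry_download is applied as a plain decorator inside this class body; the
    # download_archive method below is wrapped as:
    #
    #     @retry_download
    #     def download_archive(self, ref, destination, project_id=None):
    #         ...
    #
    # Each wrapped call is re-run from the start on IOError/EOFError, with up to
    # DOWNLOAD_ERROR_MAX_RETRIES retries after the first attempt.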

    def download_artifact(self, job_id, artifact_path, destination=None):
        """
        Download specific files from a job's artifacts and optionally save them to the destination.

        :param job_id: GitLab CI job ID
        :param artifact_path: list of paths inside the artifacts (relative to the artifact root)
        :param destination: destination directory. Do not save to files if destination is None
        :return: a list with the raw data of each downloaded artifact file
        """
        job = self.project.jobs.get(job_id)

        raw_data_list = []

        for a_path in artifact_path:
            try:
                data = job.artifact(a_path)
            except gitlab.GitlabGetError as e:
                print("Failed to download '{}' from job {}".format(a_path, job_id))
                raise e
            raw_data_list.append(data)
            if destination:
                file_path = os.path.join(destination, a_path)
                try:
                    os.makedirs(os.path.dirname(file_path))
                except OSError:
                    # directory already exists
                    pass
                with open(file_path, "wb") as f:
                    f.write(data)

        return raw_data_list
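
    # Usage sketch (hypothetical job ID and artifact paths): fetch two files from a
    # job's artifacts and also write them under "output/".
    #
    #     raw_list = gitlab_inst.download_artifact(
    #         job_id=100000,
    #         artifact_path=["build/example_a.json", "build/example_b.json"],
    #         destination="output")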

    def find_job_id(self, job_name, pipeline_id=None, job_status="success"):
        """
        Get job IDs by job name within a specific pipeline.

        :param job_name: job name
        :param pipeline_id: pipeline to search. If None, the pipeline ID is taken from the
            CI predefined variable CI_PIPELINE_ID.
        :param job_status: job status to match. One pipeline can have multiple jobs with the
            same name after retries; job_status is used to filter these jobs.
        :return: a list of job IDs (a parallel job generates multiple jobs)
        """
        job_id_list = []
        if pipeline_id is None:
            pipeline_id = os.getenv("CI_PIPELINE_ID")
        pipeline = self.project.pipelines.get(pipeline_id)
        jobs = pipeline.jobs.list(all=True)
        for job in jobs:
            match = self.JOB_NAME_PATTERN.match(job.name)
            if match:
                if match.group(1) == job_name and job.status == job_status:
                    job_id_list.append({"id": job.id, "parallel_num": match.group(3)})
        return job_id_list
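
    # Usage sketch (hypothetical job name): look up all parallel instances of a job in
    # the current pipeline (CI_PIPELINE_ID), then download each one's artifacts.
    #
    #     for job in gitlab_inst.find_job_id("example_test"):
    #         gitlab_inst.download_artifacts(job["id"], "artifacts/{}".format(job["parallel_num"]))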

    @retry_download
    def download_archive(self, ref, destination, project_id=None):
        """
        Download the archive of a certain commit of a repository and extract it to the destination path.

        :param ref: commit SHA or branch name
        :param destination: destination path for the extracted archive
        :param project_id: project to download from; defaults to the current instance's project if None
        :return: root path of the extracted archive
        """
        if project_id is None:
            project = self.project
        else:
            project = self.gitlab_inst.projects.get(project_id)

        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            try:
                project.repository_archive(sha=ref, streamed=True, action=temp_file.write)
            except gitlab.GitlabGetError as e:
                print("Failed to download archive from project {}".format(project.id))
                raise e

        print("archive size: {:.03f}MB".format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))

        with tarfile.open(temp_file.name, "r") as archive_file:
            root_name = archive_file.getnames()[0]
            archive_file.extractall(destination)

        return os.path.join(os.path.realpath(destination), root_name)
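
    # Usage sketch (hypothetical ref and path): download and extract a branch snapshot,
    # getting back the extracted root directory.
    #
    #     checkout_root = gitlab_inst.download_archive("master", "workdir")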


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("action")
    parser.add_argument("project_id", type=int)
    parser.add_argument("--pipeline_id", "-i", type=int, default=None)
    parser.add_argument("--ref", "-r", default="master")
    parser.add_argument("--job_id", "-j", type=int, default=None)
    parser.add_argument("--job_name", "-n", default=None)
    parser.add_argument("--project_name", "-m", default=None)
    parser.add_argument("--destination", "-d", default=None)
    parser.add_argument("--artifact_path", "-a", nargs="*", default=None)
    args = parser.parse_args()

    gitlab_inst = Gitlab(args.project_id)
    if args.action == "download_artifacts":
        gitlab_inst.download_artifacts(args.job_id, args.destination)
    elif args.action == "download_artifact":
        gitlab_inst.download_artifact(args.job_id, args.artifact_path, args.destination)
    elif args.action == "find_job_id":
        job_ids = gitlab_inst.find_job_id(args.job_name, args.pipeline_id)
        # parallel_num is None for non-parallel jobs, so fall back to an empty string when joining
        print(";".join([",".join([str(j["id"]), j["parallel_num"] or ""]) for j in job_ids]))
    elif args.action == "download_archive":
        gitlab_inst.download_archive(args.ref, args.destination)
    elif args.action == "get_project_id":
        ret = gitlab_inst.get_project_id(args.project_name)
        print("project id: {}".format(ret))
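
# Command-line usage sketch. The module file name "gitlab_api.py" and all IDs, names and
# paths below are placeholders:
#
#     python gitlab_api.py download_artifacts 1 --job_id 100000 --destination artifacts/
#     python gitlab_api.py find_job_id 1 --job_name example_test --pipeline_id 2345
#     python gitlab_api.py download_archive 1 --ref master --destination sources/
#     python gitlab_api.py get_project_id 1 --project_name example-project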