initial commit - basic api call and filtering of jobs working

This commit is contained in:
dechert 2023-06-14 12:16:25 +02:00
parent dd2fb629f7
commit 9a2f1f7cb8
3 changed files with 82 additions and 1 deletions


@ -1,2 +1,3 @@
# gitlab-api-ci-finder
# gitlab-api-ci-finder (glcifinder)

config_parser.py Normal file

@ -0,0 +1,19 @@
import os

import yaml

DEFAULT_CONFIG_FILE_NAME = "config.yml"

# if os.environ.get('ENV') == 'prod':
#     config_file_to_load = "config_prod.yml"
# else:
config_file_to_load = DEFAULT_CONFIG_FILE_NAME

BASE_FOLDER = os.path.dirname(os.path.abspath(__file__))
path_to_config = os.path.join(BASE_FOLDER, config_file_to_load)
print("opening config file " + path_to_config)

with open(path_to_config, "r") as ymlfile:
    cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)

token = cfg["token"]
prod_mode = cfg["prod"]
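
The config.yml itself is not part of this commit; judging by the keys config_parser.py reads, a minimal file would look something like this (placeholder values):

# config.yml — placeholder values, keys inferred from config_parser.py
token: "<your_gitlab_private_token>"
prod: false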

gitlab-api.py Normal file

@ -0,0 +1,61 @@
# TODOs:
# upload on github
# add CLI interface
# add config file
import itertools

import requests

# curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/namespaces?per_page=50"
# https://docs.gitlab.com/ee/api/rest/index.html#pagination
# curl --globoff --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/jobs?scope[]=pending&scope[]=running"
import config_parser


def write_to_file(response_array):
    # append the raw API responses to a file for later inspection
    with open("gitlab-jobs1.txt", "a") as f:
        f.write(str(response_array))


def find_acceptance_jobs_that_were_run(list_of_jobs):
    # keep only successful runs of the 'deploy-acceptance' job
    name_filter = filter(lambda x: x['name'] == 'deploy-acceptance' and x['status'] == 'success', list_of_jobs)
    filtered_list_of_jobs = list(name_filter)
    print(filtered_list_of_jobs)
    return filtered_list_of_jobs


if __name__ == '__main__':
    NUMBER_OF_ITEMS_PER_PAGE = 100
    url_template = 'https://gitlab.atb-bremen.de/api/v4/projects/244/jobs?per_page={}&page={}'
    # fetch a single page of 100 jobs for now; GitLab pages are 1-indexed,
    # so e.g. range(1, 51) would fetch the last 100x50 = 5000 jobs
    iterations = list(range(1, 2))
    response_array = []
    headers = {
        'PRIVATE-TOKEN': config_parser.token
    }
    for i in iterations:
        response = requests.get(url_template.format(NUMBER_OF_ITEMS_PER_PAGE, i), headers=headers)
        # each page is a JSON array of job objects
        response_array.append(response.json())
    # flatten the per-page lists into a single list of jobs
    flat_list_of_jobs = list(itertools.chain(*response_array))
    find_acceptance_jobs_that_were_run(flat_list_of_jobs)
    # TODO filter output for relevant fields
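
The final TODO could be handled by projecting each job dict down to a few fields before printing; a minimal sketch, not part of this commit (the field names follow the GitLab jobs API schema):

def extract_relevant_fields(list_of_jobs):
    # keep only a handful of fields per job; names follow the GitLab jobs API schema
    return [
        {key: job.get(key) for key in ('id', 'name', 'status', 'ref', 'finished_at', 'web_url')}
        for job in list_of_jobs
    ]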