# auto-pipe-launch.py
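# Pushes a shared security .gitlab-ci.yml (SAST and dependency-scanning includes) to every
# repository listed in Repos.xlsx via the GitLab commits API, then polls each commit's
# last pipeline until it reaches a final state (success, failed, or canceled).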
import pandas as pd
import http.client
import json
import ssl
import time
import collections
import threading
#################GLOBAL SETTINGS#################
file_name = "./Repos.xlsx"
# writer = pd.ExcelWriter("./Repos.xlsx", engine="xlsxwriter")
# worksheet = writer.sheets['suraj1']
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
queue = collections.deque(maxlen=3)
lock = threading.Lock()
branch_name = "api/sectest_090624"
semaphore = threading.BoundedSemaphore(value=1)
##################################################
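# queue is meant to track the repositories whose pipeline is currently running (lock guards it),
# the bounded semaphore caps how many pipelines are launched at once (value=1: strictly one at a time),
# and branch_name is the branch the CI config is committed to in every repository.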
## Returns the repository information from the xlsx file as a list of dicts (records)
def getDataFromFile(file):
    raw_data = pd.read_excel(file)
    data = raw_data.to_dict("records")
    return data
# def writeDataToFile(file):
#     raw_data = pd.read_excel(file)
#     data = raw_data.to_dict("records")
#     for i in data:
#         if pd.isnull(i['status']):
#             i['status'] = "pending"
#         else:
#             print(f"it is not null it is {i['status']}")
#     output = pd.DataFrame.from_dict(data)
#     print(output)
#     output.to_excel(file_name)
# writeDataToFile(file_name)
# conn.request("GET", "/api/v4/projects/1736/pipelines?ref=check/proRules&status=success", headers=headers)
def launchRepoPipe(id, start_branch):
    """Commit the shared CI config to project `id` and wait for the resulting pipeline to finish."""
    global queue
    conn = http.client.HTTPSConnection("gitlab.host", context=ssl_context, timeout=10)
    headers = {
        'PRIVATE-TOKEN': '9zFDNtcWsPz6xWCnWeQH',
        'Content-Type': 'application/json'
    }
    # The same CI config is reused by all three commit payloads below.
    ci_config = """stages:
  - sast
  - deps-src
include:
  - project: "awesome-security/pipelines/sast-pipes"
    file: ".gitlab-ci.yml"
  - project: "awesome-security/pipelines/deps-pipes"
    file: ".gitlab-ci.yml"
"""
    # First attempt: create .gitlab-ci.yml on a new branch cut from start_branch.
    commit_data = {
        "branch": str(branch_name),
        "start_branch": str(start_branch),
        "commit_message": "some commit message",
        "actions": [
            {
                "action": "create",
                "file_path": ".gitlab-ci.yml",
                "content": ci_config
            }
        ]
    }
    # Fallback when the file already exists: update it instead of creating it.
    commit_data_400_error = {
        "branch": str(branch_name),
        "start_branch": str(start_branch),
        "commit_message": "some commit message",
        "actions": [
            {
                "action": "update",
                "file_path": ".gitlab-ci.yml",
                "content": ci_config
            }
        ]
    }
    # Fallback when the branch already exists: update the file on the existing branch.
    commit_data_400_error_branch = {
        "branch": str(branch_name),
        "commit_message": "some commit message",
        "actions": [
            {
                "action": "update",
                "file_path": ".gitlab-ci.yml",
                "content": ci_config
            }
        ]
    }
    json_data = json.dumps(commit_data)
    print(f"pushing config file to project w/ id: {id}...")
    conn.request("POST", f"/api/v4/projects/{id}/repository/commits", body=json_data, headers=headers)
    response = conn.getresponse()
    body = response.read().decode()  # read the body before reusing the connection
    if response.status == 400:
        print("Such file or branch already exists! Updating the existing file on the branch...")
        json_data = json.dumps(commit_data_400_error)
        conn.request("POST", f"/api/v4/projects/{id}/repository/commits", body=json_data, headers=headers)
        response = conn.getresponse()
        body = response.read().decode()
        print(response.status)
        if response.status == 200 or response.status == 201:
            print("File pushed successfully")
        elif response.status == 400:
            print("Such branch already exists")
            json_data = json.dumps(commit_data_400_error_branch)
            conn.request("POST", f"/api/v4/projects/{id}/repository/commits", body=json_data, headers=headers)
            response = conn.getresponse()
            body = response.read().decode()
        else:
            print(f"An error occurred while pushing file to repository #{id}: {response.status} {body}")
        json_response = json.loads(body)
    else:
        json_response = json.loads(body)
    commit_id = json_response.get('id')
    # Poll the commit every minute until its pipeline reaches a final state.
    while True:
        time.sleep(60)
        print(f"checking status for pipeline of the project w/ id: {id}")
        conn.request("GET", f"/api/v4/projects/{id}/repository/commits/{commit_id}", headers=headers)
        response = conn.getresponse().read().decode()
        print(response)
        try:
            pipeline_status = json.loads(response).get('last_pipeline').get('status')
            if pipeline_status in ("failed", "canceled", "success"):
                print(f"pipeline of project with id {id} has {pipeline_status.upper()} status.\nDeleting it from the queue...")
                with lock:
                    for i in list(queue):
                        if i['repo_id'] == id:
                            queue.remove(i)  # deque.pop() takes no argument; remove() drops this exact item
                print(f"pipeline for project w/ id {id} has been removed from the queue!")
                break
            else:
                print(f"pipeline in project #{id} is still running")
        except Exception as e:
            print(f"NO PIPELINE STATUS for project w/ id {id}:\n {e}")
            break
    conn.close()
def queueHandler(item, sem):
    with sem:
        with lock:
            queue.append(item)  # register this repository as in-flight so launchRepoPipe can remove it when its pipeline finishes
        launchRepoPipe(item['repo_id'], item['branch'])
#===================================================================================
def main():
    repositories = getDataFromFile(file_name)
    # spawn one worker thread per repository; the bounded semaphore inside queueHandler
    # makes sure the pipelines are launched one at a time
    for repo in repositories:
        t = threading.Thread(target=queueHandler, args=(repo, semaphore))
        t.start()

if __name__ == "__main__":
    main()
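# Expected input (inferred from the code): Repos.xlsx needs at least a 'repo_id' column with the
# GitLab project id and a 'branch' column with the branch to start from, e.g.
#   repo_id | branch
#   1736    | main      <- example values only
# Run with:  python auto-pipe-launch.py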