#!/usr/bin/env python3

from collections import namedtuple
import json
import time

import jwt
import requests  # type: ignore
import boto3  # type: ignore

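# Intended to run as an AWS Lambda handler (see handler() at the bottom) for GitHub
# pull_request webhook events: it cancels still-running workflow runs when a PR is
# closed or labeled "do not test", and re-runs the latest workflow when the
# "can be tested" label is added.
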
NEED_RERUN_OR_CANCELL_WORKFLOWS = {
    "PullRequestCI",
    "Docs",
    "DocsRelease",
    "BackportPR",
}

# https://docs.github.com/en/rest/reference/actions#cancel-a-workflow-run
#
API_URL = "https://api.github.com/repos/ClickHouse/ClickHouse"

MAX_RETRY = 5


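# GitHub App authentication flow: a short-lived JWT signed with the app's private
# key identifies the app, is used to look up the installation, and is then
# exchanged for an installation access token that authorizes the cancel/rerun
# API calls below.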
def get_installation_id(jwt_token):
    headers = {
        "Authorization": f"Bearer {jwt_token}",
        "Accept": "application/vnd.github.v3+json",
    }
    response = requests.get("https://api.github.com/app/installations", headers=headers)
    response.raise_for_status()
    data = response.json()
    return data[0]["id"]


def get_access_token(jwt_token, installation_id):
    headers = {
        "Authorization": f"Bearer {jwt_token}",
        "Accept": "application/vnd.github.v3+json",
    }
    response = requests.post(
        f"https://api.github.com/app/installations/{installation_id}/access_tokens",
        headers=headers,
    )
    response.raise_for_status()
    data = response.json()
    return data["token"]


def get_key_and_app_from_aws():
    secret_name = "clickhouse_github_secret_key"
    session = boto3.session.Session()
    client = session.client(
        service_name="secretsmanager",
    )
    get_secret_value_response = client.get_secret_value(SecretId=secret_name)
    data = json.loads(get_secret_value_response["SecretString"])
    return data["clickhouse-app-key"], int(data["clickhouse-app-id"])


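# The JWT claims follow GitHub's rules for App JWTs: "iat" is backdated by 60
# seconds to tolerate clock drift, and "exp" stays within the 10-minute maximum
# lifetime GitHub allows for these tokens.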
def get_token_from_aws():
    private_key, app_id = get_key_and_app_from_aws()
    payload = {
        "iat": int(time.time()) - 60,
        "exp": int(time.time()) + (10 * 60),
        "iss": app_id,
    }

    encoded_jwt = jwt.encode(payload, private_key, algorithm="RS256")
    installation_id = get_installation_id(encoded_jwt)
    return get_access_token(encoded_jwt, installation_id)


def _exec_get_with_retry(url):
    for i in range(MAX_RETRY):
        try:
            response = requests.get(url)
            response.raise_for_status()
            return response.json()
        except Exception as ex:
            print("Got exception executing request", ex)
            # Linear backoff: wait 1, 2, 3, ... seconds between attempts
            time.sleep(i + 1)

    raise Exception("Cannot execute GET request with retries")


WorkflowDescription = namedtuple(
    "WorkflowDescription", ["run_id", "status", "rerun_url", "cancel_url"]
)


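# Workflow runs for the PR's head branch are fetched page by page until an empty
# page comes back, with a hard cap of roughly 30 pages as a safety limit against
# runaway pagination.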
def get_workflows_description_for_pull_request(pull_request_event):
    head_branch = pull_request_event["head"]["ref"]
    print("PR", pull_request_event["number"], "has head ref", head_branch)
    workflows_data = []
    workflows = _exec_get_with_retry(
        API_URL + f"/actions/runs?branch={head_branch}&event=pull_request&page=1"
    )
    workflows_data += workflows["workflow_runs"]
    i = 2
    while len(workflows["workflow_runs"]) > 0:
        workflows = _exec_get_with_retry(
            API_URL + f"/actions/runs?branch={head_branch}&event=pull_request&page={i}"
        )
        workflows_data += workflows["workflow_runs"]
        i += 1
        if i > 30:
            print("Too many workflows found")
            break

    workflow_descriptions = []
    for workflow in workflows_data:
        # Unfortunately the API request cannot filter out workflow runs from forks,
        # so filter them manually here.
        if (
            workflow["head_repository"]["full_name"]
            == pull_request_event["head"]["repo"]["full_name"]
            and workflow["name"] in NEED_RERUN_OR_CANCELL_WORKFLOWS
        ):
            workflow_descriptions.append(
                WorkflowDescription(
                    run_id=workflow["id"],
                    status=workflow["status"],
                    rerun_url=workflow["rerun_url"],
                    cancel_url=workflow["cancel_url"],
                )
            )

    return workflow_descriptions


def get_workflow_description(workflow_id):
    workflow = _exec_get_with_retry(API_URL + f"/actions/runs/{workflow_id}")
    return WorkflowDescription(
        run_id=workflow["id"],
        status=workflow["status"],
        rerun_url=workflow["rerun_url"],
        cancel_url=workflow["cancel_url"],
    )


def _exec_post_with_retry(url, token):
    headers = {"Authorization": f"token {token}"}
    for i in range(MAX_RETRY):
        try:
            response = requests.post(url, headers=headers)
            response.raise_for_status()
            return response.json()
        except Exception as ex:
            print("Got exception executing request", ex)
            time.sleep(i + 1)

    raise Exception("Cannot execute POST request with retries")


def exec_workflow_url(urls_to_cancel, token):
    for url in urls_to_cancel:
        print("Post for workflow using url", url)
        _exec_post_with_retry(url, token)
        print("Workflow post finished")


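# The Lambda event is expected to carry the GitHub webhook payload as a JSON
# string in event["body"], which matches an API Gateway proxy integration (an
# assumption about the deployment; main() itself only relies on the "body" key).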
def main(event):
    token = get_token_from_aws()
    event_data = json.loads(event["body"])

    print("Got event for PR", event_data["number"])
    action = event_data["action"]
    print("Got action", event_data["action"])
    pull_request = event_data["pull_request"]
    labels = {label["name"] for label in pull_request["labels"]}
    print("PR has labels", labels)
    if action == "closed" or "do not test" in labels:
        print("PR merged/closed or manually labeled 'do not test', will kill workflows")
        workflow_descriptions = get_workflows_description_for_pull_request(pull_request)
        urls_to_cancel = []
        for workflow_description in workflow_descriptions:
            if workflow_description.status != "completed":
                urls_to_cancel.append(workflow_description.cancel_url)
        print(f"Found {len(urls_to_cancel)} workflows to cancel")
        exec_workflow_url(urls_to_cancel, token)
    elif action == "labeled" and "can be tested" in labels:
        print("PR marked with 'can be tested' label, rerun workflow")
        workflow_descriptions = get_workflows_description_for_pull_request(pull_request)
        if not workflow_descriptions:
            print("No workflows found")
            return

        sorted_workflows = list(sorted(workflow_descriptions, key=lambda x: x.run_id))
        most_recent_workflow = sorted_workflows[-1]
        print("Latest workflow", most_recent_workflow)
        if most_recent_workflow.status != "completed":
            print("Latest workflow is not completed, cancelling")
            exec_workflow_url([most_recent_workflow.cancel_url], token)
            print("Cancelled")

        # Wait (up to 30 checks, 3 seconds apart) for the cancelled run to reach a
        # terminal state before triggering the rerun.
        for _ in range(30):
            latest_workflow_desc = get_workflow_description(most_recent_workflow.run_id)
            print("Checking latest workflow", latest_workflow_desc)
            if latest_workflow_desc.status in ("completed", "cancelled"):
                print("Latest workflow finally finished, going to rerun")
                exec_workflow_url([most_recent_workflow.rerun_url], token)
                print("Rerun finished, exiting")
                break
            print("Still in an unexpected status, waiting")
            time.sleep(3)

    else:
        print("Nothing to do")


def handler(event, _):
    main(event)
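

# For local debugging only: a minimal sketch of invoking the handler with a
# synthetic event. The payload below is an illustration built from the fields
# that main() actually reads, not a real webhook delivery; running it still
# requires AWS credentials that can read the GitHub App secret.
if __name__ == "__main__":
    sample_event = {
        "body": json.dumps(
            {
                "number": 1,
                "action": "opened",  # neither "closed" nor "labeled", so main() just logs "Nothing to do"
                "pull_request": {
                    "number": 1,
                    "labels": [],
                    "head": {
                        "ref": "some-branch",
                        "repo": {"full_name": "ClickHouse/ClickHouse"},
                    },
                },
            }
        )
    }
    handler(sample_event, None)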