Merge pull request #32911 from ClickHouse/deb-aarch64

Deb aarch64
Commit 0804e04558 by alesapin, 2022-01-13 19:11:19 +03:00 (committed via GitHub)
38 changed files with 1380 additions and 256 deletions


@@ -9,7 +9,26 @@ on: # yamllint disable-line rule:truthy
     branches:
       - 'backport/**'
 jobs:
-  DockerHubPush:
+  DockerHubPushAarch64:
+    needs: CheckLabels
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_images_check.py --suffix aarch64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+  DockerHubPushAmd64:
+    needs: CheckLabels
     runs-on: [self-hosted, style-checker]
     steps:
       - name: Clear repository
@@ -20,12 +39,40 @@ jobs:
       - name: Images check
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
-          python3 docker_images_check.py
+          python3 docker_images_check.py --suffix amd64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
+  DockerHubPush:
+    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Download changed aarch64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}
+      - name: Download changed amd64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v2
         with:
           name: changed_images
-          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+          path: ${{ runner.temp }}/changed_images.json
   CompatibilityCheck:
     needs: [BuilderDebRelease]
     runs-on: [self-hosted, style-checker]
@@ -106,6 +153,47 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH $CACHES_PATH
+  BuilderDebAarch64:
+    needs: [DockerHubPush, FastTest]
+    runs-on: [self-hosted, builder]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/build_check
+          IMAGES_PATH=${{runner.temp}}/images_path
+          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+          CACHES_PATH=${{runner.temp}}/../ccaches
+          CHECK_NAME=ClickHouse build check (actions)
+          BUILD_NAME=package_aarch64
+          EOF
+      - name: Download changed images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images
+          path: ${{ runner.temp }}/images_path
+      - name: Check out repository code
+        uses: actions/checkout@v2
+        with:
+          submodules: 'true'
+          fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Build
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+      - name: Upload build URLs to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ env.BUILD_NAME }}
+          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH $CACHES_PATH
   BuilderDebAsan:
     needs: [DockerHubPush]
     runs-on: [self-hosted, builder]
@@ -247,6 +335,7 @@ jobs:
   BuilderReport:
     needs:
       - BuilderDebRelease
+      - BuilderDebAarch64
       - BuilderDebAsan
       - BuilderDebTsan
       - BuilderDebDebug
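
The new job layout above builds every changed image twice, once per architecture, pushes each copy under an arch-suffixed tag, and then lets docker_manifests_merge.py publish a single multi-arch tag. A minimal sketch of the Docker commands such a merge boils down to; the image name and tag below are illustrative, not the ones the CI scripts compute:

# Hypothetical tag names, for illustration only.
IMAGE=clickhouse/test-base
TAG=32911

# The per-arch jobs push suffixed tags such as:
#   $IMAGE:$TAG-amd64    (from DockerHubPushAmd64)
#   $IMAGE:$TAG-aarch64  (from DockerHubPushAarch64)

# Merge them into one tag that `docker pull` resolves per platform:
docker manifest create --amend "$IMAGE:$TAG" "$IMAGE:$TAG-amd64" "$IMAGE:$TAG-aarch64"
docker manifest push "$IMAGE:$TAG"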


@@ -6,7 +6,7 @@ env:
 on: # yamllint disable-line rule:truthy
   workflow_run:
-    workflows: ["CIGithubActions", "ReleaseCI", "DocsCheck", "BackportPR"]
+    workflows: ["PullRequestCI", "ReleaseCI", "DocsCheck", "BackportPR"]
     types:
       - requested
 jobs:


@@ -28,24 +28,70 @@ jobs:
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
           python3 run_check.py
-  DockerHubPush:
+  DockerHubPushAarch64:
     needs: CheckLabels
-    runs-on: [self-hosted, style-checker]
+    runs-on: [self-hosted, func-tester-aarch64]
     steps:
       - name: Clear repository
         run: |
-          sudo rm -rf $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
       - name: Check out repository code
        uses: actions/checkout@v2
       - name: Images check
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
-          python3 docker_images_check.py
+          python3 docker_images_check.py --suffix aarch64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+  DockerHubPushAmd64:
+    needs: CheckLabels
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_images_check.py --suffix amd64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
+  DockerHubPush:
+    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Download changed aarch64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}
+      - name: Download changed amd64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v2
         with:
           name: changed_images
-          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+          path: ${{ runner.temp }}/changed_images.json
   DocsCheck:
     needs: DockerHubPush
     runs-on: [self-hosted, func-tester]


@@ -8,7 +8,7 @@ on: # yamllint disable-line rule:truthy
   schedule:
     - cron: '0 */6 * * *'
   workflow_run:
-    workflows: ["CIGithubActions"]
+    workflows: ["PullRequestCI"]
     types:
       - completed
   workflow_dispatch:


@@ -9,7 +9,26 @@ on: # yamllint disable-line rule:truthy
     branches:
       - 'master'
 jobs:
-  DockerHubPush:
+  DockerHubPushAarch64:
+    needs: CheckLabels
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_images_check.py --suffix aarch64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+  DockerHubPushAmd64:
+    needs: CheckLabels
     runs-on: [self-hosted, style-checker]
     steps:
       - name: Clear repository
@@ -20,12 +39,40 @@ jobs:
       - name: Images check
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
-          python3 docker_images_check.py
+          python3 docker_images_check.py --suffix amd64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
+  DockerHubPush:
+    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Download changed aarch64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}
+      - name: Download changed amd64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v2
         with:
           name: changed_images
-          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+          path: ${{ runner.temp }}/changed_images.json
   StyleCheck:
     needs: DockerHubPush
     runs-on: [self-hosted, style-checker]
@@ -168,6 +215,47 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  BuilderDebAarch64:
+    needs: [DockerHubPush, FastTest]
+    runs-on: [self-hosted, builder]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/build_check
+          IMAGES_PATH=${{runner.temp}}/images_path
+          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+          CACHES_PATH=${{runner.temp}}/../ccaches
+          CHECK_NAME=ClickHouse build check (actions)
+          BUILD_NAME=package_aarch64
+          EOF
+      - name: Download changed images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images
+          path: ${{ runner.temp }}/images_path
+      - name: Check out repository code
+        uses: actions/checkout@v2
+        with:
+          submodules: 'true'
+          fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Build
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+      - name: Upload build URLs to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ env.BUILD_NAME }}
+          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH $CACHES_PATH
   BuilderPerformance:
     needs: DockerHubPush
     runs-on: [self-hosted, builder]
@@ -815,6 +903,7 @@ jobs:
   BuilderReport:
     needs:
       - BuilderDebRelease
+      - BuilderDebAarch64
       - BuilderBinRelease
       - BuilderDebAsan
       - BuilderDebTsan
@@ -963,6 +1052,41 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  FunctionalStatelessTestAarch64:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/stateless_release
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Stateless tests (aarch64, actions)
+          REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse
+          KILL_TIMEOUT=10800
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Functional test
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci
+          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH
   FunctionalStatelessTestAsan0:
     needs: [BuilderDebAsan]
     runs-on: [self-hosted, func-tester]
@@ -1478,6 +1602,41 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  FunctionalStatefulTestAarch64:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/stateful_release
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Stateful tests (aarch64, actions)
+          REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse
+          KILL_TIMEOUT=3600
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Functional test
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci
+          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH
   FunctionalStatefulTestAsan:
     needs: [BuilderDebAsan]
     runs-on: [self-hosted, func-tester]
@@ -2659,6 +2818,7 @@ jobs:
       - FunctionalStatelessTestDebug2
       - FunctionalStatelessTestRelease
       - FunctionalStatelessTestReleaseDatabaseOrdinary
+      - FunctionalStatelessTestAarch64
       - FunctionalStatelessTestAsan0
       - FunctionalStatelessTestAsan1
       - FunctionalStatelessTestTsan0
@@ -2671,6 +2831,7 @@ jobs:
       - FunctionalStatefulTestDebug
       - FunctionalStatefulTestRelease
       - FunctionalStatefulTestReleaseDatabaseOrdinary
+      - FunctionalStatefulTestAarch64
       - FunctionalStatefulTestAsan
       - FunctionalStatefulTestTsan
       - FunctionalStatefulTestMsan
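
All the "Set envs" steps above share one GitHub Actions idiom: appending KEY=value lines to the file that $GITHUB_ENV points to makes those variables visible to every later step in the same job. The quoted 'EOF' delimiter keeps the shell from expanding anything at write time; the ${{runner.temp}} expressions are substituted by Actions templating before the script ever runs. A minimal standalone sketch, with illustrative values:

# Step 1: export variables for the rest of the job.
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=/tmp/build_check
BUILD_NAME=package_aarch64
EOF

# Any later step in the same job then sees them as ordinary env vars:
echo "building $BUILD_NAME into $TEMP_PATH"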


@@ -1,4 +1,4 @@
-name: CIGithubActions
+name: PullRequestCI
 
 env:
   # Force the stdout and stderr streams to be unbuffered
@@ -31,7 +31,25 @@ jobs:
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
           python3 run_check.py
-  DockerHubPush:
+  DockerHubPushAarch64:
+    needs: CheckLabels
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_images_check.py --suffix aarch64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+  DockerHubPushAmd64:
     needs: CheckLabels
     runs-on: [self-hosted, style-checker]
     steps:
@@ -43,12 +61,40 @@ jobs:
       - name: Images check
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
-          python3 docker_images_check.py
+          python3 docker_images_check.py --suffix amd64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
+  DockerHubPush:
+    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Download changed aarch64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}
+      - name: Download changed amd64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v2
         with:
           name: changed_images
-          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+          path: ${{ runner.temp }}/changed_images.json
   StyleCheck:
     needs: DockerHubPush
     runs-on: [self-hosted, style-checker]
@@ -928,8 +974,8 @@ jobs:
   BuilderReport:
     needs:
       - BuilderDebRelease
-      - BuilderBinRelease
       - BuilderDebAarch64
+      - BuilderBinRelease
       - BuilderDebAsan
       - BuilderDebTsan
       - BuilderDebUBsan
@@ -1153,6 +1199,41 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  FunctionalStatelessTestAarch64:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/stateless_release
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Stateless tests (aarch64, actions)
+          REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse
+          KILL_TIMEOUT=10800
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Functional test
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci
+          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH
   FunctionalStatelessTestAsan0:
     needs: [BuilderDebAsan]
     runs-on: [self-hosted, func-tester]
@@ -1668,6 +1749,41 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  FunctionalStatefulTestAarch64:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/stateful_release
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Stateful tests (aarch64, actions)
+          REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse
+          KILL_TIMEOUT=3600
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Functional test
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci
+          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH
   FunctionalStatefulTestAsan:
     needs: [BuilderDebAsan]
     runs-on: [self-hosted, func-tester]
@@ -2888,6 +3004,7 @@ jobs:
       - FunctionalStatelessTestReleaseDatabaseReplicated0
       - FunctionalStatelessTestReleaseDatabaseReplicated1
       - FunctionalStatelessTestReleaseWideParts
+      - FunctionalStatelessTestAarch64
      - FunctionalStatelessTestAsan0
       - FunctionalStatelessTestAsan1
       - FunctionalStatelessTestTsan0
@@ -2899,6 +3016,7 @@ jobs:
       - FunctionalStatelessTestUBsan
       - FunctionalStatefulTestDebug
       - FunctionalStatefulTestRelease
+      - FunctionalStatefulTestAarch64
       - FunctionalStatefulTestAsan
       - FunctionalStatefulTestTsan
       - FunctionalStatefulTestMsan


@@ -19,7 +19,26 @@ on: # yamllint disable-line rule:truthy
       - '.github/**'
   workflow_dispatch:
 jobs:
-  DockerHubPush:
+  DockerHubPushAarch64:
+    needs: CheckLabels
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_images_check.py --suffix aarch64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+  DockerHubPushAmd64:
+    needs: CheckLabels
     runs-on: [self-hosted, style-checker]
     steps:
       - name: Clear repository
@@ -30,12 +49,40 @@ jobs:
       - name: Images check
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
-          python3 docker_images_check.py
+          python3 docker_images_check.py --suffix amd64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
+  DockerHubPush:
+    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Download changed aarch64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}
+      - name: Download changed amd64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v2
         with:
           name: changed_images
-          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+          path: ${{ runner.temp }}/changed_images.json
   DocsRelease:
     needs: DockerHubPush
     runs-on: [self-hosted, func-tester]


@@ -12,7 +12,26 @@ on: # yamllint disable-line rule:truthy
       - '23.[1-9][1-9]'
       - '24.[1-9][1-9]'
 jobs:
-  DockerHubPush:
+  DockerHubPushAarch64:
+    needs: CheckLabels
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_images_check.py --suffix aarch64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+  DockerHubPushAmd64:
+    needs: CheckLabels
     runs-on: [self-hosted, style-checker]
     steps:
       - name: Clear repository
@@ -23,12 +42,40 @@ jobs:
       - name: Images check
         run: |
           cd $GITHUB_WORKSPACE/tests/ci
-          python3 docker_images_check.py
+          python3 docker_images_check.py --suffix amd64
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
+  DockerHubPush:
+    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Download changed aarch64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_aarch64
+          path: ${{ runner.temp }}
+      - name: Download changed amd64 images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images_amd64
+          path: ${{ runner.temp }}
+      - name: Images check
+        run: |
+          cd $GITHUB_WORKSPACE/tests/ci
+          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
         uses: actions/upload-artifact@v2
         with:
           name: changed_images
-          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+          path: ${{ runner.temp }}/changed_images.json
   CompatibilityCheck:
     needs: [BuilderDebRelease]
     runs-on: [self-hosted, style-checker]
@@ -109,6 +156,47 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH $CACHES_PATH
+  BuilderDebAarch64:
+    needs: [DockerHubPush, FastTest]
+    runs-on: [self-hosted, builder]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/build_check
+          IMAGES_PATH=${{runner.temp}}/images_path
+          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+          CACHES_PATH=${{runner.temp}}/../ccaches
+          CHECK_NAME=ClickHouse build check (actions)
+          BUILD_NAME=package_aarch64
+          EOF
+      - name: Download changed images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images
+          path: ${{ runner.temp }}/images_path
+      - name: Check out repository code
+        uses: actions/checkout@v2
+        with:
+          submodules: 'true'
+          fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Build
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+      - name: Upload build URLs to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ env.BUILD_NAME }}
+          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH $CACHES_PATH
   BuilderDebAsan:
     needs: [DockerHubPush]
     runs-on: [self-hosted, builder]
@@ -340,6 +428,7 @@ jobs:
   BuilderReport:
     needs:
       - BuilderDebRelease
+      - BuilderDebAarch64
       - BuilderDebAsan
       - BuilderDebTsan
       - BuilderDebUBsan
@@ -413,6 +502,41 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  FunctionalStatelessTestAarch64:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/stateless_release
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Stateless tests (aarch64, actions)
+          REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse
+          KILL_TIMEOUT=10800
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Functional test
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci
+          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH
   FunctionalStatelessTestAsan0:
     needs: [BuilderDebAsan]
     runs-on: [self-hosted, func-tester]
@@ -893,6 +1017,41 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  FunctionalStatefulTestAarch64:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/stateful_release
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Stateful tests (aarch64, actions)
+          REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse
+          KILL_TIMEOUT=3600
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Functional test
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci
+          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH
   FunctionalStatefulTestAsan:
     needs: [BuilderDebAsan]
     runs-on: [self-hosted, func-tester]
@@ -1580,6 +1739,7 @@ jobs:
       - FunctionalStatelessTestDebug1
       - FunctionalStatelessTestDebug2
       - FunctionalStatelessTestRelease
+      - FunctionalStatelessTestAarch64
       - FunctionalStatelessTestAsan0
       - FunctionalStatelessTestAsan1
       - FunctionalStatelessTestTsan0
@@ -1591,6 +1751,7 @@ jobs:
       - FunctionalStatelessTestUBsan
       - FunctionalStatefulTestDebug
       - FunctionalStatefulTestRelease
+      - FunctionalStatefulTestAarch64
       - FunctionalStatefulTestAsan
       - FunctionalStatefulTestTsan
       - FunctionalStatefulTestMsan


@@ -1,3 +1,4 @@
+# rebuild in #32911
 # docker build -t clickhouse/docs-build .
 FROM ubuntu:20.04


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/docs-check .
-FROM clickhouse/docs-builder
+ARG FROM_TAG=latest
+FROM clickhouse/docs-builder:$FROM_TAG
 
 COPY run.sh /


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/docs-release .
-FROM clickhouse/docs-builder
+ARG FROM_TAG=latest
+FROM clickhouse/docs-builder:$FROM_TAG
 
 COPY run.sh /


@@ -1,3 +1,4 @@
+# rebuild in #32911
 # docker build -t clickhouse/binary-builder .
 FROM ubuntu:20.04


@@ -28,12 +28,14 @@ RUN apt-get update \
     software-properties-common \
     --yes --no-install-recommends
 
+# Architecture of the image when BuildKit/buildx is used
+ARG TARGETARCH
+
 # Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
 # to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
 # Significantly increase deb packaging speed and compatible with old systems
-RUN curl -O https://clickhouse-datasets.s3.yandex.net/utils/1/dpkg-deb \
-    && chmod +x dpkg-deb \
-    && cp dpkg-deb /usr/bin
+RUN arch=${TARGETARCH:-amd64} \
+    && curl -Lo /usr/bin/dpkg-deb https://github.com/ClickHouse-Extras/dpkg/releases/download/1.21.1-clickhouse/dpkg-deb-${arch}
 
 RUN apt-get update \
     && apt-get install \

@@ -1,5 +1,6 @@
 # docker build -t clickhouse/test-base .
-FROM clickhouse/test-util
+ARG FROM_TAG=latest
+FROM clickhouse/test-util:$FROM_TAG
 
 # ARG for quick switch to a given ubuntu mirror
 ARG apt_archive="http://archive.ubuntu.com"
@@ -28,12 +29,14 @@ RUN apt-get update \
     software-properties-common \
     --yes --no-install-recommends
 
+# Architecture of the image when BuildKit/buildx is used
+ARG TARGETARCH
+
 # Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
 # to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
 # Significantly increase deb packaging speed and compatible with old systems
-RUN curl -O https://clickhouse-datasets.s3.yandex.net/utils/1/dpkg-deb \
-    && chmod +x dpkg-deb \
-    && cp dpkg-deb /usr/bin
+RUN arch=${TARGETARCH:-amd64} \
+    && curl -Lo /usr/bin/dpkg-deb https://github.com/ClickHouse-Extras/dpkg/releases/download/1.21.1-clickhouse/dpkg-deb-${arch}
 
 RUN apt-get update \
     && apt-get install \


@@ -1,12 +1,13 @@
 # docker build --network=host -t clickhouse/codebrowser .
 # docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output clickhouse/codebrowser
-FROM clickhouse/binary-builder
+ARG FROM_TAG=latest
+FROM clickhouse/binary-builder:$FROM_TAG
 
 # ARG for quick switch to a given ubuntu mirror
 ARG apt_archive="http://archive.ubuntu.com"
 RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
-RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-13 libllvm13 libclang-13-dev
+RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-13 libllvm13 libclang-13-dev libmlir-13-dev
 
 # repo versions doesn't work correctly with C++17
 # also we push reports to s3, so we add index.html to subfolder urls


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/fasttest .
-FROM clickhouse/test-util
+ARG FROM_TAG=latest
+FROM clickhouse/test-util:$FROM_TAG
 
 # ARG for quick switch to a given ubuntu mirror
 ARG apt_archive="http://archive.ubuntu.com"
@@ -28,12 +29,14 @@ RUN apt-get update \
     software-properties-common \
     --yes --no-install-recommends
 
+# Architecture of the image when BuildKit/buildx is used
+ARG TARGETARCH
+
 # Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
 # to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
 # Significantly increase deb packaging speed and compatible with old systems
-RUN curl -O https://clickhouse-datasets.s3.yandex.net/utils/1/dpkg-deb \
-    && chmod +x dpkg-deb \
-    && cp dpkg-deb /usr/bin
+RUN arch=${TARGETARCH:-amd64} \
    && curl -Lo /usr/bin/dpkg-deb https://github.com/ClickHouse-Extras/dpkg/releases/download/1.21.1-clickhouse/dpkg-deb-${arch}
 
 RUN apt-get update \
     && apt-get install \


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/fuzzer .
-FROM clickhouse/test-base
+ARG FROM_TAG=latest
+FROM clickhouse/test-base:$FROM_TAG
 
 # ARG for quick switch to a given ubuntu mirror
 ARG apt_archive="http://archive.ubuntu.com"


@@ -1,44 +1,56 @@
 # docker build -t clickhouse/integration-test .
-FROM clickhouse/test-base
+ARG FROM_TAG=latest
+FROM clickhouse/test-base:$FROM_TAG
 
 SHELL ["/bin/bash", "-c"]
 
 RUN apt-get update \
     && env DEBIAN_FRONTEND=noninteractive apt-get -y install \
-    tzdata \
-    python3 \
-    libicu-dev \
     bsdutils \
+    curl \
+    default-jre \
+    g++ \
     gdb \
-    unixodbc \
-    odbcinst \
+    iproute2 \
+    krb5-user \
+    libicu-dev \
     libsqlite3-dev \
     libsqliteodbc \
-    odbc-postgresql \
-    sqlite3 \
-    curl \
-    tar \
-    lz4 \
-    krb5-user \
-    iproute2 \
     lsof \
-    g++ \
-    default-jre
+    lz4 \
+    odbc-postgresql \
+    odbcinst \
+    python3 \
+    rpm2cpio \
+    sqlite3 \
+    tar \
+    tzdata \
+    unixodbc \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/*
 
-RUN rm -rf \
-    /var/lib/apt/lists/* \
-    /var/cache/debconf \
-    /tmp/* \
-RUN apt-get clean
+# Architecture of the image when BuildKit/buildx is used
+ARG TARGETARCH
 
-# Install MySQL ODBC driver
-RUN curl 'https://downloads.mysql.com/archives/get/p/10/file/mysql-connector-odbc-8.0.21-linux-glibc2.12-x86-64bit.tar.gz' --location --output 'mysql-connector.tar.gz' && tar -xzf mysql-connector.tar.gz && cd mysql-connector-odbc-8.0.21-linux-glibc2.12-x86-64bit/lib && mv * /usr/local/lib && ln -s /usr/local/lib/libmyodbc8a.so /usr/lib/x86_64-linux-gnu/odbc/libmyodbc.so
+# Install MySQL ODBC driver from RHEL rpm
+RUN arch=${TARGETARCH:-amd64} \
+    && case $arch in \
+        amd64) rarch=x86_64 ;; \
+        arm64) rarch=aarch64 ;; \
+    esac \
+    && cd /tmp \
+    && curl -o mysql-odbc.rpm "https://cdn.mysql.com/Downloads/Connector-ODBC/8.0/mysql-connector-odbc-8.0.27-1.el8.${rarch}.rpm" \
+    && rpm2archive mysql-odbc.rpm \
+    && tar xf mysql-odbc.rpm.tgz -C / ./usr/lib64/ \
+    && LINK_DIR=$(dpkg -L libodbc1 | grep '^/usr/lib/.*-linux-gnu/odbc$') \
+    && ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR" \
+    && ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR"/libmyodbc.so
 
 # Unfortunately this is required for a single test for conversion data from zookeeper to clickhouse-keeper.
 # ZooKeeper is not started by default, but consumes some space in containers.
 # 777 perms used to allow anybody to start/stop ZooKeeper
 ENV ZOOKEEPER_VERSION='3.6.3'
-RUN curl -O "https://mirrors.estointernet.in/apache/zookeeper/zookeeper-${ZOOKEEPER_VERSION}/apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz"
+RUN curl -O "https://dlcdn.apache.org/zookeeper/zookeeper-${ZOOKEEPER_VERSION}/apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz"
 RUN tar -zxvf apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz && mv apache-zookeeper-${ZOOKEEPER_VERSION}-bin /opt/zookeeper && chmod -R 777 /opt/zookeeper && rm apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz
 RUN echo $'tickTime=2500 \n\
 tickTime=2500 \n\
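
Since the MySQL ODBC driver now comes from a per-arch RHEL rpm unpacked into /usr/lib64, one rough way to confirm the right library landed in each variant of the image is to run it per platform. A sketch only; it assumes the image tag exists locally and that multi-platform pulls are available:

docker run --rm --platform linux/arm64 clickhouse/integration-test bash -c \
    'uname -m; ls -l /usr/lib64/libmyodbc8a.so'
# expect: aarch64 plus an existing library; on --platform linux/amd64 expect x86_64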


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/keeper-jepsen-test .
-FROM clickhouse/test-base
+ARG FROM_TAG=latest
+FROM clickhouse/test-base:$FROM_TAG
 
 ENV DEBIAN_FRONTEND=noninteractive
 ENV CLOJURE_VERSION=1.10.3.814


@@ -1,8 +1,13 @@
 # docker build -t clickhouse/pvs-test .
-FROM clickhouse/binary-builder
+ARG FROM_TAG=latest
+FROM clickhouse/binary-builder:$FROM_TAG
 
-RUN apt-get update --yes \
+# PVS studio doesn't support aarch64/arm64, so there is a check for it everywhere
+# We'll produce an empty image for arm64
+ARG TARGETARCH
+
+RUN test x$TARGETARCH = xarm64 || ( apt-get update --yes \
     && apt-get install \
     bash \
     wget \
@@ -15,7 +20,7 @@ RUN apt-get update --yes \
     libprotoc-dev \
     libgrpc++-dev \
     libc-ares-dev \
-    --yes --no-install-recommends
+    --yes --no-install-recommends )
 
 #RUN wget -nv -O - http://files.viva64.com/etc/pubkey.txt | sudo apt-key add -
 #RUN sudo wget -nv -O /etc/apt/sources.list.d/viva64.list http://files.viva64.com/etc/viva64.list
@@ -27,7 +32,7 @@ RUN apt-get update --yes \
 
 ENV PKG_VERSION="pvs-studio-latest"
 
-RUN set -x \
+RUN test x$TARGETARCH = xarm64 || ( set -x \
     && export PUBKEY_HASHSUM="ad369a2e9d8b8c30f5a9f2eb131121739b79c78e03fef0f016ea51871a5f78cd4e6257b270dca0ac3be3d1f19d885516" \
     && wget -nv https://files.viva64.com/etc/pubkey.txt -O /tmp/pubkey.txt \
     && echo "${PUBKEY_HASHSUM} /tmp/pubkey.txt" | sha384sum -c \
@@ -35,7 +40,7 @@ RUN set -x \
     && wget -nv "https://files.viva64.com/${PKG_VERSION}.deb" \
     && { debsig-verify ${PKG_VERSION}.deb \
     || echo "WARNING: Some file was just downloaded from the internet without any validation and we are installing it into the system"; } \
-    && dpkg -i "${PKG_VERSION}.deb"
+    && dpkg -i "${PKG_VERSION}.deb" )
 
 ENV CCACHE_DIR=/test_output/ccache
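
Rather than maintain a separate Dockerfile, the PVS image wraps each expensive RUN body in a guard: on arm64 the test succeeds and the subshell never runs, so the build still produces a valid (if empty) image instead of failing. The pattern in isolation, as a small sketch:

# On arm64 the test passes and short-circuits the ||;
# on any other arch the subshell does the real work.
TARGETARCH=arm64
test "x$TARGETARCH" = xarm64 || (
    echo "this only runs on non-arm64 builds"
)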


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/split-build-smoke-test .
-FROM clickhouse/binary-builder
+ARG FROM_TAG=latest
+FROM clickhouse/binary-builder:$FROM_TAG
 
 COPY run.sh /run.sh
 COPY process_split_build_smoke_test_result.py /


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/stateful-test .
-FROM clickhouse/stateless-test
+ARG FROM_TAG=latest
+FROM clickhouse/stateless-test:$FROM_TAG
 
 RUN apt-get update -y \
     && env DEBIAN_FRONTEND=noninteractive \


@@ -1,11 +1,9 @@
 # docker build -t clickhouse/stateless-test .
-FROM clickhouse/test-base
+ARG FROM_TAG=latest
+FROM clickhouse/test-base:$FROM_TAG
 
 ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
 
-RUN echo "deb [trusted=yes] http://repo.mysql.com/apt/ubuntu/ bionic mysql-5.7" >> /etc/apt/sources.list \
-    && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 8C718D3B5072E1F5
-
 RUN apt-get update -y \
     && env DEBIAN_FRONTEND=noninteractive \
     apt-get install --yes --no-install-recommends \
@@ -30,7 +28,7 @@ RUN apt-get update -y \
     tree \
     unixodbc \
     wget \
-    mysql-client=5.7* \
+    mysql-client=8.0* \
     postgresql-client \
     sqlite3
@@ -49,10 +47,13 @@ RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
 ENV NUM_TRIES=1
 ENV MAX_RUN_TIME=0
 
+ARG TARGETARCH
+
 # Download Minio-related binaries
-RUN wget 'https://dl.min.io/server/minio/release/linux-amd64/minio' \
+RUN arch=${TARGETARCH:-amd64} \
+    && wget "https://dl.min.io/server/minio/release/linux-${arch}/minio" \
     && chmod +x ./minio \
-    && wget 'https://dl.min.io/client/mc/release/linux-amd64/mc' \
+    && wget "https://dl.min.io/client/mc/release/linux-${arch}/mc" \
     && chmod +x ./mc
 
 ENV MINIO_ROOT_USER="clickhouse"


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/stateless-pytest .
-FROM clickhouse/test-base
+ARG FROM_TAG=latest
+FROM clickhouse/test-base:$FROM_TAG
 
 RUN apt-get update -y && \
     apt-get install -y --no-install-recommends \


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/stress-test .
-FROM clickhouse/stateful-test
+ARG FROM_TAG=latest
+FROM clickhouse/stateful-test:$FROM_TAG
 
 RUN apt-get update -y \
     && env DEBIAN_FRONTEND=noninteractive \


@@ -1,5 +1,6 @@
 # docker build -t clickhouse/unit-test .
-FROM clickhouse/stateless-test
+ARG FROM_TAG=latest
+FROM clickhouse/stateless-test:$FROM_TAG
 
 RUN apt-get install gdb


@@ -1,3 +1,4 @@
+# rebuild in #32911
 # docker build -t clickhouse/test-util .
 FROM ubuntu:20.04


@@ -209,6 +209,8 @@ CI_CONFIG = {
         ],
     },
     "tests_config": {
+        # required_build - build name for artifacts
+        # force_tests - force success status for tests
        "Stateful tests (address, actions)": {
             "required_build": "package_asan",
         },
@@ -227,6 +229,10 @@ CI_CONFIG = {
         "Stateful tests (release, actions)": {
             "required_build": "package_release",
         },
+        "Stateful tests (aarch64, actions)": {
+            "required_build": "package_aarch64",
+            "force_tests": True,
+        },
         "Stateful tests (release, DatabaseOrdinary, actions)": {
             "required_build": "package_release",
         },
@@ -251,6 +257,10 @@ CI_CONFIG = {
         "Stateless tests (release, actions)": {
             "required_build": "package_release",
         },
+        "Stateless tests (aarch64, actions)": {
+            "required_build": "package_aarch64",
+            "force_tests": True,
+        },
         "Stateless tests (release, wide parts enabled, actions)": {
             "required_build": "package_release",
         },


@@ -3,7 +3,7 @@ import time
 import logging
 import json
 
-import requests
+import requests  # type: ignore
 
 from get_robot_token import get_parameter_from_ssm
 
 class ClickHouseHelper:


@@ -2,10 +2,17 @@
 
 import time
 
 from env_helper import GITHUB_REPOSITORY
+from ci_config import CI_CONFIG
 
 RETRY = 5
 
+
+def override_status(status, check_name):
+    if CI_CONFIG["tests_config"][check_name].get("force_tests", False):
+        return "success"
+    return status
+
+
 def get_commit(gh, commit_sha, retry_count=RETRY):
     for i in range(retry_count):
         try:
@@ -25,7 +32,12 @@ def post_commit_status(gh, sha, check_name, description, state, report_url):
     for i in range(RETRY):
         try:
             commit = get_commit(gh, sha, 1)
-            commit.create_status(context=check_name, description=description, state=state, target_url=report_url)
+            commit.create_status(
+                context=check_name,
+                description=description,
+                state=state,
+                target_url=report_url,
+            )
             break
         except Exception as ex:
             if i == RETRY - 1:


@ -1,10 +1,13 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import subprocess import argparse
import logging
import json import json
import logging
import os import os
import time
import shutil import shutil
import subprocess
import time
from typing import List, Tuple
from github import Github from github import Github
from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP
@ -12,40 +15,52 @@ from s3_helper import S3Helper
from pr_info import PRInfo from pr_info import PRInfo
from get_robot_token import get_best_robot_token, get_parameter_from_ssm from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from upload_result_helper import upload_results from upload_result_helper import upload_results
from commit_status_helper import get_commit from commit_status_helper import post_commit_status
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from stopwatch import Stopwatch from stopwatch import Stopwatch
NAME = "Push to Dockerhub (actions)" NAME = "Push to Dockerhub (actions)"
def get_changed_docker_images(pr_info, repo_path, image_file_path): TEMP_PATH = os.path.join(RUNNER_TEMP, "docker_images_check")
def get_changed_docker_images(
pr_info: PRInfo, repo_path: str, image_file_path: str
) -> List[Tuple[str, str]]:
images_dict = {} images_dict = {}
path_to_images_file = os.path.join(repo_path, image_file_path) path_to_images_file = os.path.join(repo_path, image_file_path)
if os.path.exists(path_to_images_file): if os.path.exists(path_to_images_file):
with open(path_to_images_file, 'r') as dict_file: with open(path_to_images_file, "r") as dict_file:
images_dict = json.load(dict_file) images_dict = json.load(dict_file)
else: else:
logging.info("Image file %s doesnt exists in repo %s", image_file_path, repo_path) logging.info(
"Image file %s doesnt exists in repo %s", image_file_path, repo_path
)
dockerhub_repo_name = 'yandex'
if not images_dict: if not images_dict:
return [], dockerhub_repo_name return []
files_changed = pr_info.changed_files files_changed = pr_info.changed_files
logging.info("Changed files for PR %s @ %s: %s", pr_info.number, pr_info.sha, str(files_changed)) logging.info(
"Changed files for PR %s @ %s: %s",
pr_info.number,
pr_info.sha,
str(files_changed),
)
changed_images = [] changed_images = []
for dockerfile_dir, image_description in images_dict.items(): for dockerfile_dir, image_description in images_dict.items():
if image_description['name'].startswith('clickhouse/'):
dockerhub_repo_name = 'clickhouse'
for f in files_changed: for f in files_changed:
if f.startswith(dockerfile_dir): if f.startswith(dockerfile_dir):
logging.info( logging.info(
"Found changed file '%s' which affects docker image '%s' with path '%s'", "Found changed file '%s' which affects "
f, image_description['name'], dockerfile_dir) "docker image '%s' with path '%s'",
f,
image_description["name"],
dockerfile_dir,
)
changed_images.append(dockerfile_dir) changed_images.append(dockerfile_dir)
break break
@ -54,15 +69,20 @@ def get_changed_docker_images(pr_info, repo_path, image_file_path):
index = 0 index = 0
while index < len(changed_images): while index < len(changed_images):
image = changed_images[index] image = changed_images[index]
for dependent in images_dict[image]['dependent']: for dependent in images_dict[image]["dependent"]:
logging.info( logging.info(
"Marking docker image '%s' as changed because it depends on changed docker image '%s'", "Marking docker image '%s' as changed because it "
dependent, image) "depends on changed docker image '%s'",
dependent,
image,
)
changed_images.append(dependent) changed_images.append(dependent)
index += 1 index += 1
if index > 100: if index > 5 * len(images_dict):
# Sanity check to prevent infinite loop. # Sanity check to prevent infinite loop.
raise RuntimeError("Too many changed docker images, this is a bug." + str(changed_images)) raise RuntimeError(
f"Too many changed docker images, this is a bug. {changed_images}"
)
# If a dependent image was already in the list because its own files # If a dependent image was already in the list because its own files
# changed, but then it was added as a dependent of a changed base, we # changed, but then it was added as a dependent of a changed base, we
@ -76,140 +96,248 @@ def get_changed_docker_images(pr_info, repo_path, image_file_path):
seen.add(x) seen.add(x)
no_dups_reversed.append(x) no_dups_reversed.append(x)
result = [(x, images_dict[x]['name']) for x in reversed(no_dups_reversed)] result = [(x, images_dict[x]["name"]) for x in reversed(no_dups_reversed)]
logging.info("Changed docker images for PR %s @ %s: '%s'", pr_info.number, pr_info.sha, result) logging.info(
return result, dockerhub_repo_name "Changed docker images for PR %s @ %s: '%s'",
pr_info.number,
pr_info.sha,
result,
)
return result
-def build_and_push_one_image(path_to_dockerfile_folder, image_name, version_string):
-    logging.info("Building docker image %s with version %s from path %s", image_name, version_string, path_to_dockerfile_folder)
-    build_log = None
-    push_log = None
-    with open('build_log_' + str(image_name).replace('/', '_') + "_" + version_string, 'w') as pl:
-        cmd = "docker build --network=host -t {im}:{ver} {path}".format(im=image_name, ver=version_string, path=path_to_dockerfile_folder)
-        retcode = subprocess.Popen(cmd, shell=True, stderr=pl, stdout=pl).wait()
-        build_log = str(pl.name)
-        if retcode != 0:
-            return False, build_log, None
-    with open('tag_log_' + str(image_name).replace('/', '_') + "_" + version_string, 'w') as pl:
-        cmd = "docker build --network=host -t {im} {path}".format(im=image_name, path=path_to_dockerfile_folder)
-        retcode = subprocess.Popen(cmd, shell=True, stderr=pl, stdout=pl).wait()
-        build_log = str(pl.name)
-        if retcode != 0:
-            return False, build_log, None
-    logging.info("Pushing image %s to dockerhub", image_name)
-    with open('push_log_' + str(image_name).replace('/', '_') + "_" + version_string, 'w') as pl:
-        cmd = "docker push {im}:{ver}".format(im=image_name, ver=version_string)
-        retcode = subprocess.Popen(cmd, shell=True, stderr=pl, stdout=pl).wait()
-        push_log = str(pl.name)
-        if retcode != 0:
-            return False, build_log, push_log
+def build_and_push_one_image(
+    path_to_dockerfile_folder: str, image_name: str, version_string: str, push: bool
+) -> Tuple[bool, str]:
+    path = path_to_dockerfile_folder
+    logging.info(
+        "Building docker image %s with version %s from path %s",
+        image_name,
+        version_string,
+        path,
+    )
+    build_log = os.path.join(
+        TEMP_PATH,
+        "build_and_push_log_{}_{}".format(
+            str(image_name).replace("/", "_"), version_string
+        ),
+    )
+    push_arg = ""
+    if push:
+        push_arg = "--push "
+    with open(build_log, "w") as bl:
+        cmd = (
+            "docker buildx build --builder default "
+            f"--build-arg FROM_TAG={version_string} "
+            f"--build-arg BUILDKIT_INLINE_CACHE=1 "
+            f"--tag {image_name}:{version_string} "
+            f"--cache-from type=registry,ref={image_name}:{version_string} "
+            f"{push_arg}"
+            f"--progress plain {path}"
+        )
+        logging.info("Docker command to run: %s", cmd)
+        retcode = subprocess.Popen(cmd, shell=True, stderr=bl, stdout=bl).wait()
+        if retcode != 0:
+            return False, build_log
    logging.info("Processing of %s successfully finished", image_name)
-    return True, build_log, push_log
+    return True, build_log
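
To make the caching scheme concrete, here is a sketch of the command string the code above composes; the image name and tag below are invented examples. Pushing the PR-number tag and pulling it back through --cache-from with BUILDKIT_INLINE_CACHE=1 is what lets a fresh runner reuse layers from an earlier build of the same PR.

# Illustration only; image and tag are invented.
image_name = "clickhouse/binary-builder"
version_string = "32911-deadbeef"
push_arg = "--push "  # empty string when pushing is disabled

cmd = (
    "docker buildx build --builder default "
    f"--build-arg FROM_TAG={version_string} "
    f"--build-arg BUILDKIT_INLINE_CACHE=1 "
    f"--tag {image_name}:{version_string} "
    f"--cache-from type=registry,ref={image_name}:{version_string} "
    f"{push_arg}"
    "--progress plain docker/packager/binary"
)
print(cmd)
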
-def process_single_image(versions, path_to_dockerfile_folder, image_name):
-    logging.info("Image will be pushed with versions %s", ', '.join(versions))
+def process_single_image(
+    versions: List[str], path_to_dockerfile_folder: str, image_name: str, push: bool
+) -> List[Tuple[str, str, str]]:
+    logging.info("Image will be pushed with versions %s", ", ".join(versions))
    result = []
    for ver in versions:
        for i in range(5):
-            success, build_log, push_log = build_and_push_one_image(path_to_dockerfile_folder, image_name, ver)
+            success, build_log = build_and_push_one_image(
+                path_to_dockerfile_folder, image_name, ver, push
+            )
            if success:
-                result.append((image_name + ":" + ver, build_log, push_log, 'OK'))
+                result.append((image_name + ":" + ver, build_log, "OK"))
                break
-            logging.info("Got error will retry %s time and sleep for %s seconds", i, i * 5)
+            logging.info(
+                "Got error will retry %s time and sleep for %s seconds", i, i * 5
+            )
            time.sleep(i * 5)
        else:
-            result.append((image_name + ":" + ver, build_log, push_log, 'FAIL'))
+            result.append((image_name + ":" + ver, build_log, "FAIL"))
    logging.info("Processing finished")
    return result
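
The retry loop above leans on Python's for ... else: the else branch runs only when the loop finished without break, i.e. all five attempts failed. A self-contained sketch of the same pattern with a stubbed build step:

import time

def flaky_build(attempt):
    # Stub standing in for build_and_push_one_image; succeeds on the third try.
    return attempt >= 2

for i in range(5):
    if flaky_build(i):
        print(f"attempt {i}: OK")
        break
    print(f"attempt {i}: failed, sleeping {i * 5}s")
    time.sleep(i * 5)
else:
    print("FAIL after 5 attempts")  # reached only if no attempt broke out
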
-def process_test_results(s3_client, test_results, s3_path_prefix):
-    overall_status = 'success'
+def process_test_results(
+    s3_client: S3Helper, test_results: List[Tuple[str, str, str]], s3_path_prefix: str
+) -> Tuple[str, List[Tuple[str, str]]]:
+    overall_status = "success"
    processed_test_results = []
-    for image, build_log, push_log, status in test_results:
-        if status != 'OK':
-            overall_status = 'failure'
-        url_part = ''
+    for image, build_log, status in test_results:
+        if status != "OK":
+            overall_status = "failure"
+        url_part = ""
        if build_log is not None and os.path.exists(build_log):
            build_url = s3_client.upload_test_report_to_s3(
-                build_log,
-                s3_path_prefix + "/" + os.path.basename(build_log))
+                build_log, s3_path_prefix + "/" + os.path.basename(build_log)
+            )
            url_part += '<a href="{}">build_log</a>'.format(build_url)
-        if push_log is not None and os.path.exists(push_log):
-            push_url = s3_client.upload_test_report_to_s3(
-                push_log,
-                s3_path_prefix + "/" + os.path.basename(push_log))
-            if url_part:
-                url_part += ', '
-            url_part += '<a href="{}">push_log</a>'.format(push_url)
        if url_part:
-            test_name = image + ' (' + url_part + ')'
+            test_name = image + " (" + url_part + ")"
        else:
            test_name = image
        processed_test_results.append((test_name, status))
    return overall_status, processed_test_results
-if __name__ == "__main__":
-    logging.basicConfig(level=logging.INFO)
+def parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+        description="Program to build changed or given docker images with all "
+        "dependent images. Example for local running: "
+        "python docker_images_check.py --no-push-images --no-reports "
+        "--image-path docker/packager/binary",
+    )
+    parser.add_argument(
+        "--suffix",
+        type=str,
+        help="suffix for all built images' tags and the resulting json file; the "
+        "parameter significantly changes the script behavior, e.g. changed_images.json "
+        "is called changed_images_{suffix}.json and contains a list of all tags",
+    )
+    parser.add_argument(
+        "--repo",
+        type=str,
+        default="clickhouse",
+        help="docker hub repository prefix",
+    )
+    parser.add_argument(
+        "--image-path",
+        type=str,
+        action="append",
+        help="list of image paths to build instead of using pr_info + diff URL, "
+        "e.g. 'docker/packager/binary'",
+    )
+    parser.add_argument(
+        "--no-reports",
+        action="store_true",
+        help="don't push reports to S3 and github",
+    )
+    parser.add_argument(
+        "--no-push-images",
+        action="store_true",
+        help="don't push images to docker hub",
+    )
+    return parser.parse_args()
+
+
+def main():
+    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
+    args = parse_args()
+    if args.suffix:
+        global NAME
+        NAME += f" {args.suffix}"
+        changed_json = os.path.join(TEMP_PATH, f"changed_images_{args.suffix}.json")
+    else:
+        changed_json = os.path.join(TEMP_PATH, "changed_images.json")
+    push = not args.no_push_images
+    if push:
+        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
+            "docker login --username 'robotclickhouse' --password-stdin",
+            input=get_parameter_from_ssm("dockerhub_robot_password"),
+            encoding="utf-8",
+            shell=True,
+        )
    repo_path = GITHUB_WORKSPACE
-    temp_path = os.path.join(RUNNER_TEMP, 'docker_images_check')
-    dockerhub_password = get_parameter_from_ssm('dockerhub_robot_password')
-    if os.path.exists(temp_path):
-        shutil.rmtree(temp_path)
-    if not os.path.exists(temp_path):
-        os.makedirs(temp_path)
-    pr_info = PRInfo(need_changed_files=True)
-    changed_images, dockerhub_repo_name = get_changed_docker_images(pr_info, repo_path, "docker/images.json")
-    logging.info("Has changed images %s", ', '.join([str(image[0]) for image in changed_images]))
-    pr_commit_version = str(pr_info.number) + '-' + pr_info.sha
-    versions = [str(pr_info.number), pr_commit_version]
-    if pr_info.number == 0:
-        versions.append("latest")
-    subprocess.check_output("docker login --username 'robotclickhouse' --password '{}'".format(dockerhub_password), shell=True)
+    if os.path.exists(TEMP_PATH):
+        shutil.rmtree(TEMP_PATH)
+    os.makedirs(TEMP_PATH)
+    if args.image_path:
+        pr_info = PRInfo()
+        pr_info.changed_files = set(i for i in args.image_path)
+    else:
+        pr_info = PRInfo(need_changed_files=True)
+    changed_images = get_changed_docker_images(pr_info, repo_path, "docker/images.json")
+    logging.info(
+        "Has changed images %s", ", ".join([str(image[0]) for image in changed_images])
+    )
+    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
+    # The order is important, PR number is used as cache during the build
+    versions = [str(pr_info.number), pr_commit_version]
+    result_version = pr_commit_version
+    if pr_info.number == 0:
+        # First get the latest for cache
+        versions.insert(0, "latest")
+    if args.suffix:
+        # We should build architecture specific images separately and merge a
+        # manifest later in a different script
+        versions = [f"{v}-{args.suffix}" for v in versions]
+        # changed_images_{suffix}.json should contain all changed images
+        result_version = versions
    result_images = {}
    images_processing_result = []
    for rel_path, image_name in changed_images:
        full_path = os.path.join(repo_path, rel_path)
-        images_processing_result += process_single_image(versions, full_path, image_name)
-        result_images[image_name] = pr_commit_version
+        images_processing_result += process_single_image(
+            versions, full_path, image_name, push
+        )
+        result_images[image_name] = result_version
    if changed_images:
-        description = "Updated " + ','.join([im[1] for im in changed_images])
+        description = "Updated " + ",".join([im[1] for im in changed_images])
    else:
        description = "Nothing to update"
    if len(description) >= 140:
        description = description[:136] + "..."
-    s3_helper = S3Helper('https://s3.amazonaws.com')
-    s3_path_prefix = str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(' ', '_')
-    status, test_results = process_test_results(s3_helper, images_processing_result, s3_path_prefix)
-    ch_helper = ClickHouseHelper()
-    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)
-    with open(os.path.join(temp_path, 'changed_images.json'), 'w') as images_file:
+    with open(changed_json, "w") as images_file:
        json.dump(result_images, images_file)
-    print("::notice ::Report url: {}".format(url))
-    print("::set-output name=url_output::\"{}\"".format(url))
-    gh = Github(get_best_robot_token())
-    commit = get_commit(gh, pr_info.sha)
-    commit.create_status(context=NAME, description=description, state=status, target_url=url)
-    prepared_events = prepare_tests_results_for_clickhouse(pr_info, test_results, status, stopwatch.duration_seconds, stopwatch.start_time_str, url, NAME)
+    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_path_prefix = (
+        str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
+    )
+    status, test_results = process_test_results(
+        s3_helper, images_processing_result, s3_path_prefix
+    )
+    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)
+    print("::notice ::Report url: {}".format(url))
+    print('::set-output name=url_output::"{}"'.format(url))
+    if args.no_reports:
+        return
+    gh = Github(get_best_robot_token())
+    post_commit_status(gh, pr_info.sha, NAME, description, status, url)
+    prepared_events = prepare_tests_results_for_clickhouse(
+        pr_info,
+        test_results,
+        status,
+        stopwatch.duration_seconds,
+        stopwatch.start_time_str,
+        url,
+        NAME,
+    )
+    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
+
+
+if __name__ == "__main__":
+    main()
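
The shape of the JSON artifact therefore depends on --suffix; a sketch of both variants, with invented tags:

import json

# Illustration only. Without --suffix: changed_images.json maps image name
# to a single tag.
print(json.dumps({"clickhouse/binary-builder": "32911-deadbeef"}))

# With --suffix aarch64: changed_images_aarch64.json keeps the whole list of
# suffixed tags, so the manifest-merge script can pair them up per index.
print(json.dumps(
    {"clickhouse/binary-builder": ["32911-aarch64", "32911-deadbeef-aarch64"]}
))
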

View File

@ -0,0 +1,235 @@
#!/usr/bin/env python3
import argparse
import json
import logging
import os
import subprocess

from typing import List, Dict, Tuple
from github import Github

from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import post_commit_status
from env_helper import RUNNER_TEMP
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from s3_helper import S3Helper
from stopwatch import Stopwatch
from upload_result_helper import upload_results

NAME = "Push multi-arch images to Dockerhub (actions)"
CHANGED_IMAGES = "changed_images_{}.json"
Images = Dict[str, List[str]]


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="The program gets images from changed_images_*.json, merges images "
        "with different architectures into one manifest and pushes back to docker hub",
    )
    parser.add_argument(
        "--suffix",
        dest="suffixes",
        type=str,
        required=True,
        action="append",
        help="suffixes for existing images' tags; at least two should be given",
    )
    parser.add_argument(
        "--path",
        type=str,
        default=RUNNER_TEMP,
        help="path to changed_images_*.json files",
    )
    parser.add_argument(
        "--no-reports",
        action="store_true",
        help="don't push reports to S3 and github",
    )
    parser.add_argument(
        "--no-push-images",
        action="store_true",
        help="don't push images to docker hub",
    )
    args = parser.parse_args()
    if len(args.suffixes) < 2:
        parser.error("at least two --suffix arguments should be given")

    return args

def load_images(path: str, suffix: str) -> Images:
    with open(os.path.join(path, CHANGED_IMAGES.format(suffix)), "r") as images:
        return json.load(images)

def strip_suffix(suffix: str, images: Images) -> Images:
    result = {}
    for image, versions in images.items():
        for v in versions:
            if not v.endswith(f"-{suffix}"):
                raise ValueError(
                    f"version {image}:{v} does not contain suffix {suffix}"
                )
        result[image] = [v[: -len(suffix) - 1] for v in versions]

    return result
def check_sources(to_merge: Dict[str, Images]) -> Images:
    result = {}  # type: Images
    first_suffix = ""
    for suffix, images in to_merge.items():
        if not result:
            first_suffix = suffix
            result = strip_suffix(suffix, images)
            continue
        if not result == strip_suffix(suffix, images):
            raise ValueError(
                f"images in {images} are not equal to {to_merge[first_suffix]}"
            )

    return result

def get_changed_images(images: Images) -> Dict[str, str]:
    """The original json format is {"image": "tag"}, so the output artifact is
    produced here. The latest version is {PR_NUMBER}-{SHA1}
    """
    return {k: v[-1] for k, v in images.items()}
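
With invented input the transformation looks like this; the last list element is the most specific {PR_NUMBER}-{SHA1} tag:

images = {"clickhouse/binary-builder": ["latest", "123", "123-deadbeef"]}
assert {k: v[-1] for k, v in images.items()} == {
    "clickhouse/binary-builder": "123-deadbeef"
}
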
def merge_images(to_merge: Dict[str, Images]) -> Dict[str, List[List[str]]]:
    """The function merges image-name:version-suffix1 and image-name:version-suffix2
    into image-name:version"""
    suffixes = to_merge.keys()
    result_images = check_sources(to_merge)
    merge = {}  # type: Dict[str, List[List[str]]]

    for image, versions in result_images.items():
        merge[image] = []
        for i, v in enumerate(versions):
            merged_v = [v]  # type: List[str]
            for suf in suffixes:
                merged_v.append(to_merge[suf][image][i])
            merge[image].append(merged_v)

    return merge
def create_manifest(image: str, tags: List[str], push: bool) -> Tuple[str, str]:
    tag = tags[0]
    manifest = f"{image}:{tag}"
    cmd = "docker manifest create --amend {}".format(
        " ".join((f"{image}:{t}" for t in tags))
    )
    logging.info("running: %s", cmd)
    popen = subprocess.Popen(
        cmd,
        shell=True,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    retcode = popen.wait()
    if retcode != 0:
        output = popen.stdout.read()  # type: ignore
        logging.error("failed to create manifest for %s:\n %s\n", manifest, output)
        return manifest, "FAIL"
    if not push:
        return manifest, "OK"

    cmd = f"docker manifest push {manifest}"
    logging.info("running: %s", cmd)
    popen = subprocess.Popen(
        cmd,
        shell=True,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    retcode = popen.wait()
    if retcode != 0:
        output = popen.stdout.read()  # type: ignore
        logging.error("failed to push %s:\n %s\n", manifest, output)
        return manifest, "FAIL"

    return manifest, "OK"
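
With a merged entry like the one above, the two commands issued per tag list come out as follows (image and tags invented):

image = "clickhouse/binary-builder"
tags = ["123", "123-amd64", "123-aarch64"]  # tags[0] names the manifest

create_cmd = "docker manifest create --amend {}".format(
    " ".join(f"{image}:{t}" for t in tags)
)
push_cmd = f"docker manifest push {image}:{tags[0]}"
print(create_cmd)
print(push_cmd)
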
def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    push = not args.no_push_images
    if push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))

    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, push)
            test_results.append((manifest, test_result))
            if test_result != "OK":
                status = "failure"

    with open(os.path.join(args.path, "changed_images.json"), "w") as ci:
        json.dump(changed_images, ci)

    pr_info = PRInfo()
    s3_helper = S3Helper("https://s3.amazonaws.com")

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print("::notice ::Report url: {}".format(url))
    print('::set-output name=url_output::"{}"'.format(url))

    if args.no_reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)


if __name__ == "__main__":
    main()

View File

@ -15,7 +15,7 @@ from pr_info import PRInfo
from build_download_helper import download_all_deb_packages
from upload_result_helper import upload_results
from docker_pull_helper import get_image_with_version
-from commit_status_helper import post_commit_status, get_commit
+from commit_status_helper import post_commit_status, get_commit, override_status
from clickhouse_helper import ClickHouseHelper, mark_flaky_tests, prepare_tests_results_for_clickhouse
from stopwatch import Stopwatch
from rerun_helper import RerunHelper
@ -197,7 +197,9 @@ if __name__ == "__main__":
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
    s3_helper = S3Helper('https://s3.amazonaws.com')
    state, description, test_results, additional_logs = process_results(result_path, server_log_path)
+    state = override_status(state, check_name)
    ch_helper = ClickHouseHelper()
    mark_flaky_tests(ch_helper, check_name, test_results)

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python3
-import boto3
-from github import Github
+import boto3  # type: ignore
+from github import Github  # type: ignore

def get_parameter_from_ssm(name, decrypt=True, client=None):
    if not client:

View File

@ -9,20 +9,23 @@ import time
from collections import namedtuple
import boto3

def get_dead_runners_in_ec2(runners):
-    ids = {runner.name: runner for runner in runners if runner.offline == True and runner.busy == False}
+    ids = {
+        runner.name: runner for runner in runners if runner.offline and not runner.busy
+    }
    if not ids:
        return []
-    client = boto3.client('ec2')
+    client = boto3.client("ec2")
    print("Checking ids", list(ids.keys()))
    instances_statuses = client.describe_instance_status(InstanceIds=list(ids.keys()))
    found_instances = set([])
    print("Response", instances_statuses)
-    for instance_status in instances_statuses['InstanceStatuses']:
-        if instance_status['InstanceState']['Name'] in ('pending', 'running'):
-            found_instances.add(instance_status['InstanceId'])
+    for instance_status in instances_statuses["InstanceStatuses"]:
+        if instance_status["InstanceState"]["Name"] in ("pending", "running"):
+            found_instances.add(instance_status["InstanceId"])
    print("Found instances", found_instances)
    result_to_delete = []
@ -32,23 +35,25 @@ def get_dead_runners_in_ec2(runners):
            result_to_delete.append(runner)
    return result_to_delete

def get_key_and_app_from_aws():
    import boto3
    secret_name = "clickhouse_github_secret_key"
    session = boto3.session.Session()
    client = session.client(
-        service_name='secretsmanager',
+        service_name="secretsmanager",
    )
-    get_secret_value_response = client.get_secret_value(
-        SecretId=secret_name
-    )
-    data = json.loads(get_secret_value_response['SecretString'])
-    return data['clickhouse-app-key'], int(data['clickhouse-app-id'])
+    get_secret_value_response = client.get_secret_value(SecretId=secret_name)
+    data = json.loads(get_secret_value_response["SecretString"])
+    return data["clickhouse-app-key"], int(data["clickhouse-app-id"])

def handler(event, context):
    private_key, app_id = get_key_and_app_from_aws()
    main(private_key, app_id, True, True)

def get_installation_id(jwt_token):
    headers = {
        "Authorization": f"Bearer {jwt_token}",
@ -57,54 +62,81 @@ def get_installation_id(jwt_token):
    response = requests.get("https://api.github.com/app/installations", headers=headers)
    response.raise_for_status()
    data = response.json()
-    return data[0]['id']
+    return data[0]["id"]

def get_access_token(jwt_token, installation_id):
    headers = {
        "Authorization": f"Bearer {jwt_token}",
        "Accept": "application/vnd.github.v3+json",
    }
-    response = requests.post(f"https://api.github.com/app/installations/{installation_id}/access_tokens", headers=headers)
+    response = requests.post(
+        f"https://api.github.com/app/installations/{installation_id}/access_tokens",
+        headers=headers,
+    )
    response.raise_for_status()
    data = response.json()
-    return data['token']
+    return data["token"]

-RunnerDescription = namedtuple('RunnerDescription', ['id', 'name', 'tags', 'offline', 'busy'])
+RunnerDescription = namedtuple(
+    "RunnerDescription", ["id", "name", "tags", "offline", "busy"]
+)

def list_runners(access_token):
    headers = {
        "Authorization": f"token {access_token}",
        "Accept": "application/vnd.github.v3+json",
    }
-    response = requests.get("https://api.github.com/orgs/ClickHouse/actions/runners?per_page=100", headers=headers)
+    response = requests.get(
+        "https://api.github.com/orgs/ClickHouse/actions/runners?per_page=100",
+        headers=headers,
+    )
    response.raise_for_status()
    data = response.json()
-    total_runners = data['total_count']
-    runners = data['runners']
+    total_runners = data["total_count"]
+    runners = data["runners"]
    total_pages = int(total_runners / 100 + 1)
    print("Total pages", total_pages)
    for i in range(2, total_pages + 1):
-        response = requests.get(f"https://api.github.com/orgs/ClickHouse/actions/runners?page={i}&per_page=100", headers=headers)
+        response = requests.get(
+            "https://api.github.com/orgs/ClickHouse/actions/runners"
+            f"?page={i}&per_page=100",
+            headers=headers,
+        )
        response.raise_for_status()
        data = response.json()
-        runners += data['runners']
+        runners += data["runners"]
    print("Total runners", len(runners))
    result = []
    for runner in runners:
-        tags = [tag['name'] for tag in runner['labels']]
-        desc = RunnerDescription(id=runner['id'], name=runner['name'], tags=tags,
-                                 offline=runner['status']=='offline', busy=runner['busy'])
+        tags = [tag["name"] for tag in runner["labels"]]
+        desc = RunnerDescription(
+            id=runner["id"],
+            name=runner["name"],
+            tags=tags,
+            offline=runner["status"] == "offline",
+            busy=runner["busy"],
+        )
        result.append(desc)
    return result
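
The pagination arithmetic rounds up with int(total / 100 + 1); a quick check with invented counts (an exact multiple of 100 costs one extra, empty page):

total_runners = 250
total_pages = int(total_runners / 100 + 1)
assert total_pages == 3 and list(range(2, total_pages + 1)) == [2, 3]
assert int(200 / 100 + 1) == 3  # page 3 simply comes back empty
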
def group_runners_by_tag(listed_runners):
    result = {}
-    RUNNER_TYPE_LABELS = ['style-checker', 'builder', 'func-tester', 'stress-tester', 'fuzzer-unit-tester']
+    RUNNER_TYPE_LABELS = [
+        "builder",
+        "func-tester",
+        "func-tester-aarch64",
+        "fuzzer-unit-tester",
+        "stress-tester",
+        "style-checker",
+    ]
    for runner in listed_runners:
        for tag in runner.tags:
            if tag in RUNNER_TYPE_LABELS:
@ -113,57 +145,72 @@ def group_runners_by_tag(listed_runners):
                result[tag].append(runner)
                break
        else:
-            if 'unlabeled' not in result:
-                result['unlabeled'] = []
-            result['unlabeled'].append(runner)
+            if "unlabeled" not in result:
+                result["unlabeled"] = []
+            result["unlabeled"].append(runner)
    return result

def push_metrics_to_cloudwatch(listed_runners, namespace):
-    client = boto3.client('cloudwatch')
+    client = boto3.client("cloudwatch")
    metrics_data = []
-    busy_runners = sum(1 for runner in listed_runners if runner.busy and not runner.offline)
-    metrics_data.append({
-        'MetricName': 'BusyRunners',
-        'Value': busy_runners,
-        'Unit': 'Count',
-    })
+    busy_runners = sum(
+        1 for runner in listed_runners if runner.busy and not runner.offline
+    )
+    metrics_data.append(
+        {
+            "MetricName": "BusyRunners",
+            "Value": busy_runners,
+            "Unit": "Count",
+        }
+    )
    total_active_runners = sum(1 for runner in listed_runners if not runner.offline)
-    metrics_data.append({
-        'MetricName': 'ActiveRunners',
-        'Value': total_active_runners,
-        'Unit': 'Count',
-    })
+    metrics_data.append(
+        {
+            "MetricName": "ActiveRunners",
+            "Value": total_active_runners,
+            "Unit": "Count",
+        }
+    )
    total_runners = len(listed_runners)
-    metrics_data.append({
-        'MetricName': 'TotalRunners',
-        'Value': total_runners,
-        'Unit': 'Count',
-    })
+    metrics_data.append(
+        {
+            "MetricName": "TotalRunners",
+            "Value": total_runners,
+            "Unit": "Count",
+        }
+    )
    if total_active_runners == 0:
        busy_ratio = 100
    else:
        busy_ratio = busy_runners / total_active_runners * 100
-    metrics_data.append({
-        'MetricName': 'BusyRunnersRatio',
-        'Value': busy_ratio,
-        'Unit': 'Percent',
-    })
+    metrics_data.append(
+        {
+            "MetricName": "BusyRunnersRatio",
+            "Value": busy_ratio,
+            "Unit": "Percent",
+        }
+    )
    client.put_metric_data(Namespace=namespace, MetricData=metrics_data)
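
A quick check of the ratio arithmetic above, with invented counts:

busy_runners, total_active_runners = 3, 4
busy_ratio = (
    busy_runners / total_active_runners * 100 if total_active_runners else 100
)
assert busy_ratio == 75.0
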
def delete_runner(access_token, runner):
    headers = {
        "Authorization": f"token {access_token}",
        "Accept": "application/vnd.github.v3+json",
    }
-    response = requests.delete(f"https://api.github.com/orgs/ClickHouse/actions/runners/{runner.id}", headers=headers)
+    response = requests.delete(
+        f"https://api.github.com/orgs/ClickHouse/actions/runners/{runner.id}",
+        headers=headers,
+    )
    response.raise_for_status()
    print(f"Response code deleting {runner.name} is {response.status_code}")
    return response.status_code == 204

def main(github_secret_key, github_app_id, push_to_cloudwatch, delete_offline_runners):
    payload = {
        "iat": int(time.time()) - 60,
@ -179,11 +226,11 @@ def main(github_secret_key, github_app_id, push_to_cloudwatch, delete_offline_ru
    for group, group_runners in grouped_runners.items():
        if push_to_cloudwatch:
            print(group)
-            push_metrics_to_cloudwatch(group_runners, 'RunnersMetrics/' + group)
+            push_metrics_to_cloudwatch(group_runners, "RunnersMetrics/" + group)
        else:
            print(group, f"({len(group_runners)})")
            for runner in group_runners:
-                print('\t', runner)
+                print("\t", runner)

    if delete_offline_runners:
        print("Going to delete offline runners")
@ -192,26 +239,43 @@ def main(github_secret_key, github_app_id, push_to_cloudwatch, delete_offline_ru
print("Deleting runner", runner) print("Deleting runner", runner)
delete_runner(access_token, runner) delete_runner(access_token, runner)
if __name__ == "__main__": if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Get list of runners and their states') parser = argparse.ArgumentParser(description="Get list of runners and their states")
parser.add_argument('-p', '--private-key-path', help='Path to file with private key') parser.add_argument(
parser.add_argument('-k', '--private-key', help='Private key') "-p", "--private-key-path", help="Path to file with private key"
parser.add_argument('-a', '--app-id', type=int, help='GitHub application ID', required=True) )
parser.add_argument('--push-to-cloudwatch', action='store_true', help='Store received token in parameter store') parser.add_argument("-k", "--private-key", help="Private key")
parser.add_argument('--delete-offline', action='store_true', help='Remove offline runners') parser.add_argument(
"-a", "--app-id", type=int, help="GitHub application ID", required=True
)
parser.add_argument(
"--push-to-cloudwatch",
action="store_true",
help="Store received token in parameter store",
)
parser.add_argument(
"--delete-offline", action="store_true", help="Remove offline runners"
)
args = parser.parse_args() args = parser.parse_args()
if not args.private_key_path and not args.private_key: if not args.private_key_path and not args.private_key:
print("Either --private-key-path or --private-key must be specified", file=sys.stderr) print(
"Either --private-key-path or --private-key must be specified",
file=sys.stderr,
)
if args.private_key_path and args.private_key: if args.private_key_path and args.private_key:
print("Either --private-key-path or --private-key must be specified", file=sys.stderr) print(
"Either --private-key-path or --private-key must be specified",
file=sys.stderr,
)
if args.private_key: if args.private_key:
private_key = args.private_key private_key = args.private_key
else: else:
with open(args.private_key_path, 'r') as key_file: with open(args.private_key_path, "r") as key_file:
private_key = key_file.read() private_key = key_file.read()
main(private_key, args.app_id, args.push_to_cloudwatch, args.delete_offline) main(private_key, args.app_id, args.push_to_cloudwatch, args.delete_offline)

View File

@ -2,8 +2,8 @@
import json
import os
-import requests
-from unidiff import PatchSet
+import requests  # type: ignore
+from unidiff import PatchSet  # type: ignore

from env_helper import GITHUB_REPOSITORY, GITHUB_SERVER_URL, GITHUB_RUN_ID, GITHUB_EVENT_PATH
@ -38,7 +38,12 @@ class PRInfo:
            with open(GITHUB_EVENT_PATH, 'r', encoding='utf-8') as event_file:
                github_event = json.load(event_file)
        else:
-            github_event = {'commits': 1, 'after': 'HEAD', 'ref': None}
+            github_event = {
+                'commits': 1,
+                'before': 'HEAD~',
+                'after': 'HEAD',
+                'ref': None,
+            }
        self.event = github_event
        self.changed_files = set([])
        self.body = ""
@ -95,7 +100,8 @@ class PRInfo:
        self.task_url = f"{repo_prefix}/actions/runs/{GITHUB_RUN_ID or '0'}"
        self.commit_html_url = f"{repo_prefix}/commits/{self.sha}"
        self.repo_full_name = GITHUB_REPOSITORY
-        if pull_request is None or pull_request['state'] == 'closed':  # it's merged PR to master
+        if pull_request is None or pull_request['state'] == 'closed':
+            # it's merged PR to master
            self.number = 0
            self.labels = {}
            self.pr_html_url = f"{repo_prefix}/commits/{ref}"
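
The added 'before': 'HEAD~' gives the synthetic event the same before/after pair that a real push event carries. PRInfo derives the changed-file set elsewhere, but a hypothetical local equivalent of comparing those two refs would be:

# Hypothetical illustration, assuming a git checkout with at least two commits.
import subprocess

event = {"commits": 1, "before": "HEAD~", "after": "HEAD", "ref": None}
diff = subprocess.check_output(
    ["git", "diff", "--name-only", event["before"], event["after"]],
    encoding="utf-8",
)
print(set(diff.splitlines()))
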

View File

@ -7,7 +7,7 @@ import shutil
import time
from multiprocessing.dummy import Pool
-import boto3
+import boto3  # type: ignore

from env_helper import S3_TEST_REPORTS_BUCKET, S3_BUILDS_BUCKET, RUNNER_TEMP, CI
from compress_files import compress_file_fast

View File

@ -28,6 +28,7 @@ apt-get update
apt-get install --yes --no-install-recommends \
    apt-transport-https \
+    binfmt-support \
    build-essential \
    ca-certificates \
    curl \
@ -37,6 +38,7 @@ apt-get install --yes --no-install-recommends \
    pigz \
    python3-dev \
    python3-pip \
+    qemu-user-static \
    unzip

curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
@ -61,6 +63,10 @@ EOT
systemctl restart docker

+# buildx builder is user-specific
+sudo -u ubuntu docker buildx version
+sudo -u ubuntu docker buildx create --use --name default-builder

pip install boto3 pygithub requests urllib3 unidiff dohq-artifactory

mkdir -p $RUNNER_HOME && cd $RUNNER_HOME
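
binfmt-support plus qemu-user-static are what let an amd64 runner execute aarch64 binaries during cross-builds, and the per-user buildx builder created above is what the build scripts later rely on. A sketch of a sanity check a runner could run (docker buildx ls is a real command; the check itself is illustrative):

import subprocess

out = subprocess.check_output(["docker", "buildx", "ls"], encoding="utf-8")
if "default-builder" not in out:
    raise SystemExit("buildx builder 'default-builder' is missing")
print("buildx builder present")
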