Merge pull request #21019 from azat/yamllint

yamllint style check
alexey-milovidov, 2021-02-22 13:22:27 +03:00 (committed by GitHub)
commit 9af4bfdff6
26 changed files with 138 additions and 115 deletions
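
Most of the churn in this diff is YAML normalization so that the newly added linter passes: sequence indentation is made consistent, and bare scalars such as off (in codecov.yml) and on (in the GitHub Actions workflow) are quoted, because YAML 1.1 parsers resolve them to booleans rather than strings and yamllint's default ruleset (the truthy rule) flags that. A quick illustration of the parsing difference, assuming python3 with PyYAML (a YAML 1.1 parser) is available; this snippet is not part of the commit:

python3 -c 'import yaml; print(yaml.safe_load("on: [push]"))'
# {True: ['push']}   <- a bare `on` key is parsed as the boolean True
python3 -c 'import yaml; print(yaml.safe_load("\"on\": [push]"))'
# {'on': ['push']}   <- quoting keeps it a plain string key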

.github/codecov.yml

@@ -1,5 +1,5 @@
 codecov:
-max_report_age: off
+max_report_age: "off"
 strict_yaml_branch: "master"
 ignore:
@@ -14,4 +14,4 @@ ignore:
 comment: false
 github_checks:
-annotations: false
+annotations: false


@@ -8,9 +8,9 @@
 name: Docker Container Scan (clickhouse-server)
-on:
+"on":
 pull_request:
-paths:
+paths:
 - docker/server/Dockerfile
 - .github/workflows/anchore-analysis.yml
 schedule:
@@ -20,20 +20,20 @@ jobs:
 Anchore-Build-Scan:
 runs-on: ubuntu-latest
 steps:
-- name: Checkout the code
-uses: actions/checkout@v2
-- name: Build the Docker image
-run: |
-cd docker/server
-perl -pi -e 's|=\$version||g' Dockerfile
-docker build . --file Dockerfile --tag localbuild/testimage:latest
-- name: Run the local Anchore scan action itself with GitHub Advanced Security code scanning integration enabled
-uses: anchore/scan-action@v2
-id: scan
-with:
-image: "localbuild/testimage:latest"
-acs-report-enable: true
-- name: Upload Anchore Scan Report
-uses: github/codeql-action/upload-sarif@v1
-with:
-sarif_file: ${{ steps.scan.outputs.sarif }}
+- name: Checkout the code
+uses: actions/checkout@v2
+- name: Build the Docker image
+run: |
+cd docker/server
+perl -pi -e 's|=\$version||g' Dockerfile
+docker build . --file Dockerfile --tag localbuild/testimage:latest
+- name: Run the local Anchore scan action itself with GitHub Advanced Security code scanning integration enabled
+uses: anchore/scan-action@v2
+id: scan
+with:
+image: "localbuild/testimage:latest"
+acs-report-enable: true
+- name: Upload Anchore Scan Report
+uses: github/codeql-action/upload-sarif@v1
+with:
+sarif_file: ${{ steps.scan.outputs.sarif }}


@@ -14,14 +14,14 @@ handlers:
 # The trigger for creating the Yandex.Tracker issue. When the specified event occurs, it transfers PR data to Yandex.Tracker.
 github:pullRequest:labeled:
 data:
-# The Yandex.Tracker queue to create the issue in. Each issue in Tracker belongs to one of the project queues.
-queue: CLICKHOUSEDOCS
-# The issue title.
-summary: '[Potato] Pull Request #{{pullRequest.number}}'
-# The issue description.
-description: >
+# The Yandex.Tracker queue to create the issue in. Each issue in Tracker belongs to one of the project queues.
+queue: CLICKHOUSEDOCS
+# The issue title.
+summary: '[Potato] Pull Request #{{pullRequest.number}}'
+# The issue description.
+description: >
 {{pullRequest.description}}
 Ссылка на Pull Request: {{pullRequest.webUrl}}
-# The condition for creating the Yandex.Tracker issue.
-condition: eventPayload.labels.filter(label => ['pr-feature'].includes(label.name)).length
+# The condition for creating the Yandex.Tracker issue.
+condition: eventPayload.labels.filter(label => ['pr-feature'].includes(label.name)).length

.yamllint (new file)

@@ -0,0 +1,15 @@
+# vi: ft=yaml
+extends: default
+rules:
+  indentation:
+    level: warning
+    indent-sequences: consistent
+  line-length:
+    # there are some bash -c "", so this is OK
+    max: 300
+    level: warning
+  comments:
+    min-spaces-from-content: 1
+  document-start:
+    present: false

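The new configuration is consumed via yamllint's --config-file option (see the check-style change at the end of this diff). A minimal local run against one of the files touched here, assuming yamllint is installed (the style-check Dockerfile below adds it, and pip3 install yamllint works as well):

yamllint --config-file=.yamllint .github/codecov.yml

With the levels chosen above, indentation and line-length findings are warnings; yamllint exits non-zero only for errors, or for warnings when --strict is given.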

@@ -4,4 +4,4 @@ services:
 image: cassandra
 restart: always
 ports:
-- 9043:9042
+- 9043:9042


@@ -5,6 +5,6 @@ services:
 hostname: hdfs1
 restart: always
 ports:
-- 50075:50075
-- 50070:50070
+- 50075:50075
+- 50070:50070
 entrypoint: /etc/bootstrap.sh -d


@@ -5,42 +5,42 @@ services:
 image: zookeeper:3.4.9
 hostname: kafka_zookeeper
 environment:
-ZOO_MY_ID: 1
-ZOO_PORT: 2181
-ZOO_SERVERS: server.1=kafka_zookeeper:2888:3888
+ZOO_MY_ID: 1
+ZOO_PORT: 2181
+ZOO_SERVERS: server.1=kafka_zookeeper:2888:3888
 security_opt:
-- label:disable
+- label:disable
 kafka1:
 image: confluentinc/cp-kafka:5.2.0
 hostname: kafka1
 ports:
-- "9092:9092"
+- "9092:9092"
 environment:
-KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:9092,OUTSIDE://kafka1:19092
-KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:19092
-KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
-KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
-KAFKA_BROKER_ID: 1
-KAFKA_ZOOKEEPER_CONNECT: "kafka_zookeeper:2181"
-KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
-KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:9092,OUTSIDE://kafka1:19092
+KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:19092
+KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
+KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
+KAFKA_BROKER_ID: 1
+KAFKA_ZOOKEEPER_CONNECT: "kafka_zookeeper:2181"
+KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
+KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
 depends_on:
-- kafka_zookeeper
+- kafka_zookeeper
 security_opt:
-- label:disable
+- label:disable
 schema-registry:
 image: confluentinc/cp-schema-registry:5.2.0
 hostname: schema-registry
 ports:
-- "8081:8081"
+- "8081:8081"
 environment:
-SCHEMA_REGISTRY_HOST_NAME: schema-registry
-SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
-SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
+SCHEMA_REGISTRY_HOST_NAME: schema-registry
+SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
 depends_on:
-- kafka_zookeeper
-- kafka1
+- kafka_zookeeper
+- kafka1
 security_opt:
-- label:disable
+- label:disable


@@ -8,22 +8,22 @@ services:
 hostname: kerberizedhdfs1
 restart: always
 volumes:
-- ${KERBERIZED_HDFS_DIR}/../../hdfs_configs/bootstrap.sh:/etc/bootstrap.sh:ro
-- ${KERBERIZED_HDFS_DIR}/secrets:/usr/local/hadoop/etc/hadoop/conf
-- ${KERBERIZED_HDFS_DIR}/secrets/krb_long.conf:/etc/krb5.conf:ro
+- ${KERBERIZED_HDFS_DIR}/../../hdfs_configs/bootstrap.sh:/etc/bootstrap.sh:ro
+- ${KERBERIZED_HDFS_DIR}/secrets:/usr/local/hadoop/etc/hadoop/conf
+- ${KERBERIZED_HDFS_DIR}/secrets/krb_long.conf:/etc/krb5.conf:ro
 ports:
 - 1006:1006
 - 50070:50070
 - 9010:9010
 depends_on:
-- hdfskerberos
+- hdfskerberos
 entrypoint: /etc/bootstrap.sh -d
 hdfskerberos:
 image: yandex/clickhouse-kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG}
 hostname: hdfskerberos
 volumes:
-- ${KERBERIZED_HDFS_DIR}/secrets:/tmp/keytab
-- ${KERBERIZED_HDFS_DIR}/../../kerberos_image_config.sh:/config.sh
-- /dev/urandom:/dev/random
+- ${KERBERIZED_HDFS_DIR}/secrets:/tmp/keytab
+- ${KERBERIZED_HDFS_DIR}/../../kerberos_image_config.sh:/config.sh
+- /dev/urandom:/dev/random
 ports: [88, 749]


@@ -6,54 +6,54 @@ services:
 # restart: always
 hostname: kafka_kerberized_zookeeper
 environment:
-ZOOKEEPER_SERVER_ID: 1
-ZOOKEEPER_CLIENT_PORT: 2181
-ZOOKEEPER_SERVERS: "kafka_kerberized_zookeeper:2888:3888"
-KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dsun.security.krb5.debug=true"
+ZOOKEEPER_SERVER_ID: 1
+ZOOKEEPER_CLIENT_PORT: 2181
+ZOOKEEPER_SERVERS: "kafka_kerberized_zookeeper:2888:3888"
+KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dsun.security.krb5.debug=true"
 volumes:
-- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
-- /dev/urandom:/dev/random
+- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
+- /dev/urandom:/dev/random
 depends_on:
-- kafka_kerberos
+- kafka_kerberos
 security_opt:
-- label:disable
+- label:disable
 kerberized_kafka1:
 image: confluentinc/cp-kafka:5.2.0
 # restart: always
 hostname: kerberized_kafka1
 ports:
-- "9092:9092"
-- "9093:9093"
+- "9092:9092"
+- "9093:9093"
 environment:
-KAFKA_LISTENERS: OUTSIDE://:19092,UNSECURED_OUTSIDE://:19093,UNSECURED_INSIDE://:9093
-KAFKA_ADVERTISED_LISTENERS: OUTSIDE://kerberized_kafka1:19092,UNSECURED_OUTSIDE://kerberized_kafka1:19093,UNSECURED_INSIDE://localhost:9093
-# KAFKA_LISTENERS: INSIDE://kerberized_kafka1:9092,OUTSIDE://kerberized_kafka1:19092
-# KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:9092,OUTSIDE://kerberized_kafka1:19092
-KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: GSSAPI
-KAFKA_SASL_ENABLED_MECHANISMS: GSSAPI
-KAFKA_SASL_KERBEROS_SERVICE_NAME: kafka
-KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: OUTSIDE:SASL_PLAINTEXT,UNSECURED_OUTSIDE:PLAINTEXT,UNSECURED_INSIDE:PLAINTEXT,
-KAFKA_INTER_BROKER_LISTENER_NAME: OUTSIDE
-KAFKA_BROKER_ID: 1
-KAFKA_ZOOKEEPER_CONNECT: "kafka_kerberized_zookeeper:2181"
-KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
-KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dsun.security.krb5.debug=true"
+KAFKA_LISTENERS: OUTSIDE://:19092,UNSECURED_OUTSIDE://:19093,UNSECURED_INSIDE://:9093
+KAFKA_ADVERTISED_LISTENERS: OUTSIDE://kerberized_kafka1:19092,UNSECURED_OUTSIDE://kerberized_kafka1:19093,UNSECURED_INSIDE://localhost:9093
+# KAFKA_LISTENERS: INSIDE://kerberized_kafka1:9092,OUTSIDE://kerberized_kafka1:19092
+# KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:9092,OUTSIDE://kerberized_kafka1:19092
+KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: GSSAPI
+KAFKA_SASL_ENABLED_MECHANISMS: GSSAPI
+KAFKA_SASL_KERBEROS_SERVICE_NAME: kafka
+KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: OUTSIDE:SASL_PLAINTEXT,UNSECURED_OUTSIDE:PLAINTEXT,UNSECURED_INSIDE:PLAINTEXT,
+KAFKA_INTER_BROKER_LISTENER_NAME: OUTSIDE
+KAFKA_BROKER_ID: 1
+KAFKA_ZOOKEEPER_CONNECT: "kafka_kerberized_zookeeper:2181"
+KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
+KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dsun.security.krb5.debug=true"
 volumes:
-- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
-- /dev/urandom:/dev/random
+- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
+- /dev/urandom:/dev/random
 depends_on:
-- kafka_kerberized_zookeeper
-- kafka_kerberos
+- kafka_kerberized_zookeeper
+- kafka_kerberos
 security_opt:
-- label:disable
+- label:disable
 kafka_kerberos:
 image: yandex/clickhouse-kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest}
 hostname: kafka_kerberos
 volumes:
-- ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab
-- ${KERBERIZED_KAFKA_DIR}/../../kerberos_image_config.sh:/config.sh
-- /dev/urandom:/dev/random
+- ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab
+- ${KERBERIZED_KAFKA_DIR}/../../kerberos_image_config.sh:/config.sh
+- /dev/urandom:/dev/random
 ports: [88, 749]


@@ -7,5 +7,5 @@ services:
 MONGO_INITDB_ROOT_USERNAME: root
 MONGO_INITDB_ROOT_PASSWORD: clickhouse
 ports:
-- 27018:27017
+- 27018:27017
 command: --profile=2 --verbose


@@ -6,5 +6,5 @@ services:
 environment:
 MYSQL_ROOT_PASSWORD: clickhouse
 ports:
-- 3308:3306
+- 3308:3306
 command: --server_id=100 --log-bin='mysql-bin-1.log' --default-time-zone='+3:00' --gtid-mode="ON" --enforce-gtid-consistency


@@ -7,7 +7,7 @@ services:
 MYSQL_ALLOW_EMPTY_PASSWORD: 1
 command: --federated --socket /var/run/mysqld/mysqld.sock
 healthcheck:
-test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
+test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
 interval: 1s
 timeout: 2s
 retries: 100


@@ -11,4 +11,4 @@ services:
 ports:
 - "5433:5433"
 environment:
-POSTGRES_HOST_AUTH_METHOD: "trust"
+POSTGRES_HOST_AUTH_METHOD: "trust"


@@ -6,8 +6,8 @@ services:
 environment:
 POSTGRES_PASSWORD: mysecretpassword
 ports:
-- 5432:5432
+- 5432:5432
 networks:
-default:
-aliases:
-- postgre-sql.local
+default:
+aliases:
+- postgre-sql.local


@@ -4,5 +4,5 @@ services:
 image: redis
 restart: always
 ports:
-- 6380:6379
+- 6380:6379
 command: redis-server --requirepass "clickhouse" --databases 32


@@ -1,7 +1,14 @@
 # docker build -t yandex/clickhouse-style-test .
 FROM ubuntu:20.04
-RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes shellcheck libxml2-utils git python3-pip pylint && pip3 install codespell
+RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
+shellcheck \
+libxml2-utils \
+git \
+python3-pip \
+pylint \
+yamllint \
+&& pip3 install codespell
 # For |& syntax


@@ -56,7 +56,7 @@ services:
 zookeeper:
 condition: service_healthy
-# dummy service which does nothing, but allows to postpone
+# dummy service which does nothing, but allows to postpone
 # 'docker-compose up -d' till all dependecies will go healthy
 all_services_ready:
 image: hello-world


@@ -20,7 +20,7 @@ services:
 zookeeper:
 condition: service_healthy
-# dummy service which does nothing, but allows to postpone
+# dummy service which does nothing, but allows to postpone
 # 'docker-compose up -d' till all dependecies will go healthy
 all_services_ready:
 image: hello-world


@@ -135,7 +135,7 @@ services:
 zookeeper:
 condition: service_healthy
-# dummy service which does nothing, but allows to postpone
+# dummy service which does nothing, but allows to postpone
 # 'docker-compose up -d' till all dependecies will go healthy
 all_services_ready:
 image: hello-world


@@ -28,7 +28,7 @@ services:
 environment:
 PHPLDAPADMIN_HTTPS=false:
 ports:
-- "8080:80"
+- "8080:80"
 healthcheck:
 test: echo 1
 interval: 10s
@@ -37,4 +37,3 @@ services:
 start_period: 300s
 security_opt:
 - label:disable


@@ -135,7 +135,7 @@ services:
 zookeeper:
 condition: service_healthy
-# dummy service which does nothing, but allows to postpone
+# dummy service which does nothing, but allows to postpone
 # 'docker-compose up -d' till all dependecies will go healthy
 all_services_ready:
 image: hello-world


@@ -28,7 +28,7 @@ services:
 environment:
 PHPLDAPADMIN_HTTPS=false:
 ports:
-- "8080:80"
+- "8080:80"
 healthcheck:
 test: echo 1
 interval: 10s
@@ -37,4 +37,3 @@ services:
 start_period: 300s
 security_opt:
 - label:disable


@@ -135,7 +135,7 @@ services:
 zookeeper:
 condition: service_healthy
-# dummy service which does nothing, but allows to postpone
+# dummy service which does nothing, but allows to postpone
 # 'docker-compose up -d' till all dependecies will go healthy
 all_services_ready:
 image: hello-world


@@ -28,7 +28,7 @@ services:
 environment:
 PHPLDAPADMIN_HTTPS=false:
 ports:
-- "8080:80"
+- "8080:80"
 healthcheck:
 test: echo 1
 interval: 10s
@@ -37,4 +37,3 @@ services:
 start_period: 300s
 security_opt:
 - label:disable


@@ -57,4 +57,4 @@ services:
 clickhouse3:
 condition: service_healthy
 zookeeper:
-condition: service_healthy
+condition: service_healthy


@@ -72,6 +72,10 @@ find $ROOT_PATH/{src,base,programs,utils} -name '*.xml' |
 # FIXME: for now only clickhouse-test
 pylint --rcfile=$ROOT_PATH/.pylintrc --score=n $ROOT_PATH/tests/clickhouse-test
+find $ROOT_PATH -not -path $ROOT_PATH'/contrib*' \( -name '*.yaml' -or -name '*.yml' \) -type f |
+grep -vP $EXCLUDE_DIRS |
+xargs yamllint --config-file=$ROOT_PATH/.yamllint
 # Machine translation to Russian is strictly prohibited
 find $ROOT_PATH/docs/ru -name '*.md' |
 grep -vP $EXCLUDE_DIRS |
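
The added find | grep | xargs pipeline lints every .yaml/.yml file outside contrib (and outside whatever EXCLUDE_DIRS matches) with the repository config. A rough standalone equivalent, with placeholder values because ROOT_PATH and EXCLUDE_DIRS are defined elsewhere in the script:

ROOT_PATH=$(git rev-parse --show-toplevel)  # placeholder: repository root
EXCLUDE_DIRS='build/'                       # placeholder: the script defines its own exclude pattern
find "$ROOT_PATH" -not -path "$ROOT_PATH"'/contrib*' \( -name '*.yaml' -or -name '*.yml' \) -type f |
    grep -vP "$EXCLUDE_DIRS" |
    xargs yamllint --config-file="$ROOT_PATH/.yamllint"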