Mirror of https://github.com/ClickHouse/ClickHouse.git
Fix publishing of content.clickhouse.com
- Publish only benchmarks and data
- Minimize clickhouse/docs-release
- Run it as a normal user
- Speed up running by not redownloading virtual env deps
- Clean out docs and blog building
- Minimize docs/tools/requirements.txt
Parent: fd217da123
Commit: cfe98c4aba
.github/workflows/docs_release.yml (vendored, 11 changes)
@@ -7,16 +7,17 @@ env:
concurrency:
  group: master-release
  cancel-in-progress: true
on:  # yamllint disable-line rule:truthy
'on':
  push:
    branches:
      - master
    paths:
      - 'docs/**'
      - 'website/**'
      - 'benchmark/**'
      - 'docker/**'
      - '.github/**'
      - 'benchmark/**'
      - 'docker/docs/release/**'
      - 'docs/**'
      - 'utils/list-versions/version_date.tsv'
      - 'website/**'
  workflow_dispatch:
jobs:
  DockerHubPushAarch64:
@@ -20,29 +20,23 @@ RUN apt-get update \
    openssl \
    python3-pip \
    software-properties-common \
    language-pack-zh* \
    chinese* \
    fonts-arphic-ukai \
    fonts-arphic-uming \
    fonts-ipafont-mincho \
    fonts-ipafont-gothic \
    fonts-unfonts-core \
    xvfb \
    nodejs \
    npm \
    openjdk-11-jdk \
    ssh-client \
    && pip --no-cache-dir install scipy \
    && apt-get autoremove --yes \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

RUN wget 'https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb'

RUN npm i -g purify-css

RUN pip3 install --ignore-installed --upgrade setuptools pip virtualenv

# We create the most popular default 1000:1000 ubuntu user to not have ssh issues when running with UID==1000
RUN useradd --create-home --uid 1000 --user-group ubuntu \
    && ssh-keyscan -t rsa github.com >> /etc/ssh/ssh_known_hosts

COPY run.sh /

ENV REPO_PATH=/repo_path
@@ -1,10 +1,12 @@
#!/usr/bin/env bash
set -euo pipefail

cd $REPO_PATH/docs/tools
mkdir venv
virtualenv -p $(which python3) venv
cd "$REPO_PATH/docs/tools"
if ! [ -d venv ]; then
  mkdir -p venv
  virtualenv -p "$(which python3)" venv
  source venv/bin/activate
  python3 -m pip install --ignore-installed -r requirements.txt
fi
source venv/bin/activate
python3 -m pip install --ignore-installed -r requirements.txt
mkdir -p ~/.ssh && ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
./release.sh 2>&1 | tee tee $OUTPUT_PATH/output.log
./release.sh 2>&1 | tee "$OUTPUT_PATH/output.log"
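The guard around venv creation is the "speed up" from the commit message: the environment is built and populated only once, then reused on later runs. A minimal sketch of the same pattern in Python, with illustrative paths and file names that are not taken from the repository:

import os
import subprocess
import venv

VENV_DIR = "venv"  # hypothetical location of the cached environment

def ensure_venv(requirements="requirements.txt"):
    """Create the venv and install deps only on the first run; reuse it afterwards."""
    python = os.path.join(VENV_DIR, "bin", "python")  # POSIX layout
    if not os.path.isdir(VENV_DIR):
        venv.EnvBuilder(with_pip=True).create(VENV_DIR)
        subprocess.check_call([python, "-m", "pip", "install", "-r", requirements])
    return python

if __name__ == "__main__":
    print(ensure_venv())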
@@ -1,113 +0,0 @@
#!/usr/bin/env python3
import datetime
import logging
import os
import time

import nav  # monkey patches mkdocs

import mkdocs.commands
from mkdocs import config
from mkdocs import exceptions

import mdx_clickhouse
import redirects

import util


def build_for_lang(lang, args):
    logging.info(f"Building {lang} blog")

    try:
        theme_cfg = {
            "name": None,
            "custom_dir": os.path.join(os.path.dirname(__file__), "..", args.theme_dir),
            "language": lang,
            "direction": "ltr",
            "static_templates": ["404.html"],
            "extra": {
                "now": int(
                    time.mktime(datetime.datetime.now().timetuple())
                )  # TODO better way to avoid caching
            },
        }

        # the following list of languages is sorted according to
        # https://en.wikipedia.org/wiki/List_of_languages_by_total_number_of_speakers
        languages = {"en": "English"}

        site_names = {"en": "ClickHouse Blog"}

        assert len(site_names) == len(languages)

        site_dir = os.path.join(args.blog_output_dir, lang)

        plugins = ["macros"]
        if args.htmlproofer:
            plugins.append("htmlproofer")

        website_url = "https://clickhouse.com"
        site_name = site_names.get(lang, site_names["en"])
        blog_nav, post_meta = nav.build_blog_nav(lang, args)
        raw_config = dict(
            site_name=site_name,
            site_url=f"{website_url}/blog/{lang}/",
            docs_dir=os.path.join(args.blog_dir, lang),
            site_dir=site_dir,
            strict=True,
            theme=theme_cfg,
            nav=blog_nav,
            copyright="©2016–2022 ClickHouse, Inc.",
            use_directory_urls=True,
            repo_name="ClickHouse/ClickHouse",
            repo_url="https://github.com/ClickHouse/ClickHouse/",
            edit_uri=f"edit/master/website/blog/{lang}",
            markdown_extensions=mdx_clickhouse.MARKDOWN_EXTENSIONS,
            plugins=plugins,
            extra=dict(
                now=datetime.datetime.now().isoformat(),
                rev=args.rev,
                rev_short=args.rev_short,
                rev_url=args.rev_url,
                website_url=website_url,
                events=args.events,
                languages=languages,
                includes_dir=os.path.join(os.path.dirname(__file__), "..", "_includes"),
                is_blog=True,
                post_meta=post_meta,
                today=datetime.date.today().isoformat(),
            ),
        )

        cfg = config.load_config(**raw_config)
        mkdocs.commands.build.build(cfg)

        redirects.build_blog_redirects(args)

        env = util.init_jinja2_env(args)
        with open(
            os.path.join(args.website_dir, "templates", "blog", "rss.xml"), "rb"
        ) as f:
            rss_template_string = f.read().decode("utf-8").strip()
        rss_template = env.from_string(rss_template_string)
        with open(os.path.join(args.blog_output_dir, lang, "rss.xml"), "w") as f:
            f.write(rss_template.render({"config": raw_config}))

        logging.info(f"Finished building {lang} blog")

    except exceptions.ConfigurationError as e:
        raise SystemExit("\n" + str(e))


def build_blog(args):
    tasks = []
    for lang in args.blog_lang.split(","):
        if lang:
            tasks.append(
                (
                    lang,
                    args,
                )
            )
    util.run_function_in_parallel(build_for_lang, tasks, threads=False)
@@ -1,144 +1,17 @@
#!/usr/bin/env python3

import argparse
import datetime
import logging
import os
import shutil
import subprocess
import sys
import time

import jinja2
import livereload
import markdown.util

import nav  # monkey patches mkdocs

from mkdocs import config
from mkdocs import exceptions
import mkdocs.commands.build

import blog
import mdx_clickhouse
import redirects
import util
import website

from cmake_in_clickhouse_generator import generate_cmake_flags_files


class ClickHouseMarkdown(markdown.extensions.Extension):
    class ClickHousePreprocessor(markdown.util.Processor):
        def run(self, lines):
            for line in lines:
                if "<!--hide-->" not in line:
                    yield line

    def extendMarkdown(self, md):
        md.preprocessors.register(
            self.ClickHousePreprocessor(), "clickhouse_preprocessor", 31
        )


markdown.extensions.ClickHouseMarkdown = ClickHouseMarkdown


def build_for_lang(lang, args):
    logging.info(f"Building {lang} docs")

    try:
        theme_cfg = {
            "name": None,
            "custom_dir": os.path.join(os.path.dirname(__file__), "..", args.theme_dir),
            "language": lang,
            "direction": "rtl" if lang == "fa" else "ltr",
            "static_templates": ["404.html"],
            "extra": {
                "now": int(
                    time.mktime(datetime.datetime.now().timetuple())
                )  # TODO better way to avoid caching
            },
        }

        # the following list of languages is sorted according to
        # https://en.wikipedia.org/wiki/List_of_languages_by_total_number_of_speakers
        languages = {"en": "English", "zh": "中文", "ru": "Русский", "ja": "日本語"}

        site_names = {
            "en": "ClickHouse %s Documentation",
            "zh": "ClickHouse文档 %s",
            "ru": "Документация ClickHouse %s",
            "ja": "ClickHouseドキュメント %s",
        }

        assert len(site_names) == len(languages)

        site_dir = os.path.join(args.docs_output_dir, lang)

        plugins = ["macros"]
        if args.htmlproofer:
            plugins.append("htmlproofer")

        website_url = "https://clickhouse.com"
        site_name = site_names.get(lang, site_names["en"]) % ""
        site_name = site_name.replace("  ", " ")

        raw_config = dict(
            site_name=site_name,
            site_url=f"{website_url}/docs/{lang}/",
            docs_dir=os.path.join(args.docs_dir, lang),
            site_dir=site_dir,
            strict=True,
            theme=theme_cfg,
            copyright="©2016–2022 ClickHouse, Inc.",
            use_directory_urls=True,
            repo_name="ClickHouse/ClickHouse",
            repo_url="https://github.com/ClickHouse/ClickHouse/",
            edit_uri=f"edit/master/docs/{lang}",
            markdown_extensions=mdx_clickhouse.MARKDOWN_EXTENSIONS,
            plugins=plugins,
            extra=dict(
                now=datetime.datetime.now().isoformat(),
                rev=args.rev,
                rev_short=args.rev_short,
                rev_url=args.rev_url,
                website_url=website_url,
                events=args.events,
                languages=languages,
                includes_dir=os.path.join(os.path.dirname(__file__), "..", "_includes"),
                is_blog=False,
            ),
        )

        raw_config["nav"] = nav.build_docs_nav(lang, args)

        cfg = config.load_config(**raw_config)

        if not args.skip_multi_page:
            mkdocs.commands.build.build(cfg)

        mdx_clickhouse.PatchedMacrosPlugin.disabled = False

        logging.info(f"Finished building {lang} docs")

    except exceptions.ConfigurationError as e:
        raise SystemExit("\n" + str(e))


def build_docs(args):
    tasks = []
    for lang in args.lang.split(","):
        if lang:
            tasks.append(
                (
                    lang,
                    args,
                )
            )
    util.run_function_in_parallel(build_for_lang, tasks, threads=False)
    redirects.build_docs_redirects(args)


def build(args):
    if os.path.exists(args.output_dir):

@@ -147,14 +20,6 @@ def build(args):
    if not args.skip_website:
        website.build_website(args)

    if not args.skip_docs:
        generate_cmake_flags_files()

        build_docs(args)

    if not args.skip_blog:
        blog.build_blog(args)

    if not args.skip_website:
        website.process_benchmark_results(args)
        website.minify_website(args)

@@ -171,20 +36,14 @@ if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--lang", default="en,ru,zh,ja")
    arg_parser.add_argument("--blog-lang", default="en")
    arg_parser.add_argument("--docs-dir", default=".")
    arg_parser.add_argument("--theme-dir", default=website_dir)
    arg_parser.add_argument("--website-dir", default=website_dir)
    arg_parser.add_argument("--src-dir", default=src_dir)
    arg_parser.add_argument("--blog-dir", default=os.path.join(website_dir, "blog"))
    arg_parser.add_argument("--output-dir", default="build")
    arg_parser.add_argument("--nav-limit", type=int, default="0")
    arg_parser.add_argument("--skip-multi-page", action="store_true")
    arg_parser.add_argument("--skip-website", action="store_true")
    arg_parser.add_argument("--skip-blog", action="store_true")
    arg_parser.add_argument("--skip-docs", action="store_true")
    arg_parser.add_argument("--htmlproofer", action="store_true")
    arg_parser.add_argument("--no-docs-macros", action="store_true")
    arg_parser.add_argument("--livereload", type=int, default="0")
    arg_parser.add_argument("--verbose", action="store_true")

@@ -196,11 +55,6 @@ if __name__ == "__main__":
    logging.getLogger("MARKDOWN").setLevel(logging.INFO)

    args.docs_output_dir = os.path.join(os.path.abspath(args.output_dir), "docs")
    args.blog_output_dir = os.path.join(os.path.abspath(args.output_dir), "blog")

    from github import get_events

    args.rev = (
        subprocess.check_output("git rev-parse HEAD", shell=True)
        .decode("utf-8")

@@ -212,9 +66,6 @@ if __name__ == "__main__":
        .strip()
    )
    args.rev_url = f"https://github.com/ClickHouse/ClickHouse/commit/{args.rev}"
    args.events = get_events(args)

    from build import build

    build(args)

@@ -223,9 +74,6 @@ if __name__ == "__main__":
    new_args = sys.executable + " " + " ".join(new_args)

    server = livereload.Server()
    server.watch(
        args.docs_dir + "**/*", livereload.shell(new_args, cwd="tools", shell=True)
    )
    server.watch(
        args.website_dir + "**/*",
        livereload.shell(new_args, cwd="tools", shell=True),
@@ -1,181 +0,0 @@
import re
import os
from typing import TextIO, List, Tuple, Optional, Dict

# name, default value, description
Entity = Tuple[str, str, str]

# https://regex101.com/r/R6iogw/12
cmake_option_regex: str = (
    r"^\s*option\s*\(([A-Z_0-9${}]+)\s*(?:\"((?:.|\n)*?)\")?\s*(.*)?\).*$"
)

ch_master_url: str = "https://github.com/clickhouse/clickhouse/blob/master/"

name_str: str = '<a name="{anchor}"></a>[`{name}`](' + ch_master_url + "{path}#L{line})"
default_anchor_str: str = "[`{name}`](#{anchor})"

comment_var_regex: str = r"\${(.+)}"
comment_var_replace: str = "`\\1`"

table_header: str = """
| Name | Default value | Description | Comment |
|------|---------------|-------------|---------|
"""
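A quick way to see what cmake_option_regex captures is to run it over a single option() line; the sample line below is made up for illustration:

import re

cmake_option_regex = r"^\s*option\s*\(([A-Z_0-9${}]+)\s*(?:\"((?:.|\n)*?)\")?\s*(.*)?\).*$"
sample = 'option(ENABLE_TESTS "Provide a target with unit tests" ON)'
m = re.match(cmake_option_regex, sample)
# three groups: option name, quoted description, and the remaining default value
print(m.groups())  # ('ENABLE_TESTS', 'Provide a target with unit tests', 'ON')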
# Needed to detect conditional variables (those which are defined twice)
# name -> (path, values)
entities: Dict[str, Tuple[str, str]] = {}


def make_anchor(t: str) -> str:
    return "".join(
        ["-" if i == "_" else i.lower() for i in t if i.isalpha() or i == "_"]
    )


def process_comment(comment: str) -> str:
    return re.sub(comment_var_regex, comment_var_replace, comment, flags=re.MULTILINE)


def build_entity(path: str, entity: Entity, line_comment: Tuple[int, str]) -> None:
    (line, comment) = line_comment
    (name, description, default) = entity

    if name in entities:
        return

    if len(default) == 0:
        formatted_default: str = "`OFF`"
    elif default[0] == "$":
        formatted_default: str = "`{}`".format(default[2:-1])
    else:
        formatted_default: str = "`" + default + "`"

    formatted_name: str = name_str.format(
        anchor=make_anchor(name), name=name, path=path, line=line
    )

    formatted_description: str = "".join(description.split("\n"))

    formatted_comment: str = process_comment(comment)

    formatted_entity: str = "| {} | {} | {} | {} |".format(
        formatted_name, formatted_default, formatted_description, formatted_comment
    )

    entities[name] = path, formatted_entity


def process_file(root_path: str, file_path: str, file_name: str) -> None:
    with open(os.path.join(file_path, file_name), "r") as cmake_file:
        contents: str = cmake_file.read()

    def get_line_and_comment(target: str) -> Tuple[int, str]:
        contents_list: List[str] = contents.split("\n")
        comment: str = ""

        for n, line in enumerate(contents_list):
            if "option" not in line.lower() or target not in line:
                continue

            for maybe_comment_line in contents_list[n - 1 :: -1]:
                if not re.match("\s*#\s*", maybe_comment_line):
                    break

                comment = re.sub("\s*#\s*", "", maybe_comment_line) + " " + comment

            # line numbering starts with 1
            return n + 1, comment

    matches: Optional[List[Entity]] = re.findall(
        cmake_option_regex, contents, re.MULTILINE
    )

    file_rel_path_with_name: str = os.path.join(
        file_path[len(root_path) :], file_name
    )
    if file_rel_path_with_name.startswith("/"):
        file_rel_path_with_name = file_rel_path_with_name[1:]

    if matches:
        for entity in matches:
            build_entity(
                file_rel_path_with_name, entity, get_line_and_comment(entity[0])
            )


def process_folder(root_path: str, name: str) -> None:
    for root, _, files in os.walk(os.path.join(root_path, name)):
        for f in files:
            if f == "CMakeLists.txt" or ".cmake" in f:
                process_file(root_path, root, f)


def generate_cmake_flags_files() -> None:
    root_path: str = os.path.join(os.path.dirname(__file__), "..", "..")

    output_file_name: str = os.path.join(
        root_path, "docs/en/development/cmake-in-clickhouse.md"
    )
    header_file_name: str = os.path.join(
        root_path, "docs/_includes/cmake_in_clickhouse_header.md"
    )
    footer_file_name: str = os.path.join(
        root_path, "docs/_includes/cmake_in_clickhouse_footer.md"
    )

    process_file(root_path, root_path, "CMakeLists.txt")
    process_file(root_path, os.path.join(root_path, "programs"), "CMakeLists.txt")

    process_folder(root_path, "base")
    process_folder(root_path, "cmake")
    process_folder(root_path, "src")

    with open(output_file_name, "w") as f:
        with open(header_file_name, "r") as header:
            f.write(header.read())

        sorted_keys: List[str] = sorted(entities.keys())
        ignored_keys: List[str] = []

        f.write("### ClickHouse modes\n" + table_header)

        for k in sorted_keys:
            if k.startswith("ENABLE_CLICKHOUSE_"):
                f.write(entities[k][1] + "\n")
                ignored_keys.append(k)

        f.write(
            "\n### External libraries\nNote that ClickHouse uses forks of these libraries, see https://github.com/ClickHouse-Extras.\n"
            + table_header
        )

        for k in sorted_keys:
            if k.startswith("ENABLE_") and ".cmake" in entities[k][0]:
                f.write(entities[k][1] + "\n")
                ignored_keys.append(k)

        f.write("\n\n### Other flags\n" + table_header)

        for k in sorted(set(sorted_keys).difference(set(ignored_keys))):
            f.write(entities[k][1] + "\n")

        with open(footer_file_name, "r") as footer:
            f.write(footer.read())

    other_languages = [
        "docs/ja/development/cmake-in-clickhouse.md",
        "docs/zh/development/cmake-in-clickhouse.md",
        "docs/ru/development/cmake-in-clickhouse.md",
    ]

    for lang in other_languages:
        other_file_name = os.path.join(root_path, lang)
        if os.path.exists(other_file_name):
            os.unlink(other_file_name)
        os.symlink(output_file_name, other_file_name)


if __name__ == "__main__":
    generate_cmake_flags_files()
@@ -1,186 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os, sys
import argparse
import subprocess
import contextlib
from git import cmd
from tempfile import NamedTemporaryFile

SCRIPT_DESCRIPTION = """
usage: ./easy_diff.py language/document path

Show the difference between a language document and an English document.

This script is based on the assumption that documents in other languages are fully synchronized with the en document at a commit.

For example:
    Execute:
        ./easy_diff.py --no-pager zh/data_types
    Output:
        Need translate document:~/ClickHouse/docs/en/data_types/uuid.md
        Need link document:~/ClickHouse/docs/en/data_types/decimal.md to ~/ClickHouse/docs/zh/data_types/decimal.md
        diff --git a/docs/en/data_types/domains/ipv6.md b/docs/en/data_types/domains/ipv6.md
        index 1bfbe3400b..e2abaff017 100644
        --- a/docs/en/data_types/domains/ipv6.md
        +++ b/docs/en/data_types/domains/ipv6.md
        @@ -4,13 +4,13 @@

         ### Basic Usage

        -``` sql
        +```sql
         CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY url;

         DESCRIBE TABLE hits;
         ```

        -```
        +```text
         ┌─name─┬─type───┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┐
         │ url  │ String │              │                    │         │                  │
         │ from │ IPv6   │              │                    │         │                  │
        @@ -19,19 +19,19 @@ DESCRIBE TABLE hits;

         OR you can use `IPv6` domain as a key:

        -``` sql
        +```sql
         CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
    ... MORE

OPTIONS:
    -h, --help   show this help message and exit
    --no-pager   use stdout as difference result output
"""

SCRIPT_PATH = os.path.abspath(__file__)
CLICKHOUSE_REPO_HOME = os.path.join(os.path.dirname(SCRIPT_PATH), "..", "..")
SCRIPT_COMMAND_EXECUTOR = cmd.Git(CLICKHOUSE_REPO_HOME)

SCRIPT_COMMAND_PARSER = argparse.ArgumentParser(add_help=False)
SCRIPT_COMMAND_PARSER.add_argument("path", type=bytes, nargs="?", default=None)
SCRIPT_COMMAND_PARSER.add_argument("--no-pager", action="store_true", default=False)
SCRIPT_COMMAND_PARSER.add_argument("-h", "--help", action="store_true", default=False)


def execute(commands):
    return SCRIPT_COMMAND_EXECUTOR.execute(commands)


def get_hash(file_name):
    return execute(["git", "log", "-n", "1", '--pretty=format:"%H"', file_name])


def diff_file(reference_file, working_file, out):
    if not os.path.exists(reference_file):
        raise RuntimeError(
            "reference file [" + os.path.abspath(reference_file) + "] is not exists."
        )

    if os.path.islink(working_file):
        out.writelines(["Need translate document:" + os.path.abspath(reference_file)])
    elif not os.path.exists(working_file):
        out.writelines(
            [
                "Need link document "
                + os.path.abspath(reference_file)
                + " to "
                + os.path.abspath(working_file)
            ]
        )
    elif get_hash(working_file) != get_hash(reference_file):
        out.writelines(
            [
                (
                    execute(
                        [
                            "git",
                            "diff",
                            get_hash(working_file).strip('"'),
                            reference_file,
                        ]
                    ).encode("utf-8")
                )
            ]
        )

    return 0


def diff_directory(reference_directory, working_directory, out):
    if not os.path.isdir(reference_directory):
        return diff_file(reference_directory, working_directory, out)

    for list_item in os.listdir(reference_directory):
        working_item = os.path.join(working_directory, list_item)
        reference_item = os.path.join(reference_directory, list_item)
        if (
            diff_file(reference_item, working_item, out)
            if os.path.isfile(reference_item)
            else diff_directory(reference_item, working_item, out) != 0
        ):
            return 1

    return 0


def find_language_doc(custom_document, other_language="en", children=[]):
    if len(custom_document) == 0:
        raise RuntimeError(
            "The "
            + os.path.join(custom_document, *children)
            + " is not in docs directory."
        )

    if os.path.samefile(os.path.join(CLICKHOUSE_REPO_HOME, "docs"), custom_document):
        return os.path.join(CLICKHOUSE_REPO_HOME, "docs", other_language, *children[1:])
    children.insert(0, os.path.split(custom_document)[1])
    return find_language_doc(
        os.path.split(custom_document)[0], other_language, children
    )


class ToPager:
    def __init__(self, temp_named_file):
        self.temp_named_file = temp_named_file

    def writelines(self, lines):
        self.temp_named_file.writelines(lines)

    def close(self):
        self.temp_named_file.flush()
        git_pager = execute(["git", "var", "GIT_PAGER"])
        subprocess.check_call([git_pager, self.temp_named_file.name])
        self.temp_named_file.close()


class ToStdOut:
    def writelines(self, lines):
        self.system_stdout_stream.writelines(lines)

    def close(self):
        self.system_stdout_stream.flush()

    def __init__(self, system_stdout_stream):
        self.system_stdout_stream = system_stdout_stream


if __name__ == "__main__":
    arguments = SCRIPT_COMMAND_PARSER.parse_args()
    if arguments.help or not arguments.path:
        sys.stdout.write(SCRIPT_DESCRIPTION)
        sys.exit(0)

    working_language = os.path.join(CLICKHOUSE_REPO_HOME, "docs", arguments.path)
    with contextlib.closing(
        ToStdOut(sys.stdout)
        if arguments.no_pager
        else ToPager(NamedTemporaryFile("r+"))
    ) as writer:
        exit(
            diff_directory(
                find_language_doc(working_language), working_language, writer
            )
        )
@@ -1,41 +0,0 @@
import collections
import copy
import io
import logging
import os
import random
import sys
import tarfile
import time

import requests

import util


def get_events(args):
    events = []
    skip = True
    with open(os.path.join(args.docs_dir, "..", "README.md")) as f:
        for line in f:
            if skip:
                if "Upcoming Events" in line:
                    skip = False
            else:
                if not line:
                    continue
                line = line.strip().split("](")
                if len(line) == 2:
                    tail = line[1].split(") ")
                    events.append(
                        {
                            "signup_link": tail[0],
                            "event_name": line[0].replace("* [", ""),
                            "event_date": tail[1].replace("on ", "").replace(".", ""),
                        }
                    )
    return events


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG, stream=sys.stderr)
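get_events relies on the README listing events as markdown bullet links under "Upcoming Events". Tracing the same parsing on one invented line shows the resulting record:

line = "* [ClickHouse Meetup](https://example.com/signup) on 2022-06-01."
parts = line.strip().split("](")   # ['* [ClickHouse Meetup', 'https://example.com/signup) on 2022-06-01.']
tail = parts[1].split(") ")        # ['https://example.com/signup', 'on 2022-06-01.']
event = {
    "signup_link": tail[0],
    "event_name": parts[0].replace("* [", ""),
    "event_date": tail[1].replace("on ", "").replace(".", ""),
}
print(event)  # {'signup_link': 'https://example.com/signup', 'event_name': 'ClickHouse Meetup', 'event_date': '2022-06-01'}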
@@ -1,190 +0,0 @@
import collections
import datetime
import hashlib
import logging
import os

import mkdocs.structure.nav

import util


def find_first_header(content):
    for line in content.split("\n"):
        if line.startswith("#"):
            no_hash = line.lstrip("#")
            return no_hash.split("{", 1)[0].strip()
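find_first_header recovers a page title from raw markdown, stripping the leading hashes and any trailing {#anchor} attribute. A small standalone trace with invented content:

content = "# Data Types {#data-types}\n\nSome intro text."
title = None
for line in content.split("\n"):
    if line.startswith("#"):
        # drop hashes, then cut off the "{#...}" attribute, then trim
        title = line.lstrip("#").split("{", 1)[0].strip()
        break
print(title)  # Data Types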
def build_nav_entry(root, args):
    if root.endswith("images"):
        return None, None, None
    result_items = []
    index_meta, index_content = util.read_md_file(os.path.join(root, "index.md"))
    current_title = index_meta.get("toc_folder_title", index_meta.get("toc_title"))
    current_title = current_title or index_meta.get(
        "title", find_first_header(index_content)
    )
    for filename in os.listdir(root):
        path = os.path.join(root, filename)
        if os.path.isdir(path):
            prio, title, payload = build_nav_entry(path, args)
            if title and payload:
                result_items.append((prio, title, payload))
        elif filename.endswith(".md"):
            path = os.path.join(root, filename)

            meta = ""
            content = ""

            try:
                meta, content = util.read_md_file(path)
            except:
                print("Error in file: {}".format(path))
                raise

            path = path.split("/", 2)[-1]
            title = meta.get("toc_title", find_first_header(content))
            if title:
                title = title.strip().rstrip(".")
            else:
                title = meta.get("toc_folder_title", "hidden")
            prio = meta.get("toc_priority", 9999)
            logging.debug(f"Nav entry: {prio}, {title}, {path}")
            if meta.get("toc_hidden") or not content.strip():
                title = "hidden"
            if title == "hidden":
                title = "hidden-" + hashlib.sha1(content.encode("utf-8")).hexdigest()
            if args.nav_limit and len(result_items) >= args.nav_limit:
                break
            result_items.append((prio, title, path))
    result_items = sorted(result_items, key=lambda x: (x[0], x[1]))
    result = collections.OrderedDict([(item[1], item[2]) for item in result_items])
    if index_meta.get("toc_hidden_folder"):
        current_title += "|hidden-folder"
    return index_meta.get("toc_priority", 10000), current_title, result


def build_docs_nav(lang, args):
    docs_dir = os.path.join(args.docs_dir, lang)
    _, _, nav = build_nav_entry(docs_dir, args)
    result = []
    index_key = None
    for key, value in list(nav.items()):
        if key and value:
            if value == "index.md":
                index_key = key
                continue
            result.append({key: value})
        if args.nav_limit and len(result) >= args.nav_limit:
            break
    if index_key:
        key = list(result[0].keys())[0]
        result[0][key][index_key] = "index.md"
        result[0][key].move_to_end(index_key, last=False)
    return result


def build_blog_nav(lang, args):
    blog_dir = os.path.join(args.blog_dir, lang)
    years = sorted(os.listdir(blog_dir), reverse=True)
    result_nav = [{"hidden": "index.md"}]
    post_meta = collections.OrderedDict()
    for year in years:
        year_dir = os.path.join(blog_dir, year)
        if not os.path.isdir(year_dir):
            continue
        result_nav.append({year: collections.OrderedDict()})
        posts = []
        post_meta_items = []
        for post in os.listdir(year_dir):
            post_path = os.path.join(year_dir, post)
            if not post.endswith(".md"):
                raise RuntimeError(
                    f"Unexpected non-md file in posts folder: {post_path}"
                )
            meta, _ = util.read_md_file(post_path)
            post_date = meta["date"]
            post_title = meta["title"]
            if datetime.date.fromisoformat(post_date) > datetime.date.today():
                continue
            posts.append(
                (
                    post_date,
                    post_title,
                    os.path.join(year, post),
                )
            )
            if post_title in post_meta:
                raise RuntimeError(f"Duplicate post title: {post_title}")
            if not post_date.startswith(f"{year}-"):
                raise RuntimeError(
                    f"Post date {post_date} doesn't match the folder year {year}: {post_title}"
                )
            post_url_part = post.replace(".md", "")
            post_meta_items.append(
                (
                    post_date,
                    {
                        "date": post_date,
                        "title": post_title,
                        "image": meta.get("image"),
                        "url": f"/blog/{lang}/{year}/{post_url_part}/",
                    },
                )
            )
        for _, title, path in sorted(posts, reverse=True):
            result_nav[-1][year][title] = path
        for _, post_meta_item in sorted(
            post_meta_items, reverse=True, key=lambda item: item[0]
        ):
            post_meta[post_meta_item["title"]] = post_meta_item
    return result_nav, post_meta


def _custom_get_navigation(files, config):
    nav_config = config["nav"] or mkdocs.structure.nav.nest_paths(
        f.src_path for f in files.documentation_pages()
    )
    items = mkdocs.structure.nav._data_to_navigation(nav_config, files, config)
    if not isinstance(items, list):
        items = [items]

    pages = mkdocs.structure.nav._get_by_type(items, mkdocs.structure.nav.Page)

    mkdocs.structure.nav._add_previous_and_next_links(pages)
    mkdocs.structure.nav._add_parent_links(items)

    missing_from_config = [
        file for file in files.documentation_pages() if file.page is None
    ]
    if missing_from_config:
        files._files = [
            file for file in files._files if file not in missing_from_config
        ]

    links = mkdocs.structure.nav._get_by_type(items, mkdocs.structure.nav.Link)
    for link in links:
        scheme, netloc, path, params, query, fragment = mkdocs.structure.nav.urlparse(
            link.url
        )
        if scheme or netloc:
            mkdocs.structure.nav.log.debug(
                "An external link to '{}' is included in "
                "the 'nav' configuration.".format(link.url)
            )
        elif link.url.startswith("/"):
            mkdocs.structure.nav.log.debug(
                "An absolute path to '{}' is included in the 'nav' configuration, "
                "which presumably points to an external resource.".format(link.url)
            )
        else:
            msg = (
                "A relative path to '{}' is included in the 'nav' configuration, "
                "which is not found in the documentation files".format(link.url)
            )
            mkdocs.structure.nav.log.warning(msg)
    return mkdocs.structure.nav.Navigation(items, pages)


mkdocs.structure.nav.get_navigation = _custom_get_navigation
@@ -27,45 +27,6 @@ def write_redirect_html(out_path, to_url):
    )


def build_redirect_html(args, base_prefix, lang, output_dir, from_path, to_path):
    out_path = os.path.join(
        output_dir,
        lang,
        from_path.replace("/index.md", "/index.html").replace(".md", "/index.html"),
    )
    target_path = to_path.replace("/index.md", "/").replace(".md", "/")

    if target_path[0:7] != "http://" and target_path[0:8] != "https://":
        to_url = f"/{base_prefix}/{lang}/{target_path}"
    else:
        to_url = target_path

    to_url = to_url.strip()
    write_redirect_html(out_path, to_url)
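Tracing build_redirect_html by hand for one hypothetical redirects.txt entry (the paths below are illustrative, not taken from the repository) shows how the output file and target URL are derived:

# hypothetical entry: "operations/table_engines.md operations/table-engines.md"
from_path = "operations/table_engines.md"
to_path = "operations/table-engines.md"
out_path = "build/docs/en/" + from_path.replace("/index.md", "/index.html").replace(".md", "/index.html")
target_path = to_path.replace("/index.md", "/").replace(".md", "/")
to_url = f"/docs/en/{target_path}"  # not an absolute http(s) URL, so it gets the /docs/{lang}/ prefix
print(out_path)  # build/docs/en/operations/table_engines/index.html
print(to_url)    # /docs/en/operations/table-engines/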
def build_docs_redirects(args):
    with open(os.path.join(args.docs_dir, "redirects.txt"), "r") as f:
        for line in f:
            for lang in args.lang.split(","):
                from_path, to_path = line.split(" ", 1)
                build_redirect_html(
                    args, "docs", lang, args.docs_output_dir, from_path, to_path
                )


def build_blog_redirects(args):
    for lang in args.blog_lang.split(","):
        redirects_path = os.path.join(args.blog_dir, lang, "redirects.txt")
        if os.path.exists(redirects_path):
            with open(redirects_path, "r") as f:
                for line in f:
                    from_path, to_path = line.split(" ", 1)
                    build_redirect_html(
                        args, "blog", lang, args.blog_output_dir, from_path, to_path
                    )


def build_static_redirects(args):
    for static_redirect in [
        ("benchmark.html", "/benchmark/dbms/"),
@@ -1,39 +1,32 @@
Babel==2.9.1
backports-abc==0.5
backports.functools-lru-cache==1.6.1
beautifulsoup4==4.9.1
certifi==2020.4.5.2
chardet==3.0.4
click==7.1.2
closure==20191111
cssmin==0.2.0
future==0.18.2
htmlmin==0.1.12
idna==2.10
Jinja2==3.0.3
jinja2-highlight==0.6.1
jsmin==3.0.0
livereload==2.6.3
Markdown==3.3.2
MarkupSafe==2.1.0
mkdocs==1.3.0
mkdocs-htmlproofer-plugin==0.0.3
mkdocs-macros-plugin==0.4.20
nltk==3.7
nose==1.3.7
protobuf==3.14.0
numpy==1.21.2
pymdown-extensions==8.0
python-slugify==4.0.1
MarkupSafe==2.1.1
PyYAML==6.0
repackage==0.7.3
requests==2.25.1
singledispatch==3.4.0.3
Pygments>=2.12.0
beautifulsoup4==4.9.1
click==7.1.2
ghp_import==2.1.0
importlib_metadata==4.11.4
jinja2-highlight==0.6.1
livereload==2.6.3
mergedeep==1.3.4
mkdocs-macros-plugin==0.4.20
mkdocs-macros-test==0.1.0
mkdocs-material==8.2.15
mkdocs==1.3.0
mkdocs_material_extensions==1.0.3
packaging==21.3
pygments==2.12.0
pymdown_extensions==9.4
pyparsing==3.0.9
python-slugify==4.0.1
python_dateutil==2.8.2
pytz==2022.1
six==1.15.0
soupsieve==2.0.1
soupsieve==2.3.2
termcolor==1.1.0
text_unidecode==1.3
tornado==6.1
Unidecode==1.1.1
urllib3>=1.26.8
Pygments>=2.11.2

zipp==3.8.0
@@ -124,7 +124,7 @@ def init_jinja2_env(args):
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(
            [args.website_dir, os.path.join(args.docs_dir, "_includes")]
            [args.website_dir, os.path.join(args.src_dir, "docs", "_includes")]
        ),
        extensions=["jinja2.ext.i18n", "jinja2_highlight.HighlightExtension"],
    )
@@ -1,81 +0,0 @@
const path = require('path');
const jsPath = path.resolve(__dirname, '../../website/src/js');
const scssPath = path.resolve(__dirname, '../../website/src/scss');

console.log(path.resolve(__dirname, 'node_modules/bootstrap', require('bootstrap/package.json').sass));

module.exports = {

  mode: ('development' === process.env.NODE_ENV) && 'development' || 'production',

  ...(('development' === process.env.NODE_ENV) && {
    watch: true,
  }),

  entry: [
    path.resolve(scssPath, 'bootstrap.scss'),
    path.resolve(scssPath, 'main.scss'),
    path.resolve(jsPath, 'main.js'),
  ],

  output: {
    path: path.resolve(__dirname, '../../website'),
    filename: 'js/main.js',
  },

  resolve: {
    alias: {
      bootstrap: path.resolve(__dirname, 'node_modules/bootstrap', require('bootstrap/package.json').sass),
    },
  },

  module: {
    rules: [{
      test: /\.js$/,
      exclude: /(node_modules)/,
      use: [{
        loader: 'babel-loader',
        options: {
          presets: ['@babel/preset-env'],
        },
      }],
    }, {
      test: /\.scss$/,
      use: [{
        loader: 'file-loader',
        options: {
          sourceMap: true,
          outputPath: (url, entryPath, context) => {
            if (0 === entryPath.indexOf(scssPath)) {
              const outputFile = entryPath.slice(entryPath.lastIndexOf('/') + 1, -5)
              const outputPath = entryPath.slice(0, entryPath.lastIndexOf('/')).slice(scssPath.length + 1)
              return `./css/${outputPath}/${outputFile}.css`
            }
            return `./css/${url}`
          },
        },
      }, {
        loader: 'postcss-loader',
        options: {
          options: {},
          plugins: () => ([
            require('autoprefixer'),
            ('production' === process.env.NODE_ENV) && require('cssnano'),
          ].filter(plugin => plugin)),
        }
      }, {
        loader: 'sass-loader',
        options: {
          implementation: require('sass'),
          sourceMap: ('development' === process.env.NODE_ENV),
          sassOptions: {
            importer: require('node-sass-glob-importer')(),
            precision: 10,
          },
        },
      }],
    }],
  },

};
@@ -42,8 +42,10 @@ if __name__ == "__main__":
    token = CLOUDFLARE_TOKEN
    cmd = (
        "docker run --cap-add=SYS_PTRACE --volume=$SSH_AUTH_SOCK:/ssh-agent -e SSH_AUTH_SOCK=/ssh-agent "
        f"-e CLOUDFLARE_TOKEN={token} --volume={repo_path}:/repo_path --volume={test_output}:/output_path {docker_image}"
        "docker run --cap-add=SYS_PTRACE --volume=$SSH_AUTH_SOCK:/ssh-agent "
        f"-e SSH_AUTH_SOCK=/ssh-agent -e CLOUDFLARE_TOKEN={token} "
        f"-e EXTRA_BUILD_ARGS='--verbose' --volume={repo_path}:/repo_path"
        f" --volume={test_output}:/output_path {docker_image}"
    )

    run_log_path = os.path.join(test_output, "runlog.log")