#!/usr/bin/env python3
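"""Build the ClickHouse website and multi-language documentation with mkdocs.

In addition to the per-language HTML docs, the script builds a single-page
version, an optional PDF (via wkhtmltopdf) and redirect pages, and can serve
the result through livereload for local editing.
"""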

import argparse
import collections
import datetime
import logging
import os
import shutil
import subprocess
import sys
import time

import bs4
import jinja2
import livereload
import markdown.util

from mkdocs import config
from mkdocs import exceptions
from mkdocs.commands import build as mkdocs_build

from concatenate import concatenate

import mdx_clickhouse
import test
import util
import website


class ClickHouseMarkdown(markdown.extensions.Extension):
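    """Markdown extension that drops any source line containing the <!--hide--> marker."""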
    class ClickHousePreprocessor(markdown.util.Processor):
        def run(self, lines):
            for line in lines:
                if '<!--hide-->' not in line:
                    yield line

    def extendMarkdown(self, md):
        md.preprocessors.register(self.ClickHousePreprocessor(), 'clickhouse_preprocessor', 31)


markdown.extensions.ClickHouseMarkdown = ClickHouseMarkdown


def build_nav_entry(root):
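    """Build a navigation entry for one docs directory.

    Returns a (priority, title, payload) tuple assembled from the toc_*
    front matter of index.md, the *.md files and the subdirectories of root.
    """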
    if root.endswith('images'):
        return None, None, None
    result_items = []
    index_meta, _ = util.read_md_file(os.path.join(root, 'index.md'))
    current_title = index_meta.get('toc_folder_title', index_meta.get('toc_title', 'hidden'))
    for filename in os.listdir(root):
        path = os.path.join(root, filename)
        if os.path.isdir(path):
            prio, title, payload = build_nav_entry(path)
            if title and payload:
                result_items.append((prio, title, payload))
        elif filename.endswith('.md'):
            path = os.path.join(root, filename)
            meta, _ = util.read_md_file(path)
            path = path.split('/', 2)[-1]
            title = meta.get('toc_title', 'hidden')
            prio = meta.get('toc_priority', 9999)
            result_items.append((prio, title, path))
    result_items = sorted(result_items, key=lambda x: (x[0], x[1]))
    result = collections.OrderedDict([(item[1], item[2]) for item in result_items])
    return index_meta.get('toc_priority', 10000), current_title, result


def build_nav(lang, args):
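    """Return the top-level mkdocs `nav` list for one language."""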
    docs_dir = os.path.join(args.docs_dir, lang)
    _, _, nav = build_nav_entry(docs_dir)
    result = []
    for key, value in nav.items():
        result.append({key: value})
    return result


def build_for_lang(lang, args):
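    """Build the mkdocs site for one language, then the single-page version unless --skip-single-page is given."""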
    logging.info(f'Building {lang} docs')
    os.environ['SINGLE_PAGE'] = '0'

    config_path = os.path.join(args.docs_dir, f'toc_{lang}.yml')
    if args.is_stable_release and not os.path.exists(config_path):
        logging.warning(f'Skipping {lang} docs, because {config_path} does not exist')
        return

    try:
        theme_cfg = {
            'name': None,
            'custom_dir': os.path.join(os.path.dirname(__file__), '..', args.theme_dir),
            'language': lang,
            'direction': 'rtl' if lang == 'fa' else 'ltr',
            # TODO: cleanup
            'feature': {
                'tabs': False
            },
            'palette': {
                'primary': 'white',
                'accent': 'white'
            },
            'font': False,
            'logo': 'images/logo.svg',
            'favicon': 'assets/images/favicon.ico',
            'static_templates': ['404.html'],
            'extra': {
                'now': int(time.mktime(datetime.datetime.now().timetuple()))  # TODO better way to avoid caching
            }
        }

        # the following list of languages is sorted according to
        # https://en.wikipedia.org/wiki/List_of_languages_by_total_number_of_speakers
        languages = {
            'en': 'English',
            'zh': '中文',
            'es': 'Español',
            'fr': 'Français',
            'ru': 'Русский',
            'ja': '日本語',
            'fa': 'فارسی'
        }

        site_names = {
            'en': 'ClickHouse %s Documentation',
            'es': 'Documentación de ClickHouse %s',
            'fr': 'Documentation ClickHouse %s',
            'ru': 'Документация ClickHouse %s',
            'zh': 'ClickHouse文档 %s',
            'ja': 'ClickHouseドキュメント %s',
            'fa': 'مستندات %sClickHouse'
        }

        assert len(site_names) == len(languages)

        if args.version_prefix:
            site_dir = os.path.join(args.docs_output_dir, args.version_prefix, lang)
        else:
            site_dir = os.path.join(args.docs_output_dir, lang)

        markdown_extensions = [
            'mdx_clickhouse',
            'admonition',
            'attr_list',
            'codehilite',
            'nl2br',
            'sane_lists',
            'pymdownx.magiclink',
            'pymdownx.superfences',
            'extra',
            {
                'toc': {
                    'permalink': True,
                    'slugify': mdx_clickhouse.slugify
                }
            }
        ]

        plugins = ['macros']
        if args.htmlproofer:
            plugins.append('htmlproofer')

        raw_config = dict(
            site_name=site_names.get(lang, site_names['en']) % args.version_prefix,
            site_url=f'https://clickhouse.tech/docs/{lang}/',
            docs_dir=os.path.join(args.docs_dir, lang),
            site_dir=site_dir,
            strict=not args.version_prefix,
            theme=theme_cfg,
            copyright='©2016–2020 Yandex LLC',
            use_directory_urls=True,
            repo_name='ClickHouse/ClickHouse',
            repo_url='https://github.com/ClickHouse/ClickHouse/',
            edit_uri=f'edit/master/docs/{lang}',
            extra_css=[f'assets/stylesheets/custom.css?{args.rev_short}'],
            markdown_extensions=markdown_extensions,
            plugins=plugins,
            extra={
                'stable_releases': args.stable_releases,
                'version_prefix': args.version_prefix,
                'single_page': False,
                'rev': args.rev,
                'rev_short': args.rev_short,
                'rev_url': args.rev_url,
                'events': args.events,
                'languages': languages
            }
        )

        if os.path.exists(config_path):
            nav = None
            raw_config['config_file'] = config_path
        else:
            nav = build_nav(lang, args)
            raw_config['nav'] = nav

        cfg = config.load_config(**raw_config)

        try:
            mkdocs_build.build(cfg)
        except jinja2.exceptions.TemplateError:
            if not args.version_prefix:
                raise
            mdx_clickhouse.PatchedMacrosPlugin.disabled = True
            mkdocs_build.build(cfg)

        if not args.skip_single_page:
            build_single_page_version(lang, args, nav, cfg)

        mdx_clickhouse.PatchedMacrosPlugin.disabled = False

        logging.info(f'Finished building {lang} docs')

    except exceptions.ConfigurationError as e:
        raise SystemExit('\n' + str(e))


def build_single_page_version(lang, args, nav, cfg):
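    """Build the single-page version of the docs for one language.

    All pages are concatenated into single.md, built with mkdocs and copied to
    the output directory, then rebuilt into a test copy that is used for the
    single-page tests and the optional PDF (wkhtmltopdf).
    """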
    logging.info(f'Building single page version for {lang}')
    os.environ['SINGLE_PAGE'] = '1'
    extra = cfg.data['extra']
    extra['single_page'] = True

    with util.autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md, nav)

        with util.temp_dir() as site_temp:
            with util.temp_dir() as docs_temp:
                docs_src_lang = os.path.join(args.docs_dir, lang)
                docs_temp_lang = os.path.join(docs_temp, lang)
                shutil.copytree(docs_src_lang, docs_temp_lang)
                for root, _, filenames in os.walk(docs_temp_lang):
                    for filename in filenames:
                        if filename != 'single.md' and filename.endswith('.md'):
                            os.unlink(os.path.join(root, filename))

                cfg.load_dict({
                    'docs_dir': docs_temp_lang,
                    'site_dir': site_temp,
                    'extra': extra,
                    'nav': [
                        {cfg.data.get('site_name'): 'single.md'}
                    ]
                })

                mkdocs_build.build(cfg)

                if args.version_prefix:
                    single_page_output_path = os.path.join(args.docs_dir, args.docs_output_dir, args.version_prefix, lang, 'single')
                else:
                    single_page_output_path = os.path.join(args.docs_dir, args.docs_output_dir, lang, 'single')

                if os.path.exists(single_page_output_path):
                    shutil.rmtree(single_page_output_path)

                shutil.copytree(
                    os.path.join(site_temp, 'single'),
                    single_page_output_path
                )

                logging.info(f'Re-building single page for {lang} pdf/test')
                with util.temp_dir() as test_dir:
                    extra['single_page'] = False
                    cfg.load_dict({
                        'docs_dir': docs_temp_lang,
                        'site_dir': test_dir,
                        'extra': extra,
                        'nav': [
                            {cfg.data.get('site_name'): 'single.md'}
                        ]
                    })
                    mkdocs_build.build(cfg)

                    css_in = ' '.join(website.get_css_in(args))
                    js_in = ' '.join(website.get_js_in(args))
                    subprocess.check_call(f'cat {css_in} > {test_dir}/css/base.css', shell=True)
                    subprocess.check_call(f'cat {js_in} > {test_dir}/js/base.js', shell=True)
                    if args.save_raw_single_page:
                        shutil.copytree(test_dir, args.save_raw_single_page)

                    if not args.version_prefix:  # maybe enable in future
                        logging.info(f'Running tests for {lang}')
                        test.test_single_page(
                            os.path.join(test_dir, 'single', 'index.html'), lang)

                    if not args.skip_pdf:
                        single_page_index_html = os.path.join(test_dir, 'single', 'index.html')
                        single_page_pdf = os.path.abspath(
                            os.path.join(single_page_output_path, f'clickhouse_{lang}.pdf')
                        )

                        with open(single_page_index_html, 'r') as f:
                            soup = bs4.BeautifulSoup(
                                f.read(),
                                features='html.parser'
                            )
                        soup_prefix = f'file://{test_dir}'
                        for img in soup.findAll('img'):
                            if img['src'].startswith('/'):
                                img['src'] = soup_prefix + img['src']
                        for script in soup.findAll('script'):
                            script['src'] = soup_prefix + script['src'].split('?', 1)[0]
                        for link in soup.findAll('link'):
                            link['href'] = soup_prefix + link['href'].split('?', 1)[0]

                        with open(single_page_index_html, 'w') as f:
                            f.write(str(soup))

                        create_pdf_command = [
                            'wkhtmltopdf',
                            '--print-media-type',
                            '--log-level', 'warn',
                            single_page_index_html, single_page_pdf
                        ]

                        logging.info(' '.join(create_pdf_command))
                        subprocess.check_call(' '.join(create_pdf_command), shell=True)

    logging.info(f'Finished building single page version for {lang}')


def write_redirect_html(out_path, to_url):
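    """Write a small HTML page to out_path that immediately redirects to to_url."""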
    out_dir = os.path.dirname(out_path)
    try:
        os.makedirs(out_dir)
    except OSError:
        pass
    with open(out_path, 'w') as f:
        f.write(f'''<!DOCTYPE HTML>
<html lang="en-US">
<head>
<meta charset="UTF-8">
<meta http-equiv="refresh" content="0; url={to_url}">
<script type="text/javascript">
window.location.href = "{to_url}"
</script>
<title>Page Redirection</title>
</head>
<body>
If you are not redirected automatically, follow this <a href="{to_url}">link</a>.
</body>
</html>''')


def build_redirect_html(args, from_path, to_path):
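    """Write a redirect page that maps an old documentation path to its new location."""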
    for lang in ['en']:  # TODO: restore args.lang.split(',')
        out_path = os.path.join(args.docs_output_dir, lang, from_path.replace('.md', '/index.html'))
        # Keep slashes around the version prefix so the result is /docs/<prefix>/<lang>/...
        version_prefix = f'/{args.version_prefix}/' if args.version_prefix else '/'
        target_path = to_path.replace('.md', '/')
        to_url = f'/docs{version_prefix}{lang}/{target_path}'
        to_url = to_url.strip()
        write_redirect_html(out_path, to_url)


def build_redirects(args):
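    """Generate redirect pages for every "<from_path> <to_path>" line of redirects.txt in the docs directory."""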
    with open(os.path.join(args.docs_dir, 'redirects.txt'), 'r') as f:
        for line in f:
            from_path, to_path = line.split(' ', 1)
            build_redirect_html(args, from_path, to_path)


def build_docs(args):
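    """Build the docs for every requested language in parallel, then generate the redirect pages."""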
    tasks = []
    for lang in args.lang.split(','):
        if lang:
            tasks.append((lang, args,))
    util.run_function_in_parallel(build_for_lang, tasks, threads=False)
    build_redirects(args)


def build(args):
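    """Full build: website assets, per-language docs, stable-release docs, minification and static redirect pages."""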
    if os.path.exists(args.output_dir):
        shutil.rmtree(args.output_dir)

    if not args.skip_website:
        website.build_website(args)

    build_docs(args)

    from github import build_releases
    build_releases(args, build_docs)

    if not args.skip_website:
        website.minify_website(args)

    for static_redirect in [
        ('tutorial.html', '/docs/en/getting_started/tutorial/',),
        ('reference_en.html', '/docs/en/single/',),
        ('reference_ru.html', '/docs/ru/single/',),
        ('docs/index.html', '/docs/en/',),
    ]:
        write_redirect_html(
            os.path.join(args.output_dir, static_redirect[0]),
            static_redirect[1]
        )


if __name__ == '__main__':
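    # Run from the parent directory of this script (the docs directory).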
    os.chdir(os.path.join(os.path.dirname(__file__), '..'))
    website_dir = os.path.join('..', 'website')
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--lang', default='en,es,fr,ru,zh,ja,fa')
    arg_parser.add_argument('--docs-dir', default='.')
    arg_parser.add_argument('--theme-dir', default=website_dir)
    arg_parser.add_argument('--website-dir', default=website_dir)
    arg_parser.add_argument('--output-dir', default='build')
    arg_parser.add_argument('--enable-stable-releases', action='store_true')
    arg_parser.add_argument('--stable-releases-limit', type=int, default=10)
    arg_parser.add_argument('--version-prefix', type=str, default='')
    arg_parser.add_argument('--is-stable-release', action='store_true')
    arg_parser.add_argument('--skip-single-page', action='store_true')
    arg_parser.add_argument('--skip-pdf', action='store_true')
    arg_parser.add_argument('--skip-website', action='store_true')
    arg_parser.add_argument('--minify', action='store_true')
    arg_parser.add_argument('--htmlproofer', action='store_true')
    arg_parser.add_argument('--no-docs-macros', action='store_true')
    arg_parser.add_argument('--save-raw-single-page', type=str)
    arg_parser.add_argument('--livereload', type=int, default=0)
    arg_parser.add_argument('--verbose', action='store_true')

    args = arg_parser.parse_args()

    logging.basicConfig(
        level=logging.DEBUG if args.verbose else logging.INFO,
        stream=sys.stderr
    )

    logging.getLogger('MARKDOWN').setLevel(logging.INFO)

    args.docs_output_dir = os.path.join(os.path.abspath(args.output_dir), 'docs')

    from github import choose_latest_releases, get_events
    args.stable_releases = choose_latest_releases(args) if args.enable_stable_releases else []
    args.rev = subprocess.check_output('git rev-parse HEAD', shell=True).decode('utf-8').strip()
    args.rev_short = subprocess.check_output('git rev-parse --short HEAD', shell=True).decode('utf-8').strip()
    args.rev_url = f'https://github.com/ClickHouse/ClickHouse/commit/{args.rev}'
    args.events = get_events(args)

    from build import build
    build(args)

    if args.livereload:
        new_args = [arg for arg in sys.argv if not arg.startswith('--livereload')]
        new_args = sys.executable + ' ' + ' '.join(new_args)

        server = livereload.Server()
        server.watch(args.docs_dir + '**/*', livereload.shell(new_args, cwd='tools', shell=True))
        server.watch(args.website_dir + '**/*', livereload.shell(new_args, cwd='tools', shell=True))
        server.serve(
            root=args.output_dir,
            host='0.0.0.0',
            port=args.livereload
        )
        sys.exit(0)