Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-27 01:51:59 +00:00)

Merge pull request #62690 from peter279k/improve_wikistat_data

Add a data-loading approach with cleaning

Commit 59df591ba1

@@ -13,7 +13,7 @@ And the presentation: https://presentations.clickhouse.com/fosdem2023/
 Data source: https://dumps.wikimedia.org/other/pageviews/
 
 Getting the list of links:
 
-```
+``` shell
 for i in {2015..2023}; do
   for j in {01..12}; do
     echo "${i}-${j}" >&2
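
For context, this hunk shows only the head of the link-listing loop; the part that actually scrapes each monthly index page lies outside the diff context. A minimal sketch of what the complete loop might look like, assuming the index pages are fetched with curl and the filenames extracted with grep (the exact commands in the file may differ):

``` shell
# Build links.txt: one pageviews-*.gz filename per line, sorted and deduplicated.
# Assumption: each monthly index at dumps.wikimedia.org lists files named
# pageviews-YYYYMMDD-HHMMSS.gz; the curl/grep body here is illustrative.
for i in {2015..2023}; do
  for j in {01..12}; do
    echo "${i}-${j}" >&2  # progress marker on stderr, kept out of the pipeline
    curl -sSL "https://dumps.wikimedia.org/other/pageviews/${i}/${i}-${j}/" \
      | grep -oE 'pageviews-[0-9]+-[0-9]+\.gz'
  done
done | sort | uniq | tee links.txt
```
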
@@ -24,7 +24,7 @@ done | sort | uniq | tee links.txt
 ```
 
 Downloading the data:
 
-```
+``` shell
 sed -r 's!pageviews-([0-9]{4})([0-9]{2})[0-9]{2}-[0-9]+\.gz!https://dumps.wikimedia.org/other/pageviews/\1/\1-\2/\0!' \
   links.txt | xargs -P3 wget --continue
 ```
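
The sed expression rewrites each bare filename from links.txt into a full URL by reusing the year (\1) and month (\2) captured from the filename, with \0 re-appending the whole matched name; xargs -P3 then runs three wget processes in parallel, and --continue makes the step resumable. A quick way to sanity-check the rewrite before starting the download (the sample filename is made up):

``` shell
# Preview the URL produced for one filename; nothing is downloaded here.
echo 'pageviews-20230101-000000.gz' \
  | sed -r 's!pageviews-([0-9]{4})([0-9]{2})[0-9]{2}-[0-9]+\.gz!https://dumps.wikimedia.org/other/pageviews/\1/\1-\2/\0!'
# Expected: https://dumps.wikimedia.org/other/pageviews/2023/2023-01/pageviews-20230101-000000.gz
```
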
@@ -40,8 +40,7 @@ CREATE TABLE wikistat
     project LowCardinality(String),
     subproject LowCardinality(String),
     path String CODEC(ZSTD(3)),
-    hits UInt64 CODEC(ZSTD(3)),
-    size UInt64 CODEC(ZSTD(3))
+    hits UInt64 CODEC(ZSTD(3))
 )
 ENGINE = MergeTree
 ORDER BY (path, time);
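
This hunk drops the size column, so the table keeps only time, project, subproject, path, and hits (the diff itself does not state the rationale). A sketch of creating the resulting table from the shell; the time column sits above the hunk, so its definition and codec here are an assumption:

``` shell
# Assumes clickhouse-client can reach a local server with default credentials.
clickhouse-client --query "
CREATE TABLE IF NOT EXISTS wikistat
(
    time DateTime CODEC(Delta, ZSTD(3)),  -- assumed; defined above the hunk
    project LowCardinality(String),
    subproject LowCardinality(String),
    path String CODEC(ZSTD(3)),
    hits UInt64 CODEC(ZSTD(3))
)
ENGINE = MergeTree
ORDER BY (path, time);
"
```
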
@@ -49,7 +48,7 @@ ORDER BY (path, time);
 
 Loading the data:
 
-```
+``` shell
 clickhouse-local --query "
     WITH replaceRegexpOne(_path, '^.+pageviews-(\\d{4})(\\d{2})(\\d{2})-(\\d{2})(\\d{2})(\\d{2}).gz$', '\1-\2-\3 \4-\5-\6')::DateTime AS time,
          extractGroups(line, '^([^ \\.]+)(\\.[^ ]+)? +([^ ]+) +(\\d+) +(\\d+)$') AS values
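
The loader derives each row's timestamp from the file path and splits every line with extractGroups; rows that do not yield exactly five groups are filtered out by the WHERE clause in the next hunk. The parsing step can be tried in isolation on a synthetic record, following the same shell-escaping convention the doc uses (the sample line is invented):

``` shell
# Dry-run the line parser on one made-up pageviews record.
clickhouse-local --query "
SELECT extractGroups(
    'en.m Main_Page 42 0',
    '^([^ \\.]+)(\\.[^ ]+)? +([^ ]+) +(\\d+) +(\\d+)\$') AS values
"
# Expected: ['en','.m','Main_Page','42','0']
```
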
@@ -58,9 +57,27 @@ clickhouse-local --query "
     values[1] AS project,
     values[2] AS subproject,
     values[3] AS path,
-    (values[4])::UInt64 AS hits,
-    (values[5])::UInt64 AS size
+    (values[4])::UInt64 AS hits
 FROM file('pageviews*.gz', LineAsString)
 WHERE length(values) = 5 FORMAT Native
 " | clickhouse-client --query "INSERT INTO wikistat FORMAT Native"
 ```
+
+Or loading the cleaned data:
+
+``` sql
+INSERT INTO wikistat WITH
+    parseDateTimeBestEffort(extract(_file, '^pageviews-([\\d\\-]+)\\.gz$')) AS time,
+    splitByChar(' ', line) AS values,
+    splitByChar('.', values[1]) AS projects
+SELECT
+    time,
+    projects[1] AS project,
+    projects[2] AS subproject,
+    decodeURLComponent(values[2]) AS path,
+    CAST(values[3], 'UInt64') AS hits
+FROM s3(
+    'https://clickhouse-public-datasets.s3.amazonaws.com/wikistat/original/pageviews*.gz',
+    LineAsString)
+WHERE length(values) >= 3
+```
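
The added s3() variant parses lines with splitByChar instead of a regular expression, URL-decodes the page path, and reads the gzipped originals straight from the public S3 bucket, so no local download is needed. After either loading path, a quick aggregate confirms the insert landed (an illustrative check, not part of the PR):

``` shell
clickhouse-client --query "
SELECT count() AS rows, min(time) AS first, max(time) AS last
FROM wikistat
"
```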