# WikiStat

The dataset contains 0.5 trillion records.

See the video from FOSDEM 2023: https://www.youtube.com/watch?v=JlcI2Vfz_uk
And the presentation: https://presentations.clickhouse.com/fosdem2023/

Data source: https://dumps.wikimedia.org/other/pageviews/

Getting the list of links:
``` bash
for i in {2015..2023}; do
  for j in {01..12}; do
    echo "${i}-${j}" >&2
    curl -sSL "https://dumps.wikimedia.org/other/pageviews/$i/$i-$j/" \
      | grep -oE 'pageviews-[0-9]+-[0-9]+\.gz'
  done
done | sort | uniq | tee links.txt
```
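To see how many hourly dumps the list covers, you can count the lines of `links.txt`; as a small sketch, `clickhouse-local` can read the file directly (assuming it was written to the current directory, as in the command above):

``` sql
-- Count the hourly dump files listed in links.txt (run with clickhouse-local).
SELECT count() FROM file('links.txt', LineAsString);
```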
Downloading the data:

``` bash
sed -r 's!pageviews-([0-9]{4})([0-9]{2})[0-9]{2}-[0-9]+\.gz!https://dumps.wikimedia.org/other/pageviews/\1/\1-\2/\0!' \
  links.txt | xargs -P3 wget --continue
```

(it will take about 3 days)
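Before creating the table, it can help to peek at a few raw lines to see the format the loading query below parses: a domain code, an optional subproject suffix, the page path, hits, and a size field. A minimal sketch with `clickhouse-local`; the file name is only an example of the hourly dump naming:

``` sql
-- Preview a few raw lines of one downloaded hourly dump (file name is illustrative).
SELECT line
FROM file('pageviews-20230101-000000.gz', LineAsString)
LIMIT 5;
```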
Creating a table:

``` sql
CREATE TABLE wikistat
(
    time DateTime CODEC(Delta, ZSTD(3)),
    project LowCardinality(String),
    subproject LowCardinality(String),
    path String CODEC(ZSTD(3)),
    hits UInt64 CODEC(ZSTD(3))
)
ENGINE = MergeTree
ORDER BY (path, time);
```
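The column codecs and the `(path, time)` sort key are there to keep the table small on disk. One way to check what they actually save on your data, after loading, is to query `system.columns` (a sketch; adjust the database filter if the table lives elsewhere):

``` sql
-- Per-column on-disk vs. uncompressed size for the wikistat table.
SELECT
    name,
    formatReadableSize(data_compressed_bytes) AS compressed,
    formatReadableSize(data_uncompressed_bytes) AS uncompressed
FROM system.columns
WHERE database = currentDatabase() AND table = 'wikistat'
ORDER BY data_compressed_bytes DESC;
```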
Loading the data:

``` bash
clickhouse-local --query "
  WITH replaceRegexpOne(_path, '^.+pageviews-(\\d{4})(\\d{2})(\\d{2})-(\\d{2})(\\d{2})(\\d{2}).gz$', '\1-\2-\3 \4-\5-\6')::DateTime AS time,
       extractGroups(line, '^([^ \\.]+)(\\.[^ ]+)? +([^ ]+) +(\\d+) +(\\d+)$') AS values
  SELECT
    time,
    values[1] AS project,
    values[2] AS subproject,
    values[3] AS path,
    (values[4])::UInt64 AS hits
  FROM file('pageviews*.gz', LineAsString)
  WHERE length(values) = 5 FORMAT Native
" | clickhouse-client --query "INSERT INTO wikistat FORMAT Native"
```
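Once the insert finishes, a quick aggregation confirms the data is queryable; the project value and the date range below are only illustrative:

``` sql
-- Top pages by total hits for the English project over one day (values are illustrative).
SELECT path, sum(hits) AS total_hits
FROM wikistat
WHERE project = 'en'
  AND time >= toDateTime('2023-01-01 00:00:00')
  AND time <  toDateTime('2023-01-02 00:00:00')
GROUP BY path
ORDER BY total_hits DESC
LIMIT 10;
```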