TiDB 批量导入历史数据
cdh192-150:/opt/script/tidb-import-data.bash
#!/bin/bash
# Batch-import historical Ethereum data into TiDB, one HDFS part group at a time.
# For each part index 4..9: stage the parquet part files from HDFS into the local
# data-source dir, run TiDB Lightning over them, then remove the staged batch so
# the next iteration starts from an empty directory.
set -euo pipefail

readonly DATA_DIR="/dfs/data3/data-source-dir"
readonly HDFS_SRC="hdfs://172.20.192.36:8020/user/hive/warehouse/test.db/history_ethereum"
readonly LIGHTNING_CONF="/home/tidb/tidb-lightning.toml"

# Puts tiup on PATH; only needs to happen once, not per iteration.
source /home/tidb/.bash_profile

for i in {4..9}; do
  if [[ ! -d "$DATA_DIR" ]]; then
    mkdir -p -- "$DATA_DIR"
    chmod 777 -- "$DATA_DIR"
    # Pull this batch's part files from HDFS into the local staging dir.
    # NOTE(review): the glob part-4* also matches part-40, part-41, … — confirm
    # the source files only use single-digit part indexes.
    sudo -u hdfs hadoop fs -get "${HDFS_SRC}/part-${i}"* "${DATA_DIR}/" \
      || { echo "hadoop fs -get failed for part-${i}" >&2; exit 1; }
  fi
  tiup tidb-lightning -config "$LIGHTNING_CONF" \
    || { echo "tidb-lightning failed for part-${i}" >&2; exit 1; }
  # Drop the staged batch; ${VAR:?} guards against an empty path expanding to rm -rf /.
  rm -rf -- "${DATA_DIR:?}"
done
vim tidb-lightning.toml
# TiDB Lightning configuration for the historical Ethereum batch import.
# NB: TOML requires one key/value pair per line — the pasted original had
# several pairs fused onto single lines, which is not valid TOML.

[lightning]
level = "info"
file = "tidb-lightning.log"
# Tolerate up to 10000 row-level errors before aborting the import.
max-error = 10000

[tikv-importer]
# Allow importing into a table that already contains data (multi-batch runs).
incremental-import = true
backend = "local"
# Scratch disk used by the "local" backend to sort KV pairs before ingest.
sorted-kv-dir = "/dfs/data2/sorted-kv-dir"

[mydumper]
data-source-dir = "/dfs/data3/data-source-dir"

# Route every staged snappy parquet file to ADDRESS.T_HISTORY_TRANS_ETHEREUM.
# `pattern` is a regular expression, so the dots must be escaped to match
# literal "." rather than any character.
[[mydumper.files]]
pattern = '.*\.snappy\.parquet'
schema = 'ADDRESS'
table = 'T_HISTORY_TRANS_ETHEREUM'
type = 'parquet'

[tidb]
host = "172.20.192.115"
port = 4000
user = "root"
password = ""
status-port = 10080
pd-addr = "172.20.192.115:2379"

[checkpoint]
enable = true
# File-based checkpoints let an interrupted import resume where it stopped.
driver = "file"