Skip to content

Commit dae60ff

Browse files
committed
merge origin/main
2 parents dd2f396 + cd95677 commit dae60ff

File tree

1,214 files changed

+55291
-32563
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

1,214 files changed

+55291
-32563
lines changed

.github/actions/build_linux/action.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,10 +43,10 @@ runs:
4343
run: |
4444
case ${{ inputs.target }} in
4545
x86_64-unknown-linux-gnu)
46-
flags="-C target-feature=+sse4.2"
46+
flags="-C target-feature=+sse4.2,+avx2"
4747
;;
4848
x86_64-unknown-linux-musl)
49-
flags="-C target-feature=+sse4.2"
49+
flags="-C target-feature=+sse4.2,+avx2"
5050
;;
5151
aarch64-unknown-linux-gnu)
5252
flags=""
@@ -68,7 +68,7 @@ runs:
6868
if: inputs.artifacts == 'all'
6969
shell: bash
7070
run: |
71-
artifacts="meta,metactl,metabench,query,sqllogictests"
71+
artifacts="meta,metactl,metabench,query,sqllogictests,bendsave"
7272
binaries=""
7373
for artifact in ${artifacts//,/ }; do
7474
binaries="${binaries} --bin databend-$artifact"

.github/actions/pack_binaries/action.yml

Lines changed: 14 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ inputs:
55
description: "Release target"
66
required: true
77
category:
8-
description: "Release default/hdfs/udf/testsuite"
8+
description: "Release default/hdfs/udf/testsuite/dbg"
99
required: false
1010
default: default
1111
version:
@@ -15,14 +15,24 @@ inputs:
1515
runs:
1616
using: "composite"
1717
steps:
18-
- name: Download artifact
18+
- name: Download artifact for dbg
19+
if: inputs.category == 'dbg'
20+
uses: ./.github/actions/artifact_download
21+
with:
22+
sha: ${{ github.sha }}
23+
target: ${{ inputs.target }}
24+
category: default
25+
path: distro/bin
26+
artifacts: metactl,meta,query,query.debug,bendsave
27+
- name: Download artifact for others
28+
if: inputs.category != 'dbg'
1929
uses: ./.github/actions/artifact_download
2030
with:
2131
sha: ${{ github.sha }}
2232
target: ${{ inputs.target }}
2333
category: ${{ inputs.category }}
2434
path: distro/bin
25-
artifacts: metactl,meta,query,query.debug
35+
artifacts: metactl,meta,query,bendsave
2636
- name: Get Latest BendSQL
2737
id: bendsql
2838
uses: pozetroninc/github-action-get-latest-release@master
@@ -53,7 +63,7 @@ runs:
5363
cp ./scripts/distribution/configs/databend-* distro/configs/
5464
cp ./scripts/distribution/release-readme.txt distro/readme.txt
5565
cp -r ./scripts/distribution/package-scripts/* distro/scripts/
56-
tar -C ./distro --exclude='*.debug' -czvf ${pkg_name}.tar.gz bin configs systemd scripts readme.txt
66+
tar -C ./distro -czvf ${pkg_name}.tar.gz bin configs systemd scripts readme.txt
5767
sha256sum ${pkg_name}.tar.gz >> sha256-${pkg_name}.txt
5868
echo "pkg_name=$pkg_name" >> $GITHUB_OUTPUT
5969
- name: post sha256
@@ -68,37 +78,3 @@ runs:
6878
name: ${{ steps.pack_binaries.outputs.pkg_name }}.tar.gz
6979
path: ${{ steps.pack_binaries.outputs.pkg_name }}.tar.gz
7080
retention-days: 1
71-
- name: Pack DBG Binaries
72-
id: pack_dbg_binaries
73-
shell: bash
74-
run: |
75-
target=${{ inputs.target }}
76-
version=${{ inputs.version }}
77-
case ${{ inputs.category }} in
78-
default)
79-
pkg_name="databend-dbg-${version}-${target}"
80-
;;
81-
*)
82-
pkg_name="databend-dbg-${{ inputs.category }}-${version}-${target}"
83-
;;
84-
esac
85-
mkdir -p distro/{bin,configs,systemd,scripts}
86-
cp ./scripts/distribution/systemd/databend-* distro/systemd/
87-
cp ./scripts/distribution/configs/databend-* distro/configs/
88-
cp ./scripts/distribution/release-readme.txt distro/readme.txt
89-
cp -r ./scripts/distribution/package-scripts/* distro/scripts/
90-
tar -C ./distro -czvf ${pkg_name}.tar.gz bin configs systemd scripts readme.txt
91-
sha256sum ${pkg_name}.tar.gz >> sha256-${pkg_name}.txt
92-
echo "pkg_name=$pkg_name" >> $GITHUB_OUTPUT
93-
- name: post dbg sha256
94-
uses: actions/upload-artifact@v4
95-
with:
96-
name: sha256sums-${{ inputs.category }}-${{ inputs.target }}-gdb
97-
path: sha256-${{ steps.pack_dbg_binaries.outputs.pkg_name }}.txt
98-
retention-days: 1
99-
- name: post dbg binaries
100-
uses: actions/upload-artifact@v4
101-
with:
102-
name: ${{ steps.pack_dbg_binaries.outputs.pkg_name }}.tar.gz
103-
path: ${{ steps.pack_dbg_binaries.outputs.pkg_name }}.tar.gz
104-
retention-days: 1

.github/actions/pack_deb/action.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ runs:
6464
export deb_version="${{ steps.info.outputs.deb_version }}"
6565
export deb_arch="${{ steps.info.outputs.deb_arch }}"
6666
pkg_name="databend-query_${deb_version}_${deb_arch}.deb"
67-
nfpm pkg --packager deb -t "$pkg_name" -f <(envsubst '${version} ${path} ${arch}' < scripts/distribution/nfpm-query.yaml)
67+
nfpm pkg --packager deb -t "${path}/${pkg_name}" -f <(envsubst '${version} ${path} ${arch}' < scripts/distribution/nfpm-query.yaml)
6868
6969
- name: Build Databend Meta Package
7070
shell: bash
@@ -74,7 +74,7 @@ runs:
7474
export deb_version="${{ steps.info.outputs.deb_version }}"
7575
export deb_arch="${{ steps.info.outputs.deb_arch }}"
7676
pkg_name="databend-meta_${deb_version}_${deb_arch}.deb"
77-
nfpm pkg --packager deb -t "$pkg_name" -f <(envsubst '${version} ${path} ${arch}' < scripts/distribution/nfpm-meta.yaml)
77+
nfpm pkg --packager deb -t "${path}/${pkg_name}" -f <(envsubst '${version} ${path} ${arch}' < scripts/distribution/nfpm-meta.yaml)
7878
7979
- name: Build Databend Debug Package
8080
shell: bash
@@ -84,7 +84,7 @@ runs:
8484
export deb_version="${{ steps.info.outputs.deb_version }}"
8585
export deb_arch="${{ steps.info.outputs.deb_arch }}"
8686
pkg_name="databend-query-dbg_${deb_version}_${deb_arch}.deb"
87-
nfpm pkg --packager deb -t "$pkg_name" -f <(envsubst '${version} ${path} ${arch}' < scripts/distribution/nfpm-query-dbg.yaml)
87+
nfpm pkg --packager deb -t "${path}/${pkg_name}" -f <(envsubst '${version} ${path} ${arch}' < scripts/distribution/nfpm-query-dbg.yaml)
8888
8989
- name: Update release to github
9090
shell: bash

.github/actions/publish_binary/action.yml

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ runs:
2323
default)
2424
publish_name="databend-${{ inputs.version }}-${{ inputs.target }}"
2525
;;
26-
hdfs|udf|testsuite)
26+
hdfs|udf|testsuite|dbg)
2727
publish_name="databend-${{ inputs.category }}-${{ inputs.version }}-${{ inputs.target }}"
2828
;;
2929
*)
@@ -38,18 +38,12 @@ runs:
3838
# Reference: https://cli.github.com/manual/gh_release_upload
3939
run: |
4040
gh release upload ${{ inputs.version }} ${{ steps.name.outputs.name }}.tar.gz --clobber
41-
if [ -f ${{ steps.name.outputs.name }}-dbg.tar.gz ]; then
42-
gh release upload ${{ inputs.version }} ${{ steps.name.outputs.name }}-dbg.tar.gz --clobber
43-
fi
4441
4542
- name: Sync normal release to R2
4643
shell: bash
4744
if: inputs.category == 'default'
4845
run: |
4946
aws s3 cp ${{ steps.name.outputs.name }}.tar.gz s3://repo/databend/${{ inputs.version }}/${{ steps.name.outputs.name }}.tar.gz --no-progress --checksum-algorithm=CRC32
50-
if [ -f ${{ steps.name.outputs.name }}-dbg.tar.gz ]; then
51-
aws s3 cp ${{ steps.name.outputs.name }}-dbg.tar.gz s3://repo/databend/${{ inputs.version }}/${{ steps.name.outputs.name }}-dbg.tar.gz --no-progress --checksum-algorithm=CRC32
52-
fi
5347
gh api /repos/databendlabs/databend/tags > tags.json
5448
aws s3 cp ./tags.json s3://repo/databend/tags.json --no-progress --checksum-algorithm=CRC32
5549
gh api /repos/databendlabs/databend/releases > releases.json

.github/actions/test_sqllogic_cluster_linux/action.yml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,11 @@ inputs:
99
description: "logic test handlers, mysql,http,clickhouse"
1010
required: false
1111
default: ""
12+
parallel:
13+
description: "logic test parallel"
14+
required: false
15+
default: ""
16+
1217
runs:
1318
using: "composite"
1419
steps:
@@ -20,4 +25,6 @@ runs:
2025
shell: bash
2126
env:
2227
TEST_HANDLERS: ${{ inputs.handlers }}
28+
TEST_PARALLEL: ${{ inputs.parallel }}
29+
TEST_EXT_ARGS: '--skip_file tpcds_spill_1.test,tpcds_spill_2.test,tpcds_spill_3.test'
2330
run: bash ./scripts/ci/ci-run-sqllogic-tests-cluster.sh ${{ inputs.dirs }}

.github/actions/test_sqllogic_iceberg_tpch/action.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ runs:
3232
pip install pyspark
3333
python3 tests/sqllogictests/scripts/prepare_iceberg_tpch_data.py
3434
35+
3536
- name: Run sqllogic Tests with Standalone lib
3637
shell: bash
3738
env:

.github/actions/test_sqllogic_stage/action.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ inputs:
1313
description: "storage backend for stage, choices: s3,fs"
1414
required: true
1515
default: ""
16-
deducp:
16+
dedup:
1717
description: "path type for dedup when copy, choices: full_path,sub_path"
1818
required: true
1919
default: ""

.github/actions/test_sqllogic_standalone_linux/action.yml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,12 +29,20 @@ runs:
2929
with:
3030
artifacts: sqllogictests,meta,query
3131

32+
- uses: actions/github-script@v7
33+
id: ext-args
34+
env:
35+
DIRS: ${{ inputs.dirs }}
36+
with:
37+
script: require('.github/actions/test_sqllogic_standalone_linux/script.js')(core)
38+
3239
- name: Run sqllogic Tests with Standalone mode
3340
if: inputs.storage-format == 'all' || inputs.storage-format == 'parquet'
3441
shell: bash
3542
env:
3643
TEST_HANDLERS: ${{ inputs.handlers }}
3744
TEST_PARALLEL: ${{ inputs.parallel }}
45+
TEST_EXT_ARGS: ${{ steps.ext-args.outputs.parquet }}
3846
CACHE_ENABLE_TABLE_META_CACHE: ${{ inputs.enable_table_meta_cache}}
3947
run: bash ./scripts/ci/ci-run-sqllogic-tests.sh ${{ inputs.dirs }}
4048

@@ -44,5 +52,6 @@ runs:
4452
env:
4553
TEST_HANDLERS: ${{ inputs.handlers }}
4654
TEST_PARALLEL: ${{ inputs.parallel }}
55+
TEST_EXT_ARGS: '--skip_file tpcds_spill_1.test,tpcds_spill_2.test,tpcds_spill_3.test'
4756
CACHE_ENABLE_TABLE_META_CACHE: ${{ inputs.enable_table_meta_cache}}
4857
run: bash ./scripts/ci/ci-run-sqllogic-tests-native.sh ${{ inputs.dirs }}
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
module.exports = (core) => {
2+
switch (process.env.DIRS) {
3+
case 'tpcds':
4+
const parquet = [
5+
'--skip_file tpcds_spill_2.test,tpcds_spill_3.test',
6+
'--skip_file tpcds_spill_1.test,tpcds_spill_3.test',
7+
'--skip_file tpcds_spill_1.test,tpcds_spill_2.test',
8+
][Date.now() % 3];
9+
core.setOutput('parquet', parquet)
10+
return
11+
}
12+
}
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
name: "Test sqllogic Standalone(minio) with bendsave"
2+
description: "Running sqllogic tests in standalone mode"
3+
4+
runs:
5+
using: "composite"
6+
steps:
7+
- uses: ./.github/actions/setup_test
8+
with:
9+
artifacts: sqllogictests,meta,query,bendsave
10+
11+
- name: Minio Setup for (ubuntu-latest only)
12+
shell: bash
13+
run: |
14+
docker run -d --network host --name minio \
15+
-e "MINIO_ACCESS_KEY=minioadmin" \
16+
-e "MINIO_SECRET_KEY=minioadmin" \
17+
-e "MINIO_ADDRESS=:9900" \
18+
-v /tmp/data:/data \
19+
-v /tmp/config:/root/.minio \
20+
minio/minio server /data
21+
22+
export AWS_ACCESS_KEY_ID=minioadmin
23+
export AWS_SECRET_ACCESS_KEY=minioadmin
24+
export AWS_EC2_METADATA_DISABLED=true
25+
26+
aws --endpoint-url http://127.0.0.1:9900/ s3 mb s3://testbucket
27+
aws --endpoint-url http://127.0.0.1:9900/ s3 mb s3://backupbucket
28+
29+
- name: Run sqllogic Tests with Standalone mode
30+
shell: bash
31+
env:
32+
TEST_HANDLERS: "http"
33+
run: bash ./scripts/ci/ci-run-sqllogic-tests-minio-with-bendsave.sh tpch
34+
35+
- name: Run bendsave backup
36+
shell: bash
37+
env:
38+
AWS_ACCESS_KEY_ID: minioadmin
39+
AWS_SECRET_ACCESS_KEY: minioadmin
40+
AWS_EC2_METADATA_DISABLED: true
41+
AWS_REGION: us-west-2
42+
run: |
43+
export STORAGE_TYPE=s3
44+
export STORAGE_S3_BUCKET=testbucket
45+
export STORAGE_S3_ROOT=admin
46+
export STORAGE_S3_ENDPOINT_URL=http://127.0.0.1:9900
47+
export STORAGE_S3_ACCESS_KEY_ID=minioadmin
48+
export STORAGE_S3_SECRET_ACCESS_KEY=minioadmin
49+
export STORAGE_ALLOW_INSECURE=true
50+
51+
./target/${{ env.BUILD_PROFILE }}/databend-bendsave backup --from ./scripts/ci/deploy/config/databend-query-node-1.toml --to s3://backupbucket?endpoint=http://127.0.0.1:9900/
52+
53+
aws --endpoint-url http://127.0.0.1:9900/ s3 ls s3://backupbucket --recursive
54+
55+
- name: Destroy the existing services.
56+
shell: bash
57+
env:
58+
AWS_ACCESS_KEY_ID: minioadmin
59+
AWS_SECRET_ACCESS_KEY: minioadmin
60+
AWS_EC2_METADATA_DISABLED: true
61+
AWS_REGION: us-west-2
62+
run: |
63+
# kill all services
64+
pkill databend-query
65+
pkill databend-meta
66+
# destroy databend query
67+
aws --endpoint-url http://127.0.0.1:9900/ s3 rm s3://testbucket --recursive
68+
# destroy databend meta
69+
rm -rf /tmp/databend/meta1
70+
71+
- name: Run bendsave restore
72+
shell: bash
73+
env:
74+
AWS_ACCESS_KEY_ID: minioadmin
75+
AWS_SECRET_ACCESS_KEY: minioadmin
76+
AWS_EC2_METADATA_DISABLED: true
77+
AWS_REGION: us-west-2
78+
run: |
79+
export STORAGE_TYPE=s3
80+
export STORAGE_S3_BUCKET=testbucket
81+
export STORAGE_S3_ROOT=admin
82+
export STORAGE_S3_ENDPOINT_URL=http://127.0.0.1:9900
83+
export STORAGE_S3_ACCESS_KEY_ID=minioadmin
84+
export STORAGE_S3_SECRET_ACCESS_KEY=minioadmin
85+
export STORAGE_ALLOW_INSECURE=true
86+
87+
./target/${{ env.BUILD_PROFILE }}/databend-bendsave restore --from s3://backupbucket?endpoint=http://127.0.0.1:9900/ --to-query ./scripts/ci/deploy/config/databend-query-node-1.toml --to-meta ./scripts/ci/deploy/config/databend-meta-node-for-bendsave.toml --confirm
88+
89+
- name: Run sqllogic Tests with Standalone mode again for testing
90+
shell: bash
91+
env:
92+
TEST_HANDLERS: "http"
93+
run: bash ./scripts/ci/ci-run-sqllogic-tests-minio-with-bendsave.sh tpch

0 commit comments

Comments
 (0)