Skip to content

Commit ed65512

Browse files
authored
Merge pull request #432 from duckdb/add_s3_snowflake_and_fix_glue_tests
Add s3 snowflake and fix glue tests
2 parents 22a95f0 + de1ca27 commit ed65512

File tree

7 files changed

+80
-30
lines changed

7 files changed

+80
-30
lines changed

.github/workflows/CloudTesting.yml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ jobs:
5858
AWS_DEFAULT_REGION: ${{secrets.S3_ICEBERG_TEST_USER_REGION}}
5959
R2_TOKEN: ${{secrets.r2_token}}
6060
run: |
61-
python3 scripts/create_s3_insert_table.py --action=delete-and-create --catalogs=s3tables,r2
61+
python3 scripts/create_s3_insert_table.py --action=delete-and-create --catalogs=s3tables,r2,glue
6262
6363
- name: Test with rest catalog
6464
env:
@@ -67,6 +67,8 @@ jobs:
6767
AWS_DEFAULT_REGION: ${{secrets.S3_ICEBERG_TEST_USER_REGION}}
6868
SNOWFLAKE_KEY_ID_GCS: ${{secrets.SNOWFLAKE_KEY_ID_GCS}}
6969
SNOWFLAKE_SECRET_KEY_GCS: ${{secrets.SNOWFLAKE_SECRET_KEY_GCS}}
70+
SNOWFLAKE_KEY_ID_S3: ${{secrets.SNOWFLAKE_KEY_ID_S3}}
71+
SNOWFLAKE_SECRET_KEY_S3: ${{secrets.SNOWFLAKE_SECRET_KEY_S3}}
7072
SNOWFLAKE_CATALOG_URI_GCS: ${{secrets.SNOWFLAKE_CATALOG_URI_GCS}}
7173
R2_TOKEN: ${{secrets.r2_token}}
7274
ICEBERG_REMOTE_INSERT_READY: 1
@@ -76,7 +78,7 @@ jobs:
7678
make test_release
7779
7880
- name: File issue if error
79-
if: failure()
81+
if: ${{ contains(github.ref_name, 'main') && failure() }}
8082
env:
8183
GH_TOKEN: ${{ github.token }}
8284
run: |

test/sql/cloud/glue/test_create_table_glue.test

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,6 @@ require httpfs
1818

1919
require aws
2020

21-
# credentials in CI cannot set up the environment for this test
22-
# need to give the credentials glue:DropTable privileges
23-
mode skip
24-
2521
statement ok
2622
CREATE SECRET (
2723
TYPE S3,

test/sql/cloud/glue/test_direct_keys_glue.test

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -18,11 +18,6 @@ require httpfs
1818

1919
require aws
2020

21-
# TODO: re-enable these tests once we know what account has these
22-
# credentials, and we can grant them access to the glue catalog
23-
# test using keys directory
24-
mode skip
25-
2621
statement ok
2722
CREATE SECRET s1 (
2823
TYPE S3,
@@ -37,10 +32,6 @@ attach '840140254803:s3tablescatalog/pyiceberg-blog-bucket' as my_datalake (
3732
ENDPOINT_TYPE 'GLUE'
3833
);
3934

40-
query T nosort tables_1
41-
show all tables;
42-
----
43-
4435
statement ok
4536
SELECT count(*) FROM my_datalake.myblognamespace.lineitem;
4637

test/sql/cloud/glue/test_direct_keys_glue_no_endpoint_type.test

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,6 @@ require httpfs
1818

1919
require aws
2020

21-
# TODO: re-enable these tests once we know what account has these
22-
# credentials, and we can grant them access to the glue catalog
23-
mode skip
24-
2521
# test using keys directory
2622
statement ok
2723
CREATE SECRET s1 (
@@ -38,10 +34,6 @@ attach '840140254803:s3tablescatalog/pyiceberg-blog-bucket' as my_datalake (
3834
ENDPOINT 'glue.us-east-1.amazonaws.com/iceberg'
3935
);
4036

41-
query T nosort tables_1
42-
show all tables;
43-
----
44-
4537
statement ok
4638
SELECT count(*) FROM my_datalake.myblognamespace.lineitem;
4739

@@ -51,8 +43,6 @@ drop secret s1;
5143
statement ok
5244
detach my_datalake;
5345

54-
mode unskip
55-
5646
# test using assume role
5747
statement ok
5848
CREATE SECRET assume_role_secret (

test/sql/cloud/glue/test_insert_glue.test

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,6 @@ require httpfs
2020

2121
require aws
2222

23-
# credentials in CI cannot set up the environment for this test
24-
# need to give the credentials glue:DropTable privileges
25-
mode skip
26-
2723
statement ok
2824
CREATE SECRET (
2925
TYPE S3,

test/sql/cloud/snowflake/test_snowflake.test

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,9 +40,14 @@ attach 'GCS_catalog' as my_datalake (
4040
ENDPOINT '${SNOWFLAKE_CATALOG_URI_GCS}'
4141
);
4242

43+
statement ok
44+
create schema if not exists my_datalake.test_create_schema;
45+
46+
statement ok
47+
create table my_datalake.test_create_schema.table1 as select range a from range(10);
4348

4449
query I
45-
select * from my_datalake.default.duckdb_created_table;
50+
select * from my_datalake.test_create_schema.table1;
4651
----
4752
0
4853
1
@@ -54,3 +59,9 @@ select * from my_datalake.default.duckdb_created_table;
5459
7
5560
8
5661
9
62+
63+
statement ok
64+
drop table if exists my_datalake.test_create_schema.table1;
65+
66+
statement ok
67+
drop schema if exists my_datalake.test_create_schema;
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
# name: test/sql/cloud/snowflake/test_snowflake_s3.test
2+
# group: [snowflake]
3+
4+
require-env SNOWFLAKE_KEY_ID_S3
5+
6+
require-env SNOWFLAKE_SECRET_KEY_S3
7+
8+
require-env SNOWFLAKE_CATALOG_URI_GCS
9+
10+
require avro
11+
12+
require parquet
13+
14+
require iceberg
15+
16+
require httpfs
17+
18+
require aws
19+
20+
# Do not ignore 'HTTP' error messages!
21+
set ignore_error_messages
22+
23+
statement ok
24+
create secret polaris_secret (
25+
TYPE ICEBERG,
26+
CLIENT_ID '${SNOWFLAKE_KEY_ID_S3}',
27+
CLIENT_SECRET '${SNOWFLAKE_SECRET_KEY_S3}',
28+
ENDPOINT '${SNOWFLAKE_CATALOG_URI_GCS}'
29+
);
30+
31+
32+
statement ok
33+
attach 's3-catalog' as my_datalake (
34+
type ICEBERG,
35+
default_region 'eu-west-2',
36+
ENDPOINT '${SNOWFLAKE_CATALOG_URI_GCS}'
37+
);
38+
39+
40+
statement ok
41+
create schema if not exists my_datalake.test_create_schema;
42+
43+
statement ok
44+
create table my_datalake.test_create_schema.table1 as select range a from range(10);
45+
46+
query I
47+
select * from my_datalake.test_create_schema.table1;
48+
----
49+
0
50+
1
51+
2
52+
3
53+
4
54+
5
55+
6
56+
7
57+
8
58+
9
59+
60+
statement ok
61+
drop table if exists my_datalake.test_create_schema.table1;
62+
63+
statement ok
64+
drop schema if exists my_datalake.test_create_schema;

0 commit comments

Comments
 (0)