Skip to content

Commit b588cfd

Browse files
committed
Adds Support for COPY TO/FROM Google Cloud Storage
Supports the following Google Cloud Storage URI forms: - gs:// \<bucket\> / \<path\> **Configuration** The simplest way to configure object storage is by creating a JSON config file like `/tmp/gcs.json`: ```bash $ cat /tmp/gcs.json { "gcs_base_url": "http://localhost:4443", "disable_oauth": true, "client_email": "", "private_key_id": "", "private_key": "" } ``` Alternatively, you can use the following environment variables when starting postgres to configure the Google Cloud Storage client: - `GOOGLE_SERVICE_ACCOUNT_KEY`: JSON-serialized service account key - `GOOGLE_SERVICE_ACCOUNT_PATH`: an alternative location for the config file
1 parent 4268795 commit b588cfd

File tree

10 files changed

+145
-7
lines changed

10 files changed

+145
-7
lines changed

.devcontainer/.env

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,9 @@ AZURE_TEST_CONTAINER_NAME=testcontainer
1414
AZURE_TEST_READ_ONLY_SAS="se=2100-05-05&sp=r&sv=2022-11-02&sr=c&sig=YMPFnAHKe9y0o3hFegncbwQTXtAyvsJEgPB2Ne1b9CQ%3D"
1515
AZURE_TEST_READ_WRITE_SAS="se=2100-05-05&sp=rcw&sv=2022-11-02&sr=c&sig=TPz2jEz0t9L651t6rTCQr%2BOjmJHkM76tnCGdcyttnlA%3D"
1616

17+
# GCS tests
18+
GOOGLE_TEST_BUCKET=testbucket
19+
1720
# Others
1821
RUST_TEST_THREADS=1
1922
PG_PARQUET_TEST=true

.devcontainer/create-test-buckets.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,5 @@
33
aws --endpoint-url http://localhost:9000 s3 mb s3://$AWS_S3_TEST_BUCKET
44

55
az storage container create -n $AZURE_TEST_CONTAINER_NAME --connection-string $AZURE_STORAGE_CONNECTION_STRING
6+
7+
curl -v -X POST --data-binary "{\"name\":\"$GOOGLE_TEST_BUCKET\"}" -H "Content-Type: application/json" "http://localhost:4443/storage/v1/b"

.devcontainer/docker-compose.yml

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ services:
2020
depends_on:
2121
- minio
2222
- azurite
23+
- fake-gcs-server
2324

2425
minio:
2526
image: minio/minio
@@ -45,3 +46,16 @@ services:
4546
interval: 6s
4647
timeout: 2s
4748
retries: 3
49+
50+
fake-gcs-server:
51+
image: tustvold/fake-gcs-server
52+
env_file:
53+
- .env
54+
network_mode: host
55+
command: -scheme http -public-host localhost:4443
56+
restart: unless-stopped
57+
healthcheck:
58+
test: ["CMD", "curl", "http://localhost:4443"]
59+
interval: 6s
60+
timeout: 2s
61+
retries: 3

.github/workflows/ci.yml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -132,6 +132,17 @@ jobs:
132132
133133
az storage container create -n $AZURE_TEST_CONTAINER_NAME --connection-string $AZURE_STORAGE_CONNECTION_STRING
134134
135+
- name: Start fake-gcs-server for Google Cloud Storage emulator tests
136+
run: |
137+
docker run -d --env-file .devcontainer/.env -p 4443:4443 tustvold/fake-gcs-server -scheme http -filesystem-root /tmp/gcs -public-host localhost:4443
138+
139+
while ! nc -z localhost 4443; do
140+
echo "Waiting for localhost:4443..."
141+
sleep 1
142+
done
143+
144+
curl -v -X POST --data-binary "{\"name\":\"$GOOGLE_TEST_BUCKET\"}" -H "Content-Type: application/json" "http://localhost:4443/storage/v1/b"
145+
135146
- name: Run tests
136147
run: |
137148
# Run tests with coverage tool

.vscode/settings.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,6 @@
55
"rust-analyzer.checkOnSave": true,
66
"editor.inlayHints.enabled": "offUnlessPressed",
77
"files.watcherExclude": {
8-
"**/target/**": true
9-
}
8+
"**/target/**": true
9+
}
1010
}

Cargo.lock

Lines changed: 1 addition & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ aws-config = { version = "1.5", default-features = false, features = ["rustls"]}
2727
aws-credential-types = {version = "1.2", default-features = false}
2828
futures = "0.3"
2929
home = "0.5"
30-
object_store = {version = "0.11", default-features = false, features = ["aws", "azure"]}
30+
object_store = {version = "0.11", default-features = false, features = ["aws", "azure", "gcp"]}
3131
once_cell = "1"
3232
parquet = {version = "53", default-features = false, features = [
3333
"arrow",

README.md

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -213,6 +213,28 @@ Supported Azure Blob Storage uri formats are shown below:
213213
- azure:// \<container\> / \<path\>
214214
- https:// \<account\>.blob.core.windows.net / \<container\>
215215

216+
#### Google Cloud Storage
217+
218+
The simplest way to configure object storage is by creating a JSON config file like `/tmp/gcs.json`:
219+
220+
```bash
221+
$ cat /tmp/gcs.json
222+
{
223+
"gcs_base_url": "http://localhost:4443",
224+
"disable_oauth": true,
225+
"client_email": "",
226+
"private_key_id": "",
227+
"private_key": ""
228+
}
229+
```
230+
231+
Alternatively, you can use the following environment variables when starting postgres to configure the Google Cloud Storage client:
232+
- `GOOGLE_SERVICE_ACCOUNT_KEY`: JSON-serialized service account key
233+
- `GOOGLE_SERVICE_ACCOUNT_PATH`: an alternative location for the config file
234+
235+
Supported Google Cloud Storage uri formats are shown below:
236+
- gs:// \<bucket\> / \<path\>
237+
216238
## Copy Options
217239
`pg_parquet` supports the following options in the `COPY TO` command:
218240
- `format parquet`: you need to specify this option to read or write Parquet files which does not end with `.parquet[.<compression>]` extension,

src/arrow_parquet/uri_utils.rs

Lines changed: 50 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ use ini::Ini;
1515
use object_store::{
1616
aws::{AmazonS3, AmazonS3Builder},
1717
azure::{AzureConfigKey, MicrosoftAzure, MicrosoftAzureBuilder},
18+
gcp::{GoogleCloudStorage, GoogleCloudStorageBuilder},
1819
local::LocalFileSystem,
1920
path::Path,
2021
ObjectStore, ObjectStoreScheme,
@@ -96,6 +97,17 @@ fn parse_s3_bucket(uri: &Url) -> Option<String> {
9697
None
9798
}
9899

100+
fn parse_gcs_bucket(uri: &Url) -> Option<String> {
101+
let host = uri.host_str()?;
102+
103+
// gs://{bucket}/key
104+
if uri.scheme() == "gs" {
105+
return Some(host.to_string());
106+
}
107+
108+
None
109+
}
110+
99111
fn object_store_with_location(uri: &Url, copy_from: bool) -> (Arc<dyn ObjectStore>, Path) {
100112
let (scheme, path) =
101113
ObjectStoreScheme::parse(uri).unwrap_or_else(|_| panic!("unsupported uri {}", uri));
@@ -121,6 +133,16 @@ fn object_store_with_location(uri: &Url, copy_from: bool) -> (Arc<dyn ObjectStor
121133

122134
(storage_container, path)
123135
}
136+
ObjectStoreScheme::GoogleCloudStorage => {
137+
let bucket_name = parse_gcs_bucket(uri).unwrap_or_else(|| {
138+
panic!("failed to parse bucket name from uri: {}", uri);
139+
});
140+
141+
let storage_container = PG_BACKEND_TOKIO_RUNTIME
142+
.block_on(async { Arc::new(get_gcs_object_store(&bucket_name).await) });
143+
144+
(storage_container, path)
145+
}
124146
ObjectStoreScheme::Local => {
125147
let uri = uri_as_string(uri);
126148

@@ -262,6 +284,25 @@ async fn get_azure_object_store(container_name: &str) -> MicrosoftAzure {
262284
azure_builder.build().unwrap_or_else(|e| panic!("{}", e))
263285
}
264286

287+
async fn get_gcs_object_store(bucket_name: &str) -> GoogleCloudStorage {
288+
let mut gcs_builder = GoogleCloudStorageBuilder::from_env().with_bucket_name(bucket_name);
289+
290+
if is_testing() {
291+
// use fake-gcs-server for testing
292+
gcs_builder = gcs_builder.with_service_account_key(
293+
"{
294+
\"gcs_base_url\": \"http://localhost:4443\",
295+
\"disable_oauth\": true,
296+
\"client_email\": \"\",
297+
\"private_key_id\": \"\",
298+
\"private_key\": \"\"
299+
}",
300+
);
301+
}
302+
303+
gcs_builder.build().unwrap_or_else(|e| panic!("{}", e))
304+
}
305+
265306
fn is_testing() -> bool {
266307
std::env::var("PG_PARQUET_TEST").is_ok()
267308
}
@@ -284,13 +325,20 @@ pub(crate) fn parse_uri(uri: &str) -> Url {
284325
} else if scheme == ObjectStoreScheme::MicrosoftAzure {
285326
parse_azure_blob_container(&uri).unwrap_or_else(|| {
286327
panic!(
287-
"failed to parse container name from azure blob storage uri {}",
328+
"failed to parse container name from Azure Blob Storage uri {}",
329+
uri
330+
)
331+
});
332+
} else if scheme == ObjectStoreScheme::GoogleCloudStorage {
333+
parse_gcs_bucket(&uri).unwrap_or_else(|| {
334+
panic!(
335+
"failed to parse bucket name from Google Cloud Storage uri {}",
288336
uri
289337
)
290338
});
291339
} else {
292340
panic!(
293-
"unsupported uri {}. Only Azure and S3 uris are supported.",
341+
"unsupported uri {}. Only Azure Blob Storage, S3 and Google Cloud Storage uris are supported.",
294342
uri
295343
);
296344
};

src/pgrx_tests/object_store.rs

Lines changed: 39 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -411,10 +411,47 @@ mod tests {
411411
}
412412

413413
#[pg_test]
414-
#[should_panic(expected = "unsupported uri gs://testbucket")]
414+
fn test_gcs_from_env() {
415+
let test_bucket_name: String =
416+
std::env::var("GOOGLE_TEST_BUCKET").expect("GOOGLE_TEST_BUCKET not found");
417+
418+
let gcs_uri = format!("gs://{}/pg_parquet_test.parquet", test_bucket_name);
419+
420+
let test_table = TestTable::<i32>::new("int4".into()).with_uri(gcs_uri);
421+
422+
test_table.insert("INSERT INTO test_expected (a) VALUES (1), (2), (null);");
423+
test_table.assert_expected_and_result_rows();
424+
}
425+
426+
#[pg_test]
427+
#[should_panic(expected = "404 Not Found")]
428+
fn test_gcs_write_wrong_bucket() {
429+
let s3_uri = "gs://randombucketwhichdoesnotexist/pg_parquet_test.parquet";
430+
431+
let copy_to_command = format!(
432+
"COPY (SELECT i FROM generate_series(1,10) i) TO '{}';",
433+
s3_uri
434+
);
435+
Spi::run(copy_to_command.as_str()).unwrap();
436+
}
437+
438+
#[pg_test]
439+
#[should_panic(expected = "404 Not Found")]
440+
fn test_gcs_read_wrong_bucket() {
441+
let gcs_uri = "gs://randombucketwhichdoesnotexist/pg_parquet_test.parquet";
442+
443+
let create_table_command = "CREATE TABLE test_table (a int);";
444+
Spi::run(create_table_command).unwrap();
445+
446+
let copy_from_command = format!("COPY test_table FROM '{}';", gcs_uri);
447+
Spi::run(copy_from_command.as_str()).unwrap();
448+
}
449+
450+
#[pg_test]
451+
#[should_panic(expected = "unsupported uri http://testbucket")]
415452
fn test_unsupported_uri() {
416453
let test_table =
417-
TestTable::<i32>::new("int4".into()).with_uri("gs://testbucket".to_string());
454+
TestTable::<i32>::new("int4".into()).with_uri("http://testbucket".to_string());
418455
test_table.insert("INSERT INTO test_expected (a) VALUES (1), (2), (null);");
419456
test_table.assert_expected_and_result_rows();
420457
}

0 commit comments

Comments
 (0)