Skip to content

Commit fdb69ae

Browse files
committed
Support Google Cloud Storage
1 parent 6014e11 commit fdb69ae

File tree

14 files changed

+254
-24
lines changed

14 files changed

+254
-24
lines changed

.devcontainer/.env

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,5 +18,10 @@ AZURE_TEST_CONTAINER_NAME=testcontainer
1818
AZURE_TEST_READ_ONLY_SAS="se=2100-05-05&sp=r&sv=2022-11-02&sr=c&sig=YMPFnAHKe9y0o3hFegncbwQTXtAyvsJEgPB2Ne1b9CQ%3D"
1919
AZURE_TEST_READ_WRITE_SAS="se=2100-05-05&sp=rcw&sv=2022-11-02&sr=c&sig=TPz2jEz0t9L651t6rTCQr%2BOjmJHkM76tnCGdcyttnlA%3D"
2020

21+
# GCS tests
22+
GOOGLE_TEST_BUCKET=testbucket
23+
GOOGLE_SERVICE_ACCOUNT_KEY='{"gcs_base_url": "http://localhost:4443","disable_oauth": true,"client_email": "","private_key_id": "","private_key": ""}'
24+
GOOGLE_SERVICE_ENDPOINT=http://localhost:4443
25+
2126
# Others
2227
RUST_TEST_THREADS=1

.devcontainer/docker-compose.yml

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ services:
1212
- ${USERPROFILE}${HOME}/.gitconfig:/home/rust/.gitconfig:ro
1313
- ${USERPROFILE}${HOME}/.aws:/home/rust/.aws:rw
1414
- ${USERPROFILE}${HOME}/.azure:/home/rust/.azure:rw
15+
- ${USERPROFILE}${HOME}/.config/gcloud:/home/rust/.config/gcloud:rw
1516
- ./entrypoint.sh:/entrypoint.sh
1617
env_file:
1718
- .env
@@ -20,6 +21,7 @@ services:
2021
depends_on:
2122
- minio
2223
- azurite
24+
- fake-gcs-server
2325

2426
minio:
2527
image: minio/minio
@@ -47,3 +49,16 @@ services:
4749
interval: 6s
4850
timeout: 2s
4951
retries: 3
52+
53+
fake-gcs-server:
54+
image: tustvold/fake-gcs-server
55+
env_file:
56+
- .env
57+
network_mode: host
58+
command: -scheme http -public-host localhost:4443
59+
restart: unless-stopped
60+
healthcheck:
61+
test: ["CMD", "nc", "-z", "localhost", "4443"]
62+
interval: 6s
63+
timeout: 2s
64+
retries: 3

.devcontainer/entrypoint.sh

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,4 +6,8 @@ trap "echo 'Caught termination signal. Exiting...'; exit 0" SIGINT SIGTERM
66
az storage container create -n $AZURE_TEST_CONTAINER_NAME --connection-string $AZURE_STORAGE_CONNECTION_STRING
77
az storage container create -n ${AZURE_TEST_CONTAINER_NAME}2 --connection-string $AZURE_STORAGE_CONNECTION_STRING
88

9+
# create fake-gcs bucket
10+
curl -v -X POST --data-binary "{\"name\":\"$GOOGLE_TEST_BUCKET\"}" -H "Content-Type: application/json" "$GOOGLE_SERVICE_ENDPOINT/storage/v1/b"
11+
curl -v -X POST --data-binary "{\"name\":\"${GOOGLE_TEST_BUCKET}2\"}" -H "Content-Type: application/json" "$GOOGLE_SERVICE_ENDPOINT/storage/v1/b"
12+
913
sleep infinity

.github/workflows/ci.yml

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -140,6 +140,22 @@ jobs:
140140
az storage container create -n $AZURE_TEST_CONTAINER_NAME --connection-string $AZURE_STORAGE_CONNECTION_STRING
141141
az storage container create -n ${AZURE_TEST_CONTAINER_NAME}2 --connection-string $AZURE_STORAGE_CONNECTION_STRING
142142
143+
- name: Start fake-gcs-server for Google Cloud Storage emulator tests
144+
run: |
145+
docker run -d \
146+
--env-file .devcontainer/.env \
147+
-p 4443:4443 \
148+
tustvold/fake-gcs-server -scheme http -public-host localhost:4443
149+
150+
while ! curl $GOOGLE_SERVICE_ENDPOINT; do
151+
echo "Waiting for $GOOGLE_SERVICE_ENDPOINT..."
152+
sleep 1
153+
done
154+
155+
# create bucket
156+
curl -v -X POST --data-binary "{\"name\":\"$GOOGLE_TEST_BUCKET\"}" -H "Content-Type: application/json" "$GOOGLE_SERVICE_ENDPOINT/storage/v1/b"
157+
curl -v -X POST --data-binary "{\"name\":\"${GOOGLE_TEST_BUCKET}2\"}" -H "Content-Type: application/json" "$GOOGLE_SERVICE_ENDPOINT/storage/v1/b"
158+
143159
- name: Run tests
144160
run: |
145161
# Run tests with coverage tool

Cargo.lock

Lines changed: 1 addition & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ aws-credential-types = {version = "1", default-features = false}
2828
azure_storage = {version = "0.21", default-features = false}
2929
futures = "0.3"
3030
home = "0.5"
31-
object_store = {version = "0.11", default-features = false, features = ["aws", "azure"]}
31+
object_store = {version = "0.11", default-features = false, features = ["aws", "azure", "gcp"]}
3232
once_cell = "1"
3333
parquet = {version = "54", default-features = false, features = [
3434
"arrow",

README.md

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ SELECT uri, encode(key, 'escape') as key, encode(value, 'escape') as value FROM
156156
```
157157

158158
## Object Store Support
159-
`pg_parquet` supports reading and writing Parquet files from/to `S3` and `Azure Blob Storage` object stores.
159+
`pg_parquet` supports reading and writing Parquet files from/to `S3`, `Azure Blob Storage` and `Google Cloud Storage` object stores.
160160

161161
> [!NOTE]
162162
> To be able to write into a object store location, you need to grant `parquet_object_store_write` role to your current postgres user.
@@ -239,6 +239,28 @@ Supported authorization methods' priority order is shown below:
239239
2. Sas token,
240240
3. Storage key.
241241

242+
#### Google Cloud Storage
243+
244+
The simplest way to configure object storage is by creating a JSON config file, e.g. `/tmp/gcs.json`:
245+
246+
```bash
247+
$ cat /tmp/gcs.json
248+
{
249+
"gcs_base_url": "http://localhost:4443",
250+
"disable_oauth": true,
251+
"client_email": "",
252+
"private_key_id": "",
253+
"private_key": ""
254+
}
255+
```
256+
257+
Alternatively, you can use the following environment variables when starting postgres to configure the Google Cloud Storage client:
258+
- `GOOGLE_SERVICE_ACCOUNT_KEY`: json serialized service account key **(only via environment variables)**
259+
- `GOOGLE_SERVICE_ACCOUNT_PATH`: an alternative location for the config file **(only via environment variables)**
260+
261+
Supported Google Cloud Storage uri formats are shown below:
262+
- gs:// \<bucket\> / \<path\>
263+
242264
## Copy Options
243265
`pg_parquet` supports the following options in the `COPY TO` command:
244266
- `format parquet`: you need to specify this option to read or write Parquet files which does not end with `.parquet[.<compression>]` extension,

src/arrow_parquet/uri_utils.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ use url::Url;
2020
use crate::{
2121
arrow_parquet::parquet_writer::DEFAULT_ROW_GROUP_SIZE,
2222
object_store::{
23-
aws::parse_s3_bucket, azure::parse_azure_blob_container,
23+
aws::parse_s3_bucket, azure::parse_azure_blob_container, gcs::parse_gcs_bucket,
2424
object_store_cache::get_or_create_object_store,
2525
},
2626
PG_BACKEND_TOKIO_RUNTIME,
@@ -65,6 +65,9 @@ impl ParsedUriInfo {
6565
ObjectStoreScheme::MicrosoftAzure => parse_azure_blob_container(uri)
6666
.ok_or(format!("unsupported azure blob storage uri: {uri}"))
6767
.map(Some),
68+
ObjectStoreScheme::GoogleCloudStorage => parse_gcs_bucket(uri)
69+
.ok_or(format!("unsupported gcs uri {uri}"))
70+
.map(Some),
6871
ObjectStoreScheme::Local => Ok(None),
6972
_ => Err(format!("unsupported scheme {} in uri {}. pg_parquet supports local paths, s3:// or azure:// schemes.",
7073
uri.scheme(), uri))

src/object_store.rs

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,5 @@
1-
use crate::{
2-
arrow_parquet::uri_utils::uri_as_string,
3-
object_store::{
4-
aws::create_s3_object_store, azure::create_azure_object_store,
5-
local_file::create_local_file_object_store,
6-
},
7-
PG_BACKEND_TOKIO_RUNTIME,
8-
};
9-
101
pub(crate) mod aws;
112
pub(crate) mod azure;
3+
pub(crate) mod gcs;
124
pub(crate) mod local_file;
135
pub(crate) mod object_store_cache;

src/object_store/aws.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,9 @@ use aws_credential_types::provider::ProvideCredentials;
55
use object_store::aws::AmazonS3Builder;
66
use url::Url;
77

8-
use super::{object_store_cache::ObjectStoreWithExpiration, PG_BACKEND_TOKIO_RUNTIME};
8+
use crate::PG_BACKEND_TOKIO_RUNTIME;
9+
10+
use super::object_store_cache::ObjectStoreWithExpiration;
911

1012
// create_s3_object_store creates an AmazonS3 object store with the given bucket name.
1113
// It is configured by environment variables and aws config files as fallback method.

0 commit comments

Comments
 (0)