Skip to content

Commit 318a98f

Browse files
Merge branch 'main' into feature/94-comply-with-latest-delta-sharing-protocol
2 parents db96c66 + 37edf36 commit 318a98f

19 files changed

+340
-612
lines changed

.github/workflows/build_doc.yaml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ jobs:
88
runs-on: ubuntu-latest
99
steps:
1010
- uses: actions/checkout@v4
11-
- uses: actions/setup-java@v3
11+
- uses: actions/setup-java@v4
1212
with:
1313
cache: 'gradle'
1414
distribution: temurin
@@ -18,13 +18,13 @@ jobs:
1818
- name: Execute npm run build
1919
run: |
2020
cp -r protocol docsite/static/protocol
21-
./gradlew docsite:npm_run_build --no-daemon
21+
./gradlew docsite:npm_install docsite:npm_run_build --no-daemon
2222
chmod -c -R +rX "docsite/build" | while read line; do
2323
echo "::warning title=Invalid file permissions automatically fixed::$line"
2424
done
2525
shell: bash
2626
- name: Upload gh-pages artifact
27-
uses: actions/upload-pages-artifact@v2
27+
uses: actions/upload-pages-artifact@v3
2828
with:
2929
path: docsite/build
3030
deploy:
@@ -43,5 +43,5 @@ jobs:
4343
steps:
4444
- name: Deploy to GitHub Pages
4545
id: deployment
46-
uses: actions/deploy-pages@v2
46+
uses: actions/deploy-pages@v4
4747

.github/workflows/compile.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ jobs:
1616
with:
1717
fetch-depth: 0
1818
ref: ${{ github.event.pull_request.head.sha }}
19-
- uses: actions/setup-java@v3
19+
- uses: actions/setup-java@v4
2020
with:
2121
cache: 'gradle'
2222
distribution: temurin

.gitignore

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,8 @@ build/
1212
.gradle
1313
.env
1414
client/bin
15-
server/bin
15+
server/bin
16+
.terraform
17+
.terraform.*
18+
*.tfstate
19+
*.tfstate.backup

build.gradle.kts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
group = "io.whitefox"
22
plugins {
3-
id("co.uzzu.dotenv.gradle").version("3.0.0")
3+
id("co.uzzu.dotenv.gradle").version("4.0.0")
44
}

buildSrc/build.gradle.kts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,6 @@ repositories {
88

99
dependencies {
1010
implementation("org.openapi.generator:org.openapi.generator.gradle.plugin:6.6.0")
11-
implementation("com.diffplug.spotless:spotless-plugin-gradle:6.23.1")
11+
implementation("com.diffplug.spotless:spotless-plugin-gradle:6.23.3")
1212
implementation("com.palantir.gradle.gitversion:gradle-git-version:3.0.0")
1313
}

client-spark/build.gradle.kts

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,14 @@ dependencies {
1919

2020
// DELTA
2121
testImplementation("org.apache.hadoop:hadoop-common:3.3.6")
22-
testImplementation("io.delta:delta-sharing-spark_2.12:1.0.2")
22+
testImplementation("io.delta:delta-sharing-spark_2.13:1.0.3")
2323

2424
//SPARK
25-
testImplementation("org.apache.spark:spark-core_2.12:3.3.2")
26-
testImplementation("org.apache.spark:spark-sql_2.12:3.3.2")
27-
testImplementation("com.github.mrpowers:spark-fast-tests_2.12:1.3.0")
25+
testImplementation("org.apache.spark:spark-sql_2.13:3.5.0")
26+
testImplementation("com.github.mrpowers:spark-fast-tests_2.13:1.3.0")
2827

2928
//JUNIT
30-
testImplementation("org.junit.jupiter:junit-jupiter:5.8.1")
29+
testImplementation("org.junit.jupiter:junit-jupiter:5.10.1")
3130
}
3231

3332

client-spark/src/test/java/io/whitefox/api/client/ITDeltaSharingClient.java

Lines changed: 12 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -4,34 +4,31 @@
44

55
import com.github.mrpowers.spark.fast.tests.DatasetComparer;
66
import io.whitefox.api.client.model.CreateMetastore;
7-
import io.whitefox.api.client.model.Metastore;
7+
import io.whitefox.api.client.model.Provider;
88
import io.whitefox.api.models.MrFoxDeltaTableSchema;
9-
import io.whitefox.api.utils.SparkUtil;
9+
import io.whitefox.api.utils.ScalaUtils;
1010
import io.whitefox.api.utils.StorageManagerInitializer;
1111
import io.whitefox.api.utils.TablePath;
12+
import io.whitefox.api.utils.TestSparkSession;
1213
import java.util.List;
13-
import org.apache.spark.sql.SparkSession;
1414
import org.apache.spark.sql.types.DataType;
1515
import org.apache.spark.sql.types.Metadata;
1616
import org.apache.spark.sql.types.StructField;
1717
import org.apache.spark.sql.types.StructType;
1818
import org.junit.jupiter.api.BeforeAll;
1919
import org.junit.jupiter.api.Tag;
2020
import org.junit.jupiter.api.Test;
21-
import scala.collection.GenMap;
2221

2322
@Tag("clientSparkTest")
24-
public class ITDeltaSharingClient implements DatasetComparer, SparkUtil {
23+
public class ITDeltaSharingClient implements DatasetComparer, ScalaUtils {
2524

2625
private final StorageManagerInitializer storageManagerInitializer;
2726
private final String deltaTablePath;
28-
private final SparkSession spark;
2927

3028
public ITDeltaSharingClient() {
3129
this.storageManagerInitializer = new StorageManagerInitializer();
3230
this.deltaTablePath =
3331
TablePath.getDeltaTablePath(getClass().getClassLoader().getResource("MrFoxProfile.json"));
34-
this.spark = newSparkSession();
3532
}
3633

3734
@BeforeAll
@@ -41,10 +38,11 @@ static void initStorageManager() {
4138

4239
@Test
4340
void showS3Table1withQueryTableApi() {
41+
var spark = TestSparkSession.newSparkSession();
4442
storageManagerInitializer.createS3DeltaTable();
4543
var ds = spark.read().format("deltaSharing").load(deltaTablePath);
4644
var expectedSchema = new StructType(new StructField[] {
47-
new StructField("id", DataType.fromDDL("long"), true, new Metadata(GenMap.empty()))
45+
new StructField("id", DataType.fromDDL("long"), true, new Metadata(emptyScalaMap()))
4846
});
4947
var expectedData = spark
5048
.createDataFrame(
@@ -57,15 +55,16 @@ void showS3Table1withQueryTableApi() {
5755
MrFoxDeltaTableSchema.class)
5856
.toDF();
5957

60-
assertEquals(expectedSchema.json(), ds.schema().json());
58+
assertEquals(expectedSchema, ds.schema());
6159
assertEquals(5, ds.count());
6260
assertSmallDatasetEquality(ds, expectedData, true, false, false, 500);
6361
}
6462

6563
@Test
66-
void createGlueMetastore() {
67-
Metastore metastore = storageManagerInitializer.createGlueMetastore();
68-
assertEquals(metastore.getName(), "MrFoxMetastore");
69-
assertEquals(metastore.getType(), CreateMetastore.TypeEnum.GLUE.getValue());
64+
void createProviderWithGlueMetastore() {
65+
Provider provider = storageManagerInitializer.createProviderWithGlueMetastore();
66+
assertEquals(provider.getStorage().getName(), "MrFoxStorage");
67+
assertEquals(provider.getMetastore().getName(), "MrFoxMetastore");
68+
assertEquals(provider.getMetastore().getType(), CreateMetastore.TypeEnum.GLUE.getValue());
7069
}
7170
}
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
package io.whitefox.api.utils;
2+
3+
public interface ScalaUtils {
4+
default <K, V> scala.collection.immutable.Map<K, V> emptyScalaMap() {
5+
return scala.collection.immutable.Map$.MODULE$.empty();
6+
}
7+
}

client-spark/src/test/java/io/whitefox/api/utils/SparkUtil.java

Lines changed: 0 additions & 14 deletions
This file was deleted.

client-spark/src/test/java/io/whitefox/api/utils/StorageManagerInitializer.java

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,11 +49,15 @@ public void createS3DeltaTable() {
4949
addTableToSchemaRequest(providerRequest.getName(), createTableRequest.getName())));
5050
}
5151

52-
public Metastore createGlueMetastore() {
52+
public Provider createProviderWithGlueMetastore() {
5353
var metastoreRequest = createMetastoreRequest(s3TestConfig, CreateMetastore.TypeEnum.GLUE);
54-
return ApiUtils.recoverConflictLazy(
54+
var metastore = ApiUtils.recoverConflictLazy(
5555
() -> metastoreV1Api.createMetastore(metastoreRequest),
5656
() -> metastoreV1Api.describeMetastore(metastoreRequest.getName()));
57+
var providerRequest = addProviderRequest(Optional.of(metastore.getName()), TableFormat.iceberg);
58+
return ApiUtils.recoverConflictLazy(
59+
() -> providerV1Api.addProvider(providerRequest),
60+
() -> providerV1Api.getProvider(providerRequest.getName()));
5761
}
5862

5963
private String createSchemaRequest(TableFormat tableFormat) {
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
package io.whitefox.api.utils;
2+
3+
import org.apache.spark.sql.SparkSession;
4+
5+
public class TestSparkSession {
6+
7+
private static final class SparkHolder {
8+
private static final SparkSession spark = SparkSession.builder()
9+
.appName("delta sharing client test")
10+
.config("spark.driver.host", "localhost")
11+
.master("local[1, 4]")
12+
.getOrCreate();
13+
}
14+
15+
public static SparkSession newSparkSession() {
16+
return SparkHolder.spark.newSession();
17+
}
18+
}

docsite/docs/development_guidelines.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,8 @@ not run properly on newer versions of the JVM.
2121

2222
As soon as you clone the project you should verify that you are able to build and test locally, to do so you need to
2323
run the `check` command of Gradle, you can achieve that using either `gradlew` script in the project root (`.
24-
/gradlew check`) or run the same [gradle task from intellij](https://www.jetbrains.com/help/idea/work-with-gradle-tasks.
25-
html). If you're default jvm is not version 11, you can run `gradlew` passing another java home as follows:
24+
/gradlew check`) or run the same [gradle task from intellij](https://www.jetbrains.com/help/idea/work-with-gradle-tasks.html).
25+
If your default JVM is not version 11, you can run `gradlew` passing another java home as follows:
2626
`./gradlew -Dorg.gradle.java.home=<PATH_TO_JAVA_HOME> build`.
2727

2828
Sometimes IntelliJ will tell you have build errors, especially when moving from one branch to the other. The problem

0 commit comments

Comments (0)