Draft: build(deps): bump paimon version to 1.1.1 #7193

Open · wants to merge 4 commits into base: main
Changes from all commits
catalogs/catalog-lakehouse-paimon/build.gradle.kts (2 changes: 1 addition & 1 deletion)
@@ -25,7 +25,7 @@ plugins {
 }

 val scalaVersion: String = project.properties["scalaVersion"] as? String ?: extra["defaultScalaVersion"].toString()
-val sparkVersion: String = libs.versions.spark34.get()
+val sparkVersion: String = libs.versions.spark35.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
 val paimonVersion: String = libs.versions.paimon.get()

@@ -64,7 +64,7 @@ public List<String> listDatabases() {
   }

   public Map<String, String> loadDatabase(String databaseName) throws DatabaseNotExistException {
-    return catalog.loadDatabaseProperties(databaseName);
+    return catalog.getDatabase(databaseName).options();
   }

   public void createDatabase(String databaseName, Map<String, String> properties)

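In Paimon 1.x the `Catalog.loadDatabaseProperties(String)` call this module used is gone; database metadata is reached through `Catalog.getDatabase(String)`, which returns a `Database` handle whose `options()` map carries the properties, as the hunk above applies. A minimal sketch of the migration (the wrapper class name and its `catalog` field are stand-ins, not the patched class):

import java.util.Map;
import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.Database;

class PaimonDatabaseOps {
  private final Catalog catalog;

  PaimonDatabaseOps(Catalog catalog) {
    this.catalog = catalog;
  }

  Map<String, String> loadDatabase(String databaseName)
      throws Catalog.DatabaseNotExistException {
    // Paimon 0.8: catalog.loadDatabaseProperties(databaseName)
    // Paimon 1.x: getDatabase() returns a Database handle whose
    // options() map carries the database properties.
    Database database = catalog.getDatabase(databaseName);
    return database.options();
  }
}
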
@@ -146,7 +146,7 @@ public void stop() {
   }

   @AfterEach
-  private void resetSchema() {
+  public void resetSchema() {
     clearTableAndSchema();
     createSchema();
   }

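The visibility change is a JUnit Jupiter requirement rather than a Paimon one: lifecycle methods such as `@AfterEach` must not be private, or test discovery fails with a `JUnitException`. A minimal illustration (class and method names are hypothetical):

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;

class LifecycleVisibilityExample {
  // Must be public or package-private; a private modifier here makes
  // JUnit Jupiter fail with "@AfterEach method ... must not be private".
  @AfterEach
  void resetState() {
    // clean up shared fixtures between tests
  }

  @Test
  void placeholder() {}
}
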
@@ -192,7 +192,7 @@ void testPaimonSchemaOperations() throws DatabaseNotExistException {
     Assertions.assertThrows(
         DatabaseNotExistException.class,
         () -> {
-          paimonCatalog.loadDatabaseProperties(schemaIdent.name());
+          paimonCatalog.getDatabase(schemaIdent.name());
         });

     schemaNames = new HashSet<>(Arrays.asList(schemas.listSchemas()));

@@ -69,6 +69,6 @@ void testPaimonSchemaProperties() throws Catalog.DatabaseNotExistException {
     // load schema check, database properties is empty for Paimon FilesystemCatalog.
     Schema schema = schemas.loadSchema(schemaIdent.name());
     Assertions.assertTrue(schema.properties().isEmpty());
-    Assertions.assertTrue(paimonCatalog.loadDatabaseProperties(schemaIdent.name()).isEmpty());
+    Assertions.assertTrue(paimonCatalog.getDatabase(schemaIdent.name()).options().isEmpty());
   }
 }

@@ -77,7 +77,7 @@ void testPaimonSchemaProperties() throws Catalog.DatabaseNotExistException {
     // load schema check.
     Schema schema = schemas.loadSchema(schemaIdent.name());
     Assertions.assertEquals(schema.properties().get("key"), "hive");
-    Map<String, String> loadedProps = paimonCatalog.loadDatabaseProperties(schemaIdent.name());
+    Map<String, String> loadedProps = paimonCatalog.getDatabase(schemaIdent.name()).options();
     Assertions.assertEquals(loadedProps.get("key"), "hive");
   }
 }

@@ -34,6 +34,7 @@
 import static org.apache.gravitino.rel.TableChange.updateColumnType;
 import static org.apache.gravitino.rel.TableChange.updateComment;
 import static org.apache.paimon.CoreOptions.BUCKET;
+import static org.apache.paimon.CoreOptions.BUCKET_KEY;
 import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;

@@ -92,7 +93,8 @@ public class TestPaimonCatalogOps {
   private static final String TABLE = "test_table_ops_table";
   private static final String COMMENT = "table_ops_table_comment";
   private static final NameIdentifier IDENTIFIER = NameIdentifier.of(Namespace.of(DATABASE), TABLE);
-  private static final Map<String, String> OPTIONS = ImmutableMap.of(BUCKET.key(), "10");
+  private static final Map<String, String> OPTIONS =
+      ImmutableMap.of(BUCKET.key(), "10", BUCKET_KEY.key(), "col_1");

   @BeforeEach
   public void setUp() throws Exception {

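The added `BUCKET_KEY` suggests that Paimon 1.x no longer picks a bucket key implicitly for fixed-bucket, non-primary-key tables, so the test declares one; that reading is an assumption, not spelled out in the PR. A sketch of the resulting option map (`col_1` is one of the test table's columns):

import static org.apache.paimon.CoreOptions.BUCKET;
import static org.apache.paimon.CoreOptions.BUCKET_KEY;

import com.google.common.collect.ImmutableMap;
import java.util.Map;

class BucketOptionsExample {
  // Fixed-bucket layout: rows are hashed into 10 buckets by col_1.
  static final Map<String, String> OPTIONS =
      ImmutableMap.of(
          BUCKET.key(), "10",
          BUCKET_KEY.key(), "col_1");
}
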
@@ -218,7 +220,7 @@ void testUpdateColumnPosition() throws Exception {
         IllegalArgumentException.class, () -> assertUpdateColumnPosition(5, defaultPos()));
     // Test NullPointerException with UpdateColumnPosition for after non-existent column.
     assertThrowsExactly(
-        NullPointerException.class, () -> assertUpdateColumnPosition(1, after("col_5")));
+        IllegalArgumentException.class, () -> assertUpdateColumnPosition(1, after("col_5")));
   }

   @Test

@@ -36,6 +36,7 @@
 import org.apache.paimon.factories.FactoryException;
 import org.apache.paimon.hive.HiveCatalog;
 import org.apache.paimon.jdbc.JdbcCatalog;
+import org.apache.paimon.options.CatalogOptions;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;

@@ -65,6 +66,8 @@ private void assertCatalog(String metastore, Consumer<Catalog> consumer) throws
     loadCatalogBackend(
         new PaimonConfig(
             ImmutableMap.of(
+                CatalogOptions.CACHE_ENABLED.key(),
+                "false",
                 PaimonConfig.CATALOG_BACKEND.getKey(),
                 metastore,
                 PaimonConfig.CATALOG_WAREHOUSE.getKey(),

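Newer Paimon versions can wrap the catalog in a metadata cache governed by `CatalogOptions.CACHE_ENABLED` (`cache-enabled`); turning it off here keeps the backend-detection test reading live state. A minimal sketch of building a cache-free filesystem catalog with the plain Paimon API (the warehouse path is a placeholder):

import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.catalog.CatalogFactory;
import org.apache.paimon.options.CatalogOptions;
import org.apache.paimon.options.Options;

class CacheDisabledCatalog {
  // Builds a filesystem-backed Paimon catalog with metadata caching off.
  static Catalog create() {
    Options options = new Options();
    options.set(CatalogOptions.WAREHOUSE, "file:///tmp/paimon-warehouse");
    options.set(CatalogOptions.CACHE_ENABLED, false);
    return CatalogFactory.createCatalog(CatalogContext.create(options));
  }
}
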
@@ -44,6 +44,7 @@
 import static org.junit.jupiter.api.Assertions.assertThrowsExactly;
 import static org.junit.jupiter.api.Assertions.assertTrue;

+import com.google.common.base.Joiner;
 import java.util.Arrays;
 import java.util.function.Consumer;
 import org.apache.commons.lang3.tuple.Pair;

@@ -87,7 +88,7 @@ void testAddColumnFirst() {
         AddColumn.class,
         schemaChange -> {
           AddColumn addColumn = (AddColumn) schemaChange;
-          assertEquals("col_1", addColumn.fieldName());
+          assertEquals("col_1", filedPath(addColumn.fieldNames()));
           assertEquals(DataTypeRoot.INTEGER, addColumn.dataType().getTypeRoot());
           assertEquals(AddColumn.class.getSimpleName(), addColumn.description());
           assertNotNull(addColumn.move());

@@ -111,7 +112,7 @@ void testAddColumnAfter() {
         AddColumn.class,
         schemaChange -> {
           AddColumn addColumn = (AddColumn) schemaChange;
-          assertEquals("col_2", addColumn.fieldName());
+          assertEquals("col_2", filedPath(addColumn.fieldNames()));
           assertEquals(DataTypeRoot.FLOAT, addColumn.dataType().getTypeRoot());
           assertEquals(AddColumn.class.getSimpleName(), addColumn.description());
           assertNotNull(addColumn.move());

@@ -135,7 +136,7 @@ void testAddColumnDefaultPosition() {
         AddColumn.class,
         schemaChange -> {
           AddColumn addColumn = (AddColumn) schemaChange;
-          assertEquals("col_3", addColumn.fieldName());
+          assertEquals("col_3", filedPath(addColumn.fieldNames()));
           assertEquals(DataTypeRoot.ARRAY, addColumn.dataType().getTypeRoot());
           assertEquals(AddColumn.class.getSimpleName(), addColumn.description());
           assertNull(addColumn.move());

@@ -156,7 +157,7 @@ void testAddColumnWitNullPosition() {
         AddColumn.class,
         schemaChange -> {
           AddColumn addColumn = (AddColumn) schemaChange;
-          assertEquals("col_4", addColumn.fieldName());
+          assertEquals("col_4", filedPath(addColumn.fieldNames()));
           assertEquals(DataTypeRoot.MAP, addColumn.dataType().getTypeRoot());
           assertEquals(AddColumn.class.getSimpleName(), addColumn.description());
           assertNull(addColumn.move());

@@ -196,7 +197,7 @@ void testUpdateColumnType() {
         UpdateColumnType.class,
         schemaChange -> {
           UpdateColumnType updateColumnType = (UpdateColumnType) schemaChange;
-          assertEquals("col_4", updateColumnType.fieldName());
+          assertEquals("col_4", filedPath(updateColumnType.fieldNames()));
           assertEquals(DataTypeRoot.DOUBLE, updateColumnType.newDataType().getTypeRoot());
         });
   }

@@ -208,7 +209,7 @@ void testRenameColumn() {
         RenameColumn.class,
         schemaChange -> {
           RenameColumn renameColumn = (RenameColumn) schemaChange;
-          assertEquals("col_1", renameColumn.fieldName());
+          assertEquals("col_1", filedPath(renameColumn.fieldNames()));
           assertEquals("col_5", renameColumn.newName());
         });
   }

@@ -220,7 +221,7 @@ void testDeleteColumn() {
         DropColumn.class,
         schemaChange -> {
           DropColumn dropColumn = (DropColumn) schemaChange;
-          assertEquals("col_2", dropColumn.fieldName());
+          assertEquals("col_2", filedPath(dropColumn.fieldNames()));
         });
   }

@@ -330,4 +331,8 @@ private void assertIllegalTableChange(Pair<TableChange, String> tableChange) {
         IllegalArgumentException.class, () -> buildSchemaChange(tableChange.getKey()));
     assertEquals(tableChange.getValue(), exception.getMessage());
   }
+
+  private static String filedPath(String[] fieldNames) {
+    return Joiner.on(".").join(fieldNames);
+  }
 }

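Behind this rename, Paimon 1.x reports the target column of a `SchemaChange` as a `String[]` path so nested columns can be addressed, replacing the old single `fieldName()`; the `filedPath` helper flattens that path back into the dotted name the assertions compare against. A standalone illustration (the column names are hypothetical):

import com.google.common.base.Joiner;

class FieldPathExample {
  public static void main(String[] args) {
    // A nested column is addressed by its full path, e.g. outer.inner.
    String[] fieldNames = {"outer_col", "inner_col"};
    String path = Joiner.on(".").join(fieldNames);
    System.out.println(path); // outer_col.inner_col
  }
}
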
gradle/libs.versions.toml (2 changes: 1 addition & 1 deletion)
@@ -57,7 +57,7 @@ commons-dbcp2 = "2.11.0"
 caffeine = "2.9.3"
 iceberg = '1.6.1' # used for Gravitino Iceberg catalog and Iceberg REST service
 iceberg4spark = "1.4.1" # used for compile spark connector
-paimon = '0.8.0'
+paimon = '1.1.1'
 spark33 = "3.3.4"
 spark34 = "3.4.3"
 spark35 = "3.5.1"

@@ -37,6 +37,7 @@
 import org.apache.gravitino.spark.connector.plugin.GravitinoSparkPlugin;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions;
 import org.apache.spark.SparkConf;
 import org.apache.spark.sql.SparkSession;
 import org.junit.jupiter.api.AfterAll;

@@ -203,7 +204,8 @@ private void initSparkEnv() {
         .set(GravitinoSparkConfig.GRAVITINO_ENABLE_ICEBERG_SUPPORT, "true")
         .set("hive.exec.dynamic.partition.mode", "nonstrict")
         .set("spark.sql.warehouse.dir", warehouse)
-        .set("spark.sql.session.timeZone", TIME_ZONE_UTC);
+        .set("spark.sql.session.timeZone", TIME_ZONE_UTC)
+        .set("spark.sql.extensions", PaimonSparkSessionExtensions.class.getName());
     sparkSession =
         SparkSession.builder()
             .master("local[1]")

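Paimon ships its extra Spark SQL syntax (for example `CALL` procedures) as a `SparkSessionExtensions` hook, and with the 1.1.1 connector the integration tests appear to need it registered explicitly. A minimal sketch of a session configured that way (master and app name are placeholders):

import org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions;
import org.apache.spark.sql.SparkSession;

class PaimonSparkSessionExample {
  static SparkSession create() {
    // Registering the extension enables Paimon's extra SQL syntax
    // on top of the regular catalog support.
    return SparkSession.builder()
        .master("local[1]")
        .appName("paimon-extensions-example")
        .config("spark.sql.extensions", PaimonSparkSessionExtensions.class.getName())
        .getOrCreate();
  }
}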