Add create/drop dynamic catalog tests for lake connectors #24658

Open · wants to merge 1 commit into base: master
@@ -290,6 +290,29 @@ public void testDropSchemaExternalFiles()

protected abstract String bucketUrl();

    @Test
    void testCreateDropDynamicCatalog()
    {
        String catalog = "new_catalog_" + randomNameSuffix();
        String createCatalogSql = """
                CREATE CATALOG %1$s USING delta_lake
                WITH (
                    "hive.metastore" = 'thrift',
                    "hive.metastore.uri" = '%2$s'
                )""".formatted(catalog, hiveHadoop.getHiveMetastoreEndpoint().toString());
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "delta", "hive", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "delta", "hive", "tpch");
        // re-add the same catalog
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "delta", "hive", "tpch", catalog);
Member commented:

Is it going to be useful to issue a query here, since that would show any issues with "leftover state"? Or not really, and it gets into resiliency/scale testing territory?

Member Author replied:

Yeah, it gets into resiliency territory, but maybe we can do a SHOW SCHEMAS or something?


assertUpdate("DROP CATALOG " + catalog);
assertCatalogs("system", "delta", "hive", "tpch");
}
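A minimal sketch of the SHOW SCHEMAS probe suggested in the review thread above, assuming assertQuerySucceeds from AbstractTestQueryFramework is available and that the re-created catalog can reach the same metastore; this is illustrative only and not part of the commit:

        // Hypothetical extra check after re-creating the catalog: a query against
        // the fresh catalog would surface any leftover state from the dropped one.
        assertQuerySucceeds("SHOW SCHEMAS FROM " + catalog);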

@Test
public void testCreateTableInNonexistentSchemaFails()
{
@@ -17,6 +17,7 @@
import io.trino.plugin.deltalake.DeltaLakeQueryRunner;
import io.trino.plugin.hive.BaseS3AndGlueMetastoreTest;
import io.trino.testing.QueryRunner;
import org.junit.jupiter.api.Test;

import java.net.URI;
import java.util.Set;
@@ -26,6 +27,7 @@
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.plugin.hive.metastore.glue.TestingGlueHiveMetastore.createTestingGlueHiveMetastore;
import static io.trino.testing.SystemEnvironmentUtils.requireEnv;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static org.assertj.core.api.Assertions.assertThat;

public class TestDeltaS3AndGlueMetastoreTest
@@ -106,4 +108,27 @@ private String getExtendedStatisticsFileFromTableDirectory(String tableLocation)
.filter(path -> path.contains("/_trino_meta"))
.collect(Collectors.toUnmodifiableSet()));
}

    @Test
    void testCreateDropDynamicCatalog()
    {
        String catalog = "new_catalog_" + randomNameSuffix();
        String createCatalogSql = """
                CREATE CATALOG %1$s USING delta_lake
                WITH (
                    "hive.metastore" = 'glue',
                    "hive.metastore.glue.default-warehouse-dir" = '%2$s'
                )""".formatted(catalog, schemaPath());
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "delta", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "delta", "tpch");
        // re-add the same catalog
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "delta", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "delta", "tpch");
    }
}
@@ -295,6 +295,37 @@ protected void verifySelectAfterInsertFailurePermissible(Throwable e)
.containsPattern("io.trino.spi.TrinoException: Cannot read from a table tpch.test_insert_select_\\w+ that was modified within transaction, you need to commit the transaction first");
}

    @Test
    void testCreateDropDynamicCatalog()
    {
        String catalog = "new_catalog_" + randomNameSuffix();
        String createCatalogSql = "CREATE CATALOG %s USING hive".formatted(catalog);
        assertUpdate(createCatalogSql);
        assertCatalogs(availableCatalogs(Optional.of(catalog)));

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs(availableCatalogs(Optional.empty()));
        // re-add the same catalog
        assertUpdate(createCatalogSql);
        assertCatalogs(availableCatalogs(Optional.of(catalog)));

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs(availableCatalogs(Optional.empty()));
    }

    private static String[] availableCatalogs(Optional<String> catalog)
    {
        ImmutableList.Builder<String> catalogs = ImmutableList.builder();
        catalogs.add("system")
                .add("hive")
                .add("hive_bucketed")
                .add("hive_timestamp_nanos")
                .add("mock_dynamic_listing")
                .add("tpch");
        catalog.ifPresent(catalogs::add);
        return catalogs.build().toArray(new String[0]);
    }

@Test
@Override
public void testDelete()
@@ -442,4 +442,22 @@ public void testCreateFunction()
assertUpdate("DROP FUNCTION " + name2 + "(varchar)");
assertQueryFails("DROP FUNCTION " + name2 + "(varchar)", "line 1:1: Function not found");
}

    @Test
    void testCreateDropDynamicCatalog()
    {
        String catalog = "new_catalog_" + randomNameSuffix();
        String createCatalogSql = "CREATE CATALOG %s USING hive".formatted(catalog);
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "hive", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "hive", "tpch");
        // re-add the same catalog
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "hive", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "hive", "tpch");
    }
}
@@ -20,6 +20,7 @@
import org.junit.jupiter.api.Test;

import static io.trino.plugin.hudi.testing.HudiTestUtils.COLUMNS_TO_HIDE;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static org.assertj.core.api.Assertions.assertThat;

public class TestHudiConnectorTest
@@ -87,4 +88,22 @@ public void testHideHiveSysSchema()
assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).doesNotContain("sys");
assertQueryFails("SHOW TABLES IN hudi.sys", ".*Schema 'sys' does not exist");
}

    @Test
    void testCreateDropDynamicCatalog()
    {
        String catalog = "new_catalog_" + randomNameSuffix();
        String createCatalogSql = "CREATE CATALOG %1$s USING hudi".formatted(catalog);
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "hudi", "mock_dynamic_listing", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "hudi", "mock_dynamic_listing", "tpch");
        // re-add the same catalog
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "hudi", "mock_dynamic_listing", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "hudi", "mock_dynamic_listing", "tpch");
    }
}
@@ -121,6 +121,34 @@ public void testShowCreateTable()
"\\)");
}

    @Test
    void testCreateDropDynamicCatalog()
    {
        String catalog = "new_catalog_" + randomNameSuffix();
        String createCatalogSql = "CREATE CATALOG %s USING iceberg".formatted(catalog);
        assertUpdate(createCatalogSql);
        assertCatalogs(availableCatalogs(Optional.of(catalog)));

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs(availableCatalogs(Optional.empty()));
        // re-add the same catalog
        assertUpdate(createCatalogSql);
        assertCatalogs(availableCatalogs(Optional.of(catalog)));

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs(availableCatalogs(Optional.empty()));
    }

    protected String[] availableCatalogs(Optional<String> catalog)
    {
        ImmutableList.Builder<String> catalogs = ImmutableList.builder();
        catalogs.add("system")
                .add("iceberg")
                .add("tpch");
        catalog.ifPresent(catalogs::add);
        return catalogs.build().toArray(new String[0]);
    }

@Test
public void testHiddenPathColumn()
{
@@ -149,4 +149,22 @@ private void testAnalyzeWithProvidedTableLocation(boolean partitioned, LocationP
assertQuery("SHOW STATS FOR " + tableName, expectedStatistics);
}
}

    @Test
    void testCreateDropDynamicCatalog()
    {
        String catalog = "new_catalog_" + randomNameSuffix();
        String createCatalogSql = "CREATE CATALOG %s USING iceberg".formatted(catalog);
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "iceberg", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "iceberg", "tpch");
        // re-add the same catalog
        assertUpdate(createCatalogSql);
        assertCatalogs("system", "iceberg", "tpch", catalog);

        assertUpdate("DROP CATALOG " + catalog);
        assertCatalogs("system", "iceberg", "tpch");
    }
}
@@ -13,6 +13,7 @@
*/
package io.trino.plugin.iceberg.catalog.rest;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.http.server.testing.TestingHttpServer;
import io.trino.filesystem.Location;
@@ -297,6 +298,18 @@ protected boolean locationExists(String location)
return java.nio.file.Files.exists(Path.of(location));
}

    @Override
    protected String[] availableCatalogs(Optional<String> catalog)
    {
        ImmutableList.Builder<String> catalogs = ImmutableList.builder();
        catalogs.add("system")
                .add("iceberg")
                .add("nested_namespace_disabled")
                .add("tpch");
        catalog.ifPresent(catalogs::add);
        return catalogs.build().toArray(new String[0]);
    }

private TableIdentifier toIdentifier(String tableName)
{
return TableIdentifier.of(getSession().getSchema().orElseThrow(), tableName);
@@ -595,6 +595,17 @@ protected void assertNoDataRead(@Language("SQL") String sql)
results -> assertThat(results.getRowCount()).isEqualTo(0));
}

    protected void assertCatalogs(String... catalogs)
    {
        // SHOW CATALOGS returns one single-column row per catalog; verify the exact set, ignoring order
        List<MaterializedRow> showCatalogs = computeActual("SHOW CATALOGS").getMaterializedRows();
        assertThat(showCatalogs)
                .extracting(row -> {
                    assertThat(row.getFieldCount()).isEqualTo(1);
                    return row.getField(0);
                })
                .containsExactlyInAnyOrder(catalogs);
    }

protected MaterializedResult computeExpected(@Language("SQL") String sql, List<? extends Type> resultTypes)
{
return h2QueryRunner.execute(getSession(), sql, resultTypes);