Commit

Revert "[hive] Fix no longer possible to drop the hms table issue when table not in fs (apache#4853)"

This reverts commit 6690253.
xiaohongbo committed Jan 8, 2025
1 parent 6690253 commit c8b5e1f
Showing 3 changed files with 1 addition and 62 deletions.

@@ -272,14 +272,10 @@ public void dropTable(Identifier identifier, boolean ignoreIfNotExists)
         try {
             getTable(identifier);
         } catch (TableNotExistException e) {
-            if (!tableExistsInFileSystem(getTableLocation(identifier), DEFAULT_MAIN_BRANCH)) {
-                dropTableImpl(identifier);
-            }
             if (ignoreIfNotExists) {
                 return;
-            } else {
-                throw new TableNotExistException(identifier);
             }
+            throw new TableNotExistException(identifier);
         }
 
         dropTableImpl(identifier);
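
The effect of this hunk: while the fix was in place, dropTable() still removed the HMS record of a table whose data directory had been deleted out of band; after the revert, the failed getTable() short-circuits the method and dropTableImpl() is never reached, so the orphaned HMS entry stays behind. A minimal caller-side sketch of the reverted behavior (the catalog variable and table names are hypothetical):

    import org.apache.paimon.catalog.Catalog;
    import org.apache.paimon.catalog.Identifier;

    public class RevertedDropBehaviorSketch {
        // Assumes a Hive-backed `catalog` whose table files were deleted out
        // of band while the HMS entry survived (hypothetical setup).
        static void drop(Catalog catalog) {
            Identifier id = Identifier.create("paimon_db", "t");
            try {
                catalog.dropTable(id, false);
            } catch (Catalog.TableNotExistException e) {
                // getTable() cannot load the schema from the filesystem, so the
                // exception is thrown before dropTableImpl() runs: the orphaned
                // HMS record stays in the metastore.
            }
            // With ignoreIfNotExists = true the call returns silently instead,
            // again without touching the HMS record.
        }
    }
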
@@ -44,8 +44,6 @@
 import org.junit.jupiter.api.io.TempDir;
 
 import java.lang.reflect.Field;
-import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -61,7 +59,6 @@
 import static org.apache.paimon.hive.HiveCatalog.TABLE_TYPE_PROP;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.junit.Assert.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.fail;
@@ -410,37 +407,6 @@ public void testListTables() throws Exception {
         catalog.dropDatabase(databaseName, true, true);
     }
 
-    @Test
-    public void testDropTable() throws Exception {
-        String databaseName = "drop_table_test_db";
-        String tableName = "drop_table_test_table";
-        catalog.dropDatabase(databaseName, true, true);
-        catalog.createDatabase(databaseName, true);
-        Identifier identifier = Identifier.create(databaseName, tableName);
-
-        // test ignore if exists
-        catalog.createTable(
-                identifier, Schema.newBuilder().column("col", DataTypes.INT()).build(), true);
-        Path path = Paths.get(catalog.warehouse(), databaseName.concat(".db"), tableName);
-        catalog.fileIO().delete(new org.apache.paimon.fs.Path(path.toString()), true);
-        List<String> tables = catalog.listTables(databaseName);
-        assertEquals(1, tables.size());
-        catalog.dropTable(identifier, true);
-        List<String> newTables = catalog.listTables(databaseName);
-        assertEquals(0, newTables.size());
-
-        // test not ignore if exists
-        catalog.createTable(
-                identifier, Schema.newBuilder().column("col", DataTypes.INT()).build(), true);
-        catalog.fileIO().delete(new org.apache.paimon.fs.Path(path.toString()), true);
-        tables = catalog.listTables(databaseName);
-        assertEquals(1, tables.size());
-        assertThrows(
-                Catalog.TableNotExistException.class, () -> catalog.dropTable(identifier, false));
-
-        catalog.dropDatabase(databaseName, true, true);
-    }
-
     @Override
     protected boolean supportsView() {
         return true;
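
The removed test pinned the fixed behavior: after the table directory vanished, dropTable(identifier, true) purged the HMS record (listTables dropped from 1 to 0), and dropTable(identifier, false) purged it while still throwing. Under the reverted code the same scenario leaves the entry in place. A hypothetical assertion (not part of this commit) capturing that, reusing the shared `catalog` fixture and the java.nio imports this commit removes:

    @Test
    public void testDropTableKeepsOrphanedHmsEntry() throws Exception {
        String databaseName = "drop_table_keeps_entry_db"; // hypothetical name
        catalog.dropDatabase(databaseName, true, true);
        catalog.createDatabase(databaseName, true);
        Identifier identifier = Identifier.create(databaseName, "t");
        catalog.createTable(
                identifier, Schema.newBuilder().column("col", DataTypes.INT()).build(), true);
        // Delete the table directory out of band, leaving the HMS entry orphaned.
        Path path = Paths.get(catalog.warehouse(), databaseName.concat(".db"), "t");
        catalog.fileIO().delete(new org.apache.paimon.fs.Path(path.toString()), true);
        // Reverted behavior: the drop is silently skipped and HMS still lists it.
        catalog.dropTable(identifier, true);
        assertEquals(1, catalog.listTables(databaseName).size());
    }
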
@@ -621,29 +621,6 @@ abstract class DDLWithHiveCatalogTestBase extends PaimonHiveTestBase {
     }
   }
 
-  test("Paimon DDL with hive catalog: drop table which location has been deleted") {
-    Seq("paimon", sparkCatalogName, paimonHiveCatalogName).foreach {
-      catalogName =>
-        spark.sql(s"USE $catalogName")
-        withDatabase("paimon_db") {
-          spark.sql(s"CREATE DATABASE paimon_db")
-          spark.sql(s"USE paimon_db")
-          spark.sql("CREATE TABLE t USING paimon")
-          val table = loadTable("paimon_db", "t")
-          table.fileIO().delete(table.location(), true)
-          if (catalogName.equals("paimon")) {
-            // Filesystem catalog determines whether a table exists based on table location
-            assert(spark.sql("SHOW TABLES").count() == 0)
-          } else {
-            // Hive catalog determines whether a table exists based on metadata in hms
-            assert(spark.sql("SHOW TABLES").count() == 1)
-          }
-          spark.sql("DROP TABLE IF EXISTS t")
-          assert(spark.sql("SHOW TABLES").count() == 0)
-        }
-    }
-  }
-
   def getDatabaseProp(dbName: String, propertyName: String): String = {
     spark
       .sql(s"DESC DATABASE EXTENDED $dbName")
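
The inline comments in the removed test explain why it has to go: the two catalog families disagree about what "exists" means once the warehouse directory is deleted out of band, and after the revert DROP TABLE IF EXISTS can no longer clean up the orphaned HMS entry, so the final SHOW TABLES assertion would fail for the Hive-backed catalogs. The same split, sketched at the Catalog API level (hypothetical names, Java for consistency with the sketches above):

    import org.apache.paimon.catalog.Catalog;
    import org.apache.paimon.catalog.Identifier;

    public class ExistenceSplitSketch {
        // `hiveCatalog` is assumed to manage paimon_db.t after its warehouse
        // directory was deleted out of band (hypothetical setup).
        static void inspect(Catalog hiveCatalog) throws Exception {
            // Existence by metastore metadata: the orphaned entry is still listed.
            boolean listed = hiveCatalog.listTables("paimon_db").contains("t");
            try {
                // Existence by filesystem state: the schema lives in the deleted
                // location, so the table cannot be loaded...
                hiveCatalog.getTable(Identifier.create("paimon_db", "t"));
            } catch (Catalog.TableNotExistException e) {
                // ...and after this revert it cannot be dropped either:
                // listed == true, yet the table is unreadable and undroppable.
            }
        }
    }
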
