This is an automated email from the ASF dual-hosted git repository.

sarath pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/atlas.git
The following commit(s) were added to refs/heads/master by this push:
     new e993f85  ATLAS-4301 Handle Integration Test Failures on HiveHook module
e993f85 is described below

commit e993f85d7b2c5e11914a328f4b3aee4392c4e106
Author:     Mandar Ambawane <mandar.ambaw...@freestoneinfotech.com>
AuthorDate: Thu Jun 3 14:35:22 2021 +0530

    ATLAS-4301 Handle Integration Test Failures on HiveHook module

    Signed-off-by: Sarath Subramanian <sar...@apache.org>
---
 .../org/apache/atlas/hive/hook/events/BaseHiveEvent.java     |  7 +++++--
 .../src/test/java/org/apache/atlas/hive/HiveITBase.java      |  2 +-
 .../src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java | 12 ++++++------
 3 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java
index 21c0ae3..2e8237b 100644
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java
@@ -230,7 +230,8 @@ public abstract class BaseHiveEvent {
         switch(entity.getType()) {
             case TABLE:
             case PARTITION:
-            case DFS_DIR: {
+            case DFS_DIR:
+            case LOCAL_DIR: {
                 ret = toAtlasEntity(entity, entityExtInfo, skipTempTables);
             }
             break;
@@ -278,7 +279,8 @@ public abstract class BaseHiveEvent {
             }
             break;
 
-            case DFS_DIR: {
+            case DFS_DIR:
+            case LOCAL_DIR: {
                 URI location = entity.getLocation();
 
                 if (location != null) {
@@ -819,6 +821,7 @@ public abstract class BaseHiveEvent {
                 return getQualifiedName(entity.getTable());
 
             case DFS_DIR:
+            case LOCAL_DIR:
                 return getQualifiedName(entity.getLocation());
         }
 
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
index 7a11ec3..0875afa 100644
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
@@ -175,7 +175,7 @@ public class HiveITBase {
     }
 
     protected String createTestDFSPath(String path) throws Exception {
-        return "pfile://" + mkdir(path);
+        return "file://" + mkdir(path);
     }
 
     protected String file(String tag) throws Exception {
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index 52684e7..1db73e5 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -282,7 +282,7 @@ public class HiveHookIT extends HiveITBase {
     private Set<ReadEntity> getInputs(String inputName, Entity.Type entityType) throws HiveException {
         final ReadEntity entity;
 
-        if (Entity.Type.DFS_DIR.equals(entityType)) {
+        if (Entity.Type.DFS_DIR.equals(entityType) || Entity.Type.LOCAL_DIR.equals(entityType)) {
             entity = new TestReadEntity(lower(new Path(inputName).toString()), entityType);
         } else {
             entity = new TestReadEntity(getQualifiedTblName(inputName), entityType);
@@ -602,7 +602,7 @@ public class HiveHookIT extends HiveITBase {
         Assert.assertNotNull(ddlQueries);
         Assert.assertEquals(ddlQueries.size(), 1);
 
-        assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, null, getOutputs(tableName, Entity.Type.TABLE)));
+        assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, getInputs("file://" + loadFile, Entity.Type.LOCAL_DIR), getOutputs(tableName, Entity.Type.TABLE)));
     }
 
     @Test
@@ -840,7 +840,7 @@ public class HiveHookIT extends HiveITBase {
         runCommand(query);
 
         HiveEventContext event = constructEvent(query, HiveOperation.QUERY,
-                getInputs(tableName, Entity.Type.TABLE), null);
+                getInputs(tableName, Entity.Type.TABLE), getOutputs(randomLocalPath, Entity.Type.LOCAL_DIR));
         AtlasEntity hiveProcess = validateProcess(event);
         AtlasEntity hiveProcessExecution = validateProcessExecution(hiveProcess, event);
         AtlasObjectId process = toAtlasObjectId(hiveProcessExecution.getRelationshipAttribute(
@@ -1057,7 +1057,7 @@ public class HiveHookIT extends HiveITBase {
 
         String tblId = assertTableIsRegistered(DEFAULT_DB, tableName);
 
-        String filename = "pfile://" + mkdir("exportUnPartitioned");
+        String filename = "file://" + mkdir("exportUnPartitioned");
         String query    = "export table " + tableName + " to \"" + filename + "\"";
 
         runCommand(query);
@@ -1110,7 +1110,7 @@ public class HiveHookIT extends HiveITBase {
         Assert.assertNotEquals(processEntity.getGuid(), processEntity2.getGuid());
 
         //Should create another process
-        filename = "pfile://" + mkdir("export2UnPartitioned");
+        filename = "file://" + mkdir("export2UnPartitioned");
         query    = "export table " + tableName + " to \"" + filename + "\"";
 
         runCommand(query);
@@ -1174,7 +1174,7 @@ public class HiveHookIT extends HiveITBase {
         String tblId = assertTableIsRegistered(DEFAULT_DB, tableName);
 
         //Add a partition
-        String partFile = "pfile://" + mkdir("partition");
+        String partFile = "file://" + mkdir("partition");
         String query    = "alter table " + tableName + " add partition (dt='"+ PART_FILE + "') location '" + partFile + "'";
 
         runCommand(query);
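
For readers skimming the diff: the functional change is that LOCAL_DIR entities now follow the same location-based handling as DFS_DIR entities in BaseHiveEvent, and the integration tests switch local test paths from "pfile://" to "file://" so those locations are captured as inputs/outputs. The standalone sketch below is not the patched Atlas code; the class, enum, and method names are invented purely to illustrate that shared handling, with the lowercasing mirroring the lower(new Path(inputName).toString()) helper visible in HiveHookIT above.

import java.net.URI;

// Standalone illustration only -- NOT the Atlas sources. It shows the effect of the
// patch above: LOCAL_DIR entities share the DFS_DIR branch, so a local "file://"
// location is resolved to a qualified name instead of being skipped.
public class DirEntityNameSketch {
    enum EntityType { TABLE, PARTITION, DFS_DIR, LOCAL_DIR }

    static String qualifiedName(EntityType type, URI location) {
        switch (type) {
            case DFS_DIR:
            case LOCAL_DIR: // newly handled alongside DFS_DIR by the patch
                // Lowercased, mirroring the test helper's treatment of directory paths.
                return location.toString().toLowerCase();
            default:
                throw new IllegalArgumentException("tables/partitions are named elsewhere");
        }
    }

    public static void main(String[] args) {
        System.out.println(qualifiedName(EntityType.LOCAL_DIR, URI.create("file:///tmp/load")));
        System.out.println(qualifiedName(EntityType.DFS_DIR, URI.create("hdfs://nn:8020/warehouse/tbl")));
    }
}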