This is an automated email from the ASF dual-hosted git repository.

sarath pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/atlas.git


The following commit(s) were added to refs/heads/branch-2.0 by this push:
     new d24ce80  ATLAS-4301 Handle Integration Test Failures on HiveHook module
d24ce80 is described below

commit d24ce80914594831a03aa18e5e8cc7d129590d88
Author: Mandar Ambawane <mandar.ambaw...@freestoneinfotech.com>
AuthorDate: Thu Jun 3 14:35:22 2021 +0530

    ATLAS-4301 Handle Integration Test Failures on HiveHook module
    
    Signed-off-by: Sarath Subramanian <sar...@apache.org>
    (cherry picked from commit e993f85d7b2c5e11914a328f4b3aee4392c4e106)
---
 .../org/apache/atlas/hive/hook/events/BaseHiveEvent.java     |  7 +++++--
 .../src/test/java/org/apache/atlas/hive/HiveITBase.java      |  2 +-
 .../src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java | 12 ++++++------
 3 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java
index a69d5ce..aefe9e9 100644
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/events/BaseHiveEvent.java
@@ -233,7 +233,8 @@ public abstract class BaseHiveEvent {
         switch(entity.getType()) {
             case TABLE:
             case PARTITION:
-            case DFS_DIR: {
+            case DFS_DIR:
+            case LOCAL_DIR: {
                 ret = toAtlasEntity(entity, entityExtInfo, skipTempTables);
             }
             break;
@@ -281,7 +282,8 @@ public abstract class BaseHiveEvent {
             }
             break;
 
-            case DFS_DIR: {
+            case DFS_DIR:
+            case LOCAL_DIR: {
                 URI location = entity.getLocation();
 
                 if (location != null) {
@@ -822,6 +824,7 @@ public abstract class BaseHiveEvent {
                 return getQualifiedName(entity.getTable());
 
             case DFS_DIR:
+            case LOCAL_DIR:
                 return getQualifiedName(entity.getLocation());
         }
 
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
index 7a11ec3..0875afa 100644
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
@@ -175,7 +175,7 @@ public class HiveITBase {
     }
 
     protected String createTestDFSPath(String path) throws Exception {
-        return "pfile://" + mkdir(path);
+        return "file://" + mkdir(path);
     }
 
     protected String file(String tag) throws Exception {
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index 03bbd60..eee5a75 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -286,7 +286,7 @@ public class HiveHookIT extends HiveITBase {
     private Set<ReadEntity> getInputs(String inputName, Entity.Type entityType) throws HiveException {
         final ReadEntity entity;
 
-        if (Entity.Type.DFS_DIR.equals(entityType)) {
+        if (Entity.Type.DFS_DIR.equals(entityType) || Entity.Type.LOCAL_DIR.equals(entityType)) {
             entity = new TestReadEntity(lower(new Path(inputName).toString()), entityType);
         } else {
             entity = new TestReadEntity(getQualifiedTblName(inputName), entityType);
@@ -606,7 +606,7 @@ public class HiveHookIT extends HiveITBase {
         Assert.assertNotNull(ddlQueries);
         Assert.assertEquals(ddlQueries.size(), 1);
 
-        assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, null, getOutputs(tableName, Entity.Type.TABLE)));
+        assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, getInputs("file://" + loadFile, Entity.Type.LOCAL_DIR), getOutputs(tableName, Entity.Type.TABLE)));
     }
 
     @Test
@@ -844,7 +844,7 @@ public class HiveHookIT extends HiveITBase {
         runCommand(query);
 
         HiveEventContext event = constructEvent(query,  HiveOperation.QUERY,
-                getInputs(tableName, Entity.Type.TABLE), null);
+                getInputs(tableName, Entity.Type.TABLE), getOutputs(randomLocalPath, Entity.Type.LOCAL_DIR));
         AtlasEntity hiveProcess = validateProcess(event);
         AtlasEntity hiveProcessExecution = validateProcessExecution(hiveProcess, event);
         AtlasObjectId process = toAtlasObjectId(hiveProcessExecution.getRelationshipAttribute(
@@ -1061,7 +1061,7 @@ public class HiveHookIT extends HiveITBase {
 
         String tblId = assertTableIsRegistered(DEFAULT_DB, tableName);
 
-        String filename = "pfile://" + mkdir("exportUnPartitioned");
+        String filename = "file://" + mkdir("exportUnPartitioned");
         String query    = "export table " + tableName + " to \"" + filename + "\"";
 
         runCommand(query);
@@ -1114,7 +1114,7 @@ public class HiveHookIT extends HiveITBase {
         Assert.assertNotEquals(processEntity.getGuid(), processEntity2.getGuid());
 
         //Should create another process
-        filename = "pfile://" + mkdir("export2UnPartitioned");
+        filename = "file://" + mkdir("export2UnPartitioned");
         query    = "export table " + tableName + " to \"" + filename + "\"";
 
         runCommand(query);
@@ -1178,7 +1178,7 @@ public class HiveHookIT extends HiveITBase {
         String tblId = assertTableIsRegistered(DEFAULT_DB, tableName);
 
         //Add a partition
-        String partFile = "pfile://" + mkdir("partition");
+        String partFile = "file://" + mkdir("partition");
         String query    = "alter table " + tableName + " add partition (dt='"+ PART_FILE + "') location '" + partFile + "'";
 
         runCommand(query);

Reply via email to