steveloughran commented on a change in pull request #1530: HDFS-14869 Copy 
renamed files which are not excluded anymore by filter
URL: https://github.com/apache/hadoop/pull/1530#discussion_r333438379
 
 

 ##########
 File path: 
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSync.java
 ##########
 @@ -747,4 +754,84 @@ public void testSyncSnapshotTimeStampChecking() throws 
Exception {
     }
     Assert.assertTrue(threwException);
   }
+
+  private void initData10(Path dir) throws Exception {
+    final Path staging = new Path(dir, ".staging");
+    final Path staging_f1 = new Path(staging, "f1");
+    final Path data = new Path(dir, "data");
+    final Path data_f1 = new Path(data, "f1");
+
+    DFSTestUtil.createFile(dfs, staging_f1, BLOCK_SIZE, DATA_NUM, 0L);
+    DFSTestUtil.createFile(dfs, data_f1, BLOCK_SIZE, DATA_NUM, 0L);
+  }
+
+  private void changeData10(Path dir) throws Exception {
+    final Path staging = new Path(dir, ".staging");
+    final Path prod = new Path(dir, "prod");
+    dfs.rename(staging, prod);
+  }
+
+  private void generateFilterFile(String directory, String fileName){
+    File theDir = new File(directory);
+    boolean threwException = false;
+    if (!theDir.exists()) {
+      theDir.mkdir();
+    }
+    String str = ".*\\.staging.*";
+    BufferedWriter writer = null;
+    try {
+      writer = new BufferedWriter(new FileWriter(directory + "/" + fileName));
+      writer.write(str);
+      writer.close();
+    } catch (IOException e) {
+      threwException = true;
+    }
+    Assert.assertFalse(threwException);
+  }
+
+  private void deleteFilterFile(String directory, String fileName) {
+    File theDir = new File(directory);
+    if (!theDir.exists()) {
+      return;
+    }
+    File file = new File(directory + "/" + fileName);
+    if (file.exists()) {
+      file.delete();
+    }
+    theDir.delete();
+  }
+
+  @Test
+  public void testSync10() throws Exception {
+    try {
+      Path sourcePath = new Path(dfs.getWorkingDirectory(), "source");
+      initData10(sourcePath);
+      dfs.allowSnapshot(sourcePath);
+      dfs.createSnapshot(sourcePath, "s1");
+      generateFilterFile("/tmp", "filters.txt");
 
 Review comment:
   We can't rely on /tmp existing or being writable, especially on Jenkins
 systems. Use the temp-file API in File itself (File.createTempFile, or the
 java.io.tmpdir system property) instead of a hard-coded path.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to