[prev in list] [next in list] [prev in thread] [next in thread]
List: hadoop-commits
Subject: svn commit: r903913 - in /hadoop/common/branches/branch-0.20: CHANGES.txt
From: cos () apache ! org
Date: 2010-01-27 23:56:33
Message-ID: 20100127235633.9E00323889C5 () eris ! apache ! org
[Download RAW message or body]
Author: cos
Date: Wed Jan 27 23:56:33 2010
New Revision: 903913
URL: http://svn.apache.org/viewvc?rev=903913&view=rev
Log:
HDFS-919. svn merge -c 903906 from trunk to branch_0.20
Modified:
hadoop/common/branches/branch-0.20/CHANGES.txt
hadoop/common/branches/branch-0.20/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java
Modified: hadoop/common/branches/branch-0.20/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20/CHANGES.txt?rev=903913&r1=903912&r2=903913&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20/CHANGES.txt Wed Jan 27 23:56:33 2010
@@ -7,6 +7,11 @@
HADOOP-6218. Adds a feature where TFile can be split by Record
Sequence number. (Hong Tang and Raghu Angadi via ddas)
+ IMPROVEMENTS
+
+ HDFS-919. Create test to validate the BlocksVerified metric (Gary Murry
+ via cos)
+
BUG FIXES
MAPREDUCE-112. Add counters for reduce input, output records to the new API.
Modified: hadoop/common/branches/branch-0.20/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java?rev=903913&r1=903912&r2=903913&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java (original)
+++ hadoop/common/branches/branch-0.20/src/test/org/apache/hadoop/hdfs/TestDatanodeBlockScanner.java Wed Jan 27 23:56:33 2010
@@ -50,12 +50,15 @@
private static Pattern pattern =
Pattern.compile(".*?(blk_[-]*\\d+).*?scan time\\s*:\\s*(\\d+)");
+
+ private static Pattern pattern_blockVerify =
+ Pattern.compile(".*?(SCAN_PERIOD)\\s*:\\s*(\\d+.*?)");
/**
* This connects to datanode and fetches block verification data.
* It repeats this until the given block has a verification time > 0.
*/
private static long waitForVerification(DatanodeInfo dn, FileSystem fs,
- Path file) throws IOException {
+                                          Path file, int blocksValidated) throws IOException {
    URL url = new URL("http://localhost:" + dn.getInfoPort() +
"/blockScannerReport?listblocks");
long lastWarnTime = System.currentTimeMillis();
@@ -65,6 +68,14 @@
while (verificationTime <= 0) {
String response = DFSTestUtil.urlGet(url);
+ if(blocksValidated >= 0) {
+        for(Matcher matcher = pattern_blockVerify.matcher(response); matcher.find();) {
+          if (block.equals(matcher.group(1))) {
+            assertEquals("Wrong number of blocks reported for validation.",
+                         blocksValidated, Long.parseLong(matcher.group(2)));
+            break;
+          }
+        }
+      }
for(Matcher matcher = pattern.matcher(response); matcher.find();) {
if (block.equals(matcher.group(1))) {
verificationTime = Long.parseLong(matcher.group(2));
@@ -115,7 +126,7 @@
/*
* The cluster restarted. The block should be verified by now.
*/
- assertTrue(waitForVerification(dn, fs, file1) > startTime);
+ assertTrue(waitForVerification(dn, fs, file1, 1) > startTime);
/*
* Create a new file and read the block. The block should be marked
@@ -124,7 +135,7 @@
DFSTestUtil.createFile(fs, file2, 10, (short)1, 0);
IOUtils.copyBytes(fs.open(file2), new IOUtils.NullOutputStream(),
conf, true);
- assertTrue(waitForVerification(dn, fs, file2) > startTime);
+ assertTrue(waitForVerification(dn, fs, file2, 2) > startTime);
cluster.shutdown();
}
[prev in list] [next in list] [prev in thread] [next in thread]
Configure |
About |
News |
Add a list |
Sponsored by KoreLogic