Index: /sample/hadoop-0.16/tw/org/nchc/demo/DemoWordCountTuple.java
===================================================================
--- /sample/hadoop-0.16/tw/org/nchc/demo/DemoWordCountTuple.java	(revision 27)
+++ /sample/hadoop-0.16/tw/org/nchc/demo/DemoWordCountTuple.java	(revision 28)
@@ -163,10 +163,8 @@
 		conf.setNumReduceTasks(numReduceTasks);
 		// 0.16
-		// conf.setInputPath(new Path(inPath));
-		Convert.setInputPath(conf, new Path(inPath));
+		conf.setInputPath(new Path(inPath));
 		conf.setInputFormat(SequenceFileInputFormat.class);
 
-		// conf.setOutputPath(new Path(outputPath));
-		Convert.setInputPath(conf, new Path(outputPath));
+		conf.setOutputPath(new Path(outputPath));
 		conf.setOutputKeyClass(Tuple.class);
 		conf.setOutputValueClass(IntWritable.class);
Index: /sample/hadoop-0.16/tw/org/nchc/demo/DemoWordCountTuple2.java
===================================================================
--- /sample/hadoop-0.16/tw/org/nchc/demo/DemoWordCountTuple2.java	(revision 27)
+++ /sample/hadoop-0.16/tw/org/nchc/demo/DemoWordCountTuple2.java	(revision 28)
@@ -125,10 +125,11 @@
 		
 		// 0.16
-//		conf.setInputPath(new Path(inPath));
-		Convert.setInputPath(conf,new Path(inPath));
+		conf.setInputPath(new Path(inPath));
+//		Convert.setInputPath(conf,new Path(inPath));
 		conf.setInputFormat(SequenceFileInputFormat.class);
 		// 0.16
-//		conf.setOutputPath(new Path(outputPath));
-		Convert.setInputPath(conf, new Path(outputPath));
+		conf.setOutputPath(new Path(outputPath));
+//		Convert.setInputPath(conf, new Path(outputPath));
+
 		
 		conf.setOutputKeyClass(Tuple.class);
Index: /sample/hadoop-0.16/tw/org/nchc/util/SequenceFileProcessor.java
===================================================================
--- /sample/hadoop-0.16/tw/org/nchc/util/SequenceFileProcessor.java	(revision 27)
+++ /sample/hadoop-0.16/tw/org/nchc/util/SequenceFileProcessor.java	(revision 28)
@@ -19,4 +19,5 @@
 import java.io.IOException;
 
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -118,6 +119,7 @@
 
 	private void run() throws IOException {
-		if (!FileSystem.get(conf).isFile(mPath)) {
-			for (Path p : FileSystem.get(conf).listPaths(new Path[] { mPath })) {
+		FileSystem fs = FileSystem.get(conf);
+		if (!fs.isFile(mPath)) {
+			for (Path p : listPaths(fs, mPath)) {
 				// System.out.println("Applying to " + p);
 				applyToFile(p);
@@ -128,5 +130,15 @@
 
 	}
-
+	/**
+	 * Lists the paths under {@code path}; replaces the removed FileSystem.listPaths API.
+	 */
+	public static Path[] listPaths(FileSystem fsm, Path path) throws IOException {
+		FileStatus[] fss = fsm.listStatus(path);
+		Path[] pi = new Path[fss.length];
+		for (int i = 0; i < fss.length; i++) {
+			pi[i] = fss[i].getPath();
+		}
+		return pi;
+	}
 	@SuppressWarnings("unchecked")
 	private void applyToFile(Path path) throws IOException {
