This is the example code from page 135 of "Hadoop: The Definitive Guide", Second Edition (revised).
package lin;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * A {@link Tool} that prints every property in the merged Hadoop
 * {@link Configuration} (defaults plus site overrides), one
 * {@code key=value} pair per line.
 *
 * <p>Run via {@link ToolRunner} so that generic Hadoop command-line
 * options (e.g. {@code -D key=value}, {@code -conf file}) are parsed
 * into the configuration before it is printed.
 */
public class ConfigurationPrinter extends Configured implements Tool {
    static {
        // Register the HDFS and MapReduce resource files so their
        // properties appear in the merged configuration. (core-default.xml
        // and core-site.xml are always loaded by Configuration itself.)
        Configuration.addDefaultResource("hdfs-default.xml");
        // Fixed typo: was "hdfs-sit.xml", which silently loads nothing.
        Configuration.addDefaultResource("hdfs-site.xml");
        Configuration.addDefaultResource("mapred-default.xml");
        Configuration.addDefaultResource("mapred-site.xml");
    }

    /**
     * Prints every configuration property to standard output.
     *
     * @param args unused; generic options were already consumed by ToolRunner
     * @return 0 on success
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        for (Entry<String, String> entry : conf) {
            System.out.printf("%s=%s%n", entry.getKey(), entry.getValue());
        }
        return 0;
    }

    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new ConfigurationPrinter(), args);
        // Propagate the tool's result as the process exit status instead of
        // printing it; otherwise the JVM always exits 0 even on failure.
        System.exit(exitCode);
    }
}
Output when run inside Eclipse:
fs.default.name=file:///
Output after packaging into a jar and running from the command line:
fs.default.name=hdfs://localhost:9000
Is this difference normal?
------ Solution ------------------------------------ --------
Yes, this is normal: inside Eclipse the site configuration files are not on the classpath, so the default (local filesystem) value is used; from the packaged jar the cluster configuration is picked up.
No comments:
Post a comment