ClassCastException in the First Hadoop Program under Eclipse


Solution:


It turned out to be a version problem. I was using the code below, and the error only appears when it runs on hadoop-0.20.2: the listing is written against the old API, and on hadoop-0.20.2 you should implement the job with the new API instead, which avoids the problem.



public static void main(String[] args) throws IOException {
    if (args.length != 2) {
        // The argument placeholders were lost in the original post; this is
        // the standard usage line from the book's MaxTemperature example.
        System.err.println("Usage: MaxTemperature <input path> <output path>");
        System.exit(-1);
    }

    JobConf conf = new JobConf(MaxTemperature.class);
    conf.setJobName("Max temperature");

    FileInputFormat.addInputPath(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    conf.setMapperClass(MaxTemperatureMapper.class);
    conf.setReducerClass(MaxTemperatureReducer.class);

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    JobClient.runJob(conf);
}



public void map(LongWritable key, Text value,
        OutputCollector<Text, IntWritable> output, Reporter reporter)
        throws IOException {
    String line = value.toString();
    System.out.println("key: " + key);
    String year = line.substring(15, 19);
    int airTemperature;

    // The temperature field carries an optional leading '+' sign.
    if (line.charAt(45) == '+') {
        airTemperature = Integer.parseInt(line.substring(46, 50));
    } else {
        airTemperature = Integer.parseInt(line.substring(45, 50));
    }

    String quality = line.substring(50, 51);
    System.out.println("quality: " + quality);

    // MISSING is the sentinel value 9999, declared in the enclosing mapper class.
    if (airTemperature != MISSING && quality.matches("[01459]")) {
        output.collect(new Text(year), new IntWritable(airTemperature));
    }
}




@Override
public void reduce(Text key, Iterator<IntWritable> values,
        OutputCollector<Text, IntWritable> output, Reporter reporter)
        throws IOException {
    int maxValue = Integer.MIN_VALUE;

    while (values.hasNext()) {
        maxValue = Math.max(maxValue, values.next().get());
    }

    output.collect(key, new IntWritable(maxValue));
}
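The error itself comes from mixing the two MapReduce APIs. The listing above is written against the old org.apache.hadoop.mapred API (JobConf, OutputCollector, Reporter), while hadoop-0.20.2 ships the new org.apache.hadoop.mapreduce API alongside it; if Eclipse auto-imports some classes from one package and some from the other, the framework ends up casting one API's types to the other's at runtime, which surfaces as a ClassCastException. As a rough reference (not to be pasted into one file, since the two sets must never be mixed), these are the imports behind each version:

// Old API, used by the listing above
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;

// New API, used by the rewrite below
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;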



Rewriting the code above in the new-API style, as follows, avoids the problem.



public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    if (args.length != 2) {
        System.err.println("Usage: MaxTemperature <input path> <output path>");
        System.exit(-1);
    }

    Job job = new Job();
    job.setJarByClass(NewMaxTemperature.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    job.setMapperClass(NewMaxTemperatureMapper.class);
    job.setReducerClass(NewMaxTemperatureReducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    // The ternary operator was garbled in the original post: exit 0 on success, 1 on failure.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}





public class NewMaxTemperatureMapper extends
        Mapper<LongWritable, Text, Text, IntWritable> {

    private static final int MISSING = 9999;

    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString();
        System.out.println("key: " + key);
        String year = line.substring(15, 19);
        int airTemperature;

        // The temperature field carries an optional leading '+' sign.
        if (line.charAt(45) == '+') {
            airTemperature = Integer.parseInt(line.substring(46, 50));
        } else {
            airTemperature = Integer.parseInt(line.substring(45, 50));
        }

        String quality = line.substring(50, 51);
        System.out.println("quality: " + quality);

        if (airTemperature != MISSING && quality.matches("[01459]")) {
            context.write(new Text(year), new IntWritable(airTemperature));
        }
    }
}



public class NewMaxTemperatureReducer extends
        Reducer<Text, IntWritable, Text, IntWritable> {

    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int maxValue = Integer.MIN_VALUE;

        for (IntWritable value : values) {
            maxValue = Math.max(maxValue, value.get());
        }

        context.write(key, new IntWritable(maxValue));
    }
}
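With the three pieces above compiling cleanly, the job is submitted the usual way: package the classes into a jar and run it with the hadoop command. The jar and path names here are only placeholder examples:

hadoop jar max-temperature.jar NewMaxTemperature input/ncdc output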


Solution 2: use the Hadoop version the book specifies, i.e. a release earlier than hadoop 0.20.0, on which the old-API code above runs as-is.
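If you are unsure which release your cluster or Eclipse project is actually running against, the hadoop command can report it:

hadoop version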