
Hadoop: The Definitive Guide -- the max temperature example -- why the inner classes must be static

 

The old API:

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class MaxTemperature {

  public static class MaxTemperatureMapper extends MapReduceBase
      implements Mapper<LongWritable, Text, Text, IntWritable> {

    private static final int MISSING = 9999;

    @Override
    public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter)
        throws IOException {
      String line = value.toString();
      String year = line.substring(15, 19);
      int airTemperature;
      if (line.charAt(87) == '+') {
        airTemperature = Integer.parseInt(line.substring(88, 92));
      } else {
        airTemperature = Integer.parseInt(line.substring(87, 92));
      }
      String quality = line.substring(92, 93);
      if (airTemperature != MISSING && quality.matches("[01459]")) {
        output.collect(new Text(year), new IntWritable(airTemperature));
      }
    }
  }

  public static class MaxTemperatureReducer extends MapReduceBase
      implements Reducer<Text, IntWritable, Text, IntWritable> {

    @Override
    public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output,
        Reporter reporter) throws IOException {
      int maxvalue = Integer.MIN_VALUE;
      while (values.hasNext()) {
        maxvalue = Math.max(maxvalue, values.next().get());
      }
      output.collect(key, new IntWritable(maxvalue));
    }
  }

  public static void main(String[] args) throws IOException {
    if (args.length != 2) {
      System.err.println("Usage: MaxTemperature <input path> <output path>");
      System.exit(-1);
    }
    JobConf conf = new JobConf(MaxTemperature.class);
    conf.setJobName("Max temperature");

    FileInputFormat.addInputPath(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));
    conf.setMapperClass(MaxTemperatureMapper.class);
    conf.setReducerClass(MaxTemperatureReducer.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    JobClient.runJob(conf);
  }

}
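To make the hard-coded offsets in map() easier to follow, here is a small stand-alone sketch. The record it builds is synthetic padding, not a real NCDC line; only the columns the mapper actually reads are filled in:

public class RecordLayoutDemo {
  public static void main(String[] args) {
    // Synthetic 93-character line: everything is '0' except the fields the mapper reads.
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < 93; i++) {
      sb.append('0');
    }
    sb.replace(15, 19, "1950");   // columns 15-18: year
    sb.replace(87, 92, "+0011");  // column 87: sign, columns 88-91: temperature in tenths of a degree
    sb.setCharAt(92, '1');        // column 92: quality code
    String line = sb.toString();

    String year = line.substring(15, 19);                           // "1950"
    int airTemperature = Integer.parseInt(line.substring(88, 92));  // 11, i.e. 1.1 degrees Celsius
    String quality = line.substring(92, 93);                        // "1"
    System.out.println(year + " " + airTemperature + " " + quality);
  }
}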

The new API:

import java.io.IOException;
import java.util.HashMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class NewMaxTemperature {

  public static class NewMaxTemperatureMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final int MISSING = 9999;

    @Override
    public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      String line = value.toString();
      String year = line.substring(15, 19);
      int airTemperature;
      if (line.charAt(87) == '+') {
        airTemperature = Integer.parseInt(line.substring(88, 92));
      } else {
        airTemperature = Integer.parseInt(line.substring(87, 92));
      }
      String quality = line.substring(92, 93);
      if (airTemperature != MISSING && quality.matches("[01459]")) {
        context.write(new Text(year), new IntWritable(airTemperature));
      }
    }
  }

  public static class NewMaxTemperatureReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
      int maxvalue = Integer.MIN_VALUE;
      // Find the maximum temperature among all values for this key.
      for (IntWritable value : values) {
        maxvalue = Math.max(maxvalue, value.get());
      }
      context.write(key, new IntWritable(maxvalue));
    }

  }

  public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    if (args.length < 2) {
      System.err.println(
          "Usage: NewMaxTemperature -r <reduce tasks> -input <input path> -output <output path> [-name <job name>]");
    }
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    HashMap<String, String> params = ParamsUtil.getJobParameters(otherArgs);
    Job job = Job.getInstance(new Cluster(conf), conf);
    job.setJarByClass(NewMaxTemperature.class);
    job.setMapperClass(NewMaxTemperatureMapper.class);
    job.setReducerClass(NewMaxTemperatureReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    String reduceTasksNum = params.get("r");
    if (reduceTasksNum == null) {
      System.out.println("No reduce tasks num");
      System.exit(-1);
    }

    String input = params.get("input");
    if (input == null) {
      System.out.println("No input");
      System.exit(-1);
    }

    String output = params.get("output");
    if (output == null) {
      System.out.println("No output");
      System.exit(-1);
    }
    String name = params.get("name");
    if (name == null || "".equals(name)) {
      name = "MaxTemperature Test";
    }
    job.setNumReduceTasks(Integer.valueOf(reduceTasksNum));
    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, new Path(output));
    job.setJobName(name);
    job.waitForCompletion(true);
  }

}

When I first ran the new-API version, the inner classes were not declared static, and the job failed at runtime with the following error:

WARN org.apache.hadoop.mapred.Child: Exception running child : java.lang.RuntimeException: java.lang.NoSuchMethodException: com.renren.dp.mapred.NewMaxTemperature$NewMaxTemperatureMapper.<init>()
	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:123)
	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:612)
	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:328)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:217)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:742)
	at org.apache.hadoop.mapred.Child.main(Child.java:211)
Caused by: java.lang.NoSuchMethodException: com.renren.dp.mapred.NewMaxTemperature$NewMaxTemperatureMapper.<init>()
	at java.lang.Class.getConstructor0(Class.java:2706)
	at java.lang.Class.getDeclaredConstructor(Class.java:1985)
	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
	... 7 more
The cause: Hadoop instantiates the Mapper and Reducer classes by reflection, which requires a no-argument constructor. When the inner class is not static, no instance can be obtained that way, because creating one first requires an instance of the enclosing class: new NewMaxTemperature().new NewMaxTemperatureMapper(). If the inner class is static, new NewMaxTemperatureMapper() yields an instance directly.
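The difference is easy to reproduce outside Hadoop. The sketch below uses made-up class names (InnerClassReflectionDemo, NonStaticMapper, StaticMapper) to show that reflection only finds a no-argument constructor on the static nested class, which is essentially what ReflectionUtils.newInstance relies on:

public class InnerClassReflectionDemo {

  class NonStaticMapper {}      // implicit constructor: NonStaticMapper(InnerClassReflectionDemo)
  static class StaticMapper {}  // implicit constructor: StaticMapper()

  public static void main(String[] args) throws Exception {
    // A static nested class has a no-argument constructor, so reflection can create it.
    Object ok = StaticMapper.class.getDeclaredConstructor().newInstance();
    System.out.println("static nested class created: " + ok);

    try {
      NonStaticMapper.class.getDeclaredConstructor().newInstance();
    } catch (NoSuchMethodException e) {
      // Same failure as the task log above: the only constructor takes the enclosing instance.
      System.out.println("non-static inner class failed: " + e);
    }

    // A non-static inner class can only be created through an instance of the outer class.
    NonStaticMapper viaOuter = new InnerClassReflectionDemo().new NonStaticMapper();
    System.out.println("created via outer instance: " + viaOuter);
  }
}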
The new-API version also wraps command-line parsing in a helper class:
import java.util.HashMap;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class ParamsUtil {

  public static HashMap<String, String> getJobParameters(String[] args) {
    Options options = new Options();
    // Register every "-xxx" token on the command line as an option that takes one argument.
    for (String arg : args) {
      if (arg.startsWith("-")) {
        Option option = OptionBuilder.hasArg(true).create(arg.substring(1));
        option.setArgName(arg.substring(1));
        options.addOption(option);
      }
    }
    PosixParser posixParser = new PosixParser();
    CommandLine cmd = null;
    try {
      cmd = posixParser.parse(options, args);
      HashMap<String, String> params = new HashMap<String, String>();
      for (Option op : cmd.getOptions()) {
        params.put(op.getArgName(), op.getValue());
        System.out.println("Params:" + op.getArgName() + "=" + op.getValue());
      }
      return params;
    } catch (ParseException e) {
      e.printStackTrace();
      System.exit(0);
    }
    return null;
  }
}
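For reference, a small usage sketch. The flag values and the ParamsUtilDemo class are made up; they just mirror the keys (r, input, output, name) that NewMaxTemperature.main() reads:

import java.util.HashMap;

public class ParamsUtilDemo {
  public static void main(String[] args) {
    // Hypothetical flags matching the keys read in NewMaxTemperature.main().
    String[] jobArgs = {
        "-r", "2",
        "-input", "/user/test/ncdc/input",
        "-output", "/user/test/ncdc/output",
        "-name", "MaxTemperature Test"
    };
    HashMap<String, String> params = ParamsUtil.getJobParameters(jobArgs);
    // Expected contents: {r=2, input=/user/test/ncdc/input,
    //                     output=/user/test/ncdc/output, name=MaxTemperature Test}
    System.out.println(params);
  }
}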