Configuring a Windows environment for developing MapReduce programs
Step 1: Download the full Hadoop binary distribution.
Step 2: Add all of the Hadoop jars to a user library in the IDE.
Step 3: Set two environment variables:
HADOOP_HOME=D:\hadoop-2.7.6
HADOOP_USER_NAME=hdfs
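On Windows the Hadoop client also has to be able to locate winutils.exe under %HADOOP_HOME%\bin, which is what HADOOP_HOME is used for. If the two variables cannot be set system-wide, the same effect can be had from code before any Hadoop classes are touched. A minimal sketch (the class name is ours; the install path assumes the HADOOP_HOME value above):

public class WindowsHadoopSetup {
    // Apply the step-3 settings programmatically; call this at the very start of main().
    public static void apply() {
        System.setProperty("hadoop.home.dir", "D:\\hadoop-2.7.6"); // lets Hadoop find bin\winutils.exe
        System.setProperty("HADOOP_USER_NAME", "hdfs");            // user the job is submitted as
    }
}

The WordCount example below relies on these settings: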
package com.jsptpd.test1314;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import java.io.IOException;
import java.util.StringTokenizer;

public class WordCount {

    private static String PATH_TO_CORE_SITE_XML = "D:\\workspace_scala1\\test1314\\conf\\core-site.xml";
    private static String PATH_TO_YARN_SITE_XML = "D:\\workspace_scala1\\test1314\\conf\\yarn-site.xml";
    private static String PATH_TO_MAP_SITE_XML  = "D:\\workspace_scala1\\test1314\\conf\\mapred-site.xml";
    private static String PATH_TO_HDFS_SITE_XML = "D:\\workspace_scala1\\test1314\\conf\\hdfs-site.xml";

    public static void main(String[] args) throws Exception {
        // Set the user Hadoop runs as
        System.setProperty("HADOOP_USER_NAME", "hdfs");
        // Load the default configuration
        Configuration conf = new Configuration();
        //conf.addResource(new Path(PATH_TO_HDFS_SITE_XML));
        // Add the cluster's Hadoop configuration
        conf.addResource(new Path(PATH_TO_CORE_SITE_XML));
        // Point the MapReduce framework archive at a local copy
        conf.set("mapreduce.application.framework.path", "file:///D:/mapreduce.tar.gz");
        //conf.addResource(new Path(PATH_TO_YARN_SITE_XML));
        //conf.addResource(new Path(PATH_TO_MAP_SITE_XML));
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: wordcount <in> <out>");
            System.exit(2);
        }
        Job job = Job.getInstance(conf);
        job.setJobName("WordCount");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable> {

        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            // Lowercase the line and replace punctuation with spaces
            String cleanLine = value.toString().toLowerCase().replaceAll("[_|$#<>\\^=\\[\\]\\*\\\\,;.\\-:()?!\"']", " ");
            System.out.println(cleanLine);
            // Tokenize on spaces; consecutive delimiters are skipped, so no empty words are emitted
            StringTokenizer itr = new StringTokenizer(cleanLine, " ");
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken().trim());
                context.write(word, one);
            }
        }
    }

    public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        private IntWritable result = new IntWritable();

        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }
}
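When launched from the IDE, the program expects exactly two remaining arguments: the HDFS input path and the output path, for example (hypothetical paths) /user/hdfs/input and /user/hdfs/output. Note that the output directory must not already exist, otherwise FileOutputFormat fails the job with a FileAlreadyExistsException.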
Step 4: Configure the Windows binary package.
Only core-site.xml needs to be configured:
<configuration>
    <property>
        <name>io.map.index.interval</name>
    </property>
</configuration>
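Note that a Hadoop <property> entry normally carries both a <name> and a <value>; the snippet above leaves the value out. A minimal sketch of a complete entry, assuming the stock default of 128 for io.map.index.interval:

<configuration>
    <property>
        <name>io.map.index.interval</name>
        <value>128</value>
    </property>
</configuration>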