1.創(chuàng)建maven工程
　　配置hadoop的依賴環(huán)境;
pom.xml
<!-- Maven build descriptor for the Hadoop MapReduce demo project. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>hadoop</groupId>
<artifactId>hadoop1</artifactId>
<version>0.0.1-SNAPSHOT</version>
<dependencies>
<!-- Hadoop client API: provides the Mapper/Reducer/Job classes used by the code below.
     Version must match (or be compatible with) the cluster the jar is submitted to. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.6.1</version>
</dependency>
<!-- JUnit, for local unit tests only; not needed at cluster runtime. -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
</dependency>
</dependencies>
</project>
2.測試所使用數(shù)據(jù):1995年美國航空數(shù)據(jù),字段名;
3.編寫MyMapper1.java
package test;
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Mapper over the 1995 US airline CSV data set.
 * For every data row it emits {@code <cancellation reason, month>} so the
 * reducer can count cancellations per reason.
 *
 * Input:  <byte offset, one CSV line>
 * Output: <reason code (column 22), month (column 1)>
 */
public class MyMapper1 extends Mapper<LongWritable, Text, Text, Text> {

    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
            throws IOException, InterruptedException {
        // value is one line of the input file; split it into its CSV fields.
        String line = value.toString();
        String[] fields = line.split(",");
        // Fix 1: test the length BEFORE indexing, so short/blank lines cannot
        //        throw ArrayIndexOutOfBoundsException.
        // Fix 2: skip the header row by checking the CancellationCode column
        //        itself (index 22) — the original checked column 1, which holds
        //        "Month" in the header, so the header row leaked into the output.
        if (fields.length > 22 && !fields[22].equals("CancellationCode")) {
            String reason = fields[22]; // cancellation reason for this row
            String month = fields[1];   // month for this row
            context.write(new Text(reason), new Text(month));
        }
    }
}
4.編寫MyReducer1.java
package test;
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Reducer: receives <k2 = cancellation reason, v2s = months ["1","1","2",...]>
 * and counts how many of the month values equal "1", i.e. how many
 * cancellations for this reason happened in January.
 * Output record: <reason, January count>.
 */
public class MyReducer1 extends Reducer<Text, Text, Text, LongWritable> {

    @Override
    protected void reduce(Text k2, Iterable<Text> v2s, Reducer<Text, Text, Text, LongWritable>.Context context)
            throws IOException, InterruptedException {
        long januaryCount = 0L;
        // Tally only the values belonging to January ("1").
        for (Text monthValue : v2s) {
            if ("1".equals(monthValue.toString())) {
                januaryCount++;
            }
        }
        // Emit <reason, count>.
        context.write(k2, new LongWritable(januaryCount));
    }
}
5.編寫入口程序WordCount.java
package test;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Job driver: wires MyMapper1 and MyReducer1 into a MapReduce job that
 * counts January flight cancellations per cancellation reason.
 * Run on the cluster with: hadoop jar test.jar test.WordCount
 */
public class WordCount {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        // HDFS NameNode address; adjust to the target cluster.
        conf.set("fs.defaultFS", "hdfs://hadoop01:9000");
        Job job = Job.getInstance(conf, WordCount.class.getSimpleName());
        // Class whose containing jar is shipped to the cluster.
        job.setJarByClass(WordCount.class);
        // Map phase output types <k2, v2> = <reason, month>.
        job.setMapperClass(MyMapper1.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        // Reduce phase output types <k3, v3> = <reason, count>.
        job.setReducerClass(MyReducer1.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // HDFS input/output paths; /output must not exist before the run.
        FileInputFormat.setInputPaths(job, new Path("/input"));
        FileOutputFormat.setOutputPath(job, new Path("/output"));
        // Fix: propagate job success/failure as the process exit status so
        // "hadoop jar" returns non-zero on failure (the original only printed
        // the boolean and always exited 0).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
6.打成jar包,上傳到服務(wù)器中并執(zhí)行
　　服務(wù)器執(zhí)行jar包:hadoop jar test.jar test.WordCount
　　其中test.WordCount是入口程序的全限定名,一定要注意這里。
　　運行結(jié)果:
　　該數(shù)據(jù)統(tǒng)計的是:1995年1月份31天中,因為各種原因取消的航班數(shù),ABCD對應(yīng)不同原因,NA對應(yīng)正常航行。由于該數(shù)據(jù)源所有的航班都是正常航行,所以我手動修改了其中的幾個航班,分別設(shè)置成A B C D原因。可見該程序正常統(tǒng)計。其中第4行是數(shù)據(jù)源中第一行數(shù)據(jù),記錄著列名。
　　如果數(shù)據(jù)篩選過程較為復(fù)雜,那么可編寫多個Mapper和Reducer。具體方法參考以下:
https://blog.csdn.net/u010521842/article/details/75042771 ??作者:yanzhelee