十年网站开发经验 + 多家企业客户 + 靠谱的建站团队
量身定制 + 运营维护 + 专业推广 + 无忧售后,网站问题一站解决
这篇文章主要为大家展示了“Hadoop中如何自定义Map端输出Key”,内容简明易懂、条理清晰,希望能够帮助大家解决疑惑。下面让小编带领大家一起研究并学习“Hadoop中如何自定义Map端输出Key”这篇文章吧。

你所需要的网站建设服务,我们均能以行业靠前的水平为你提供。标准是产品质量的保证,主要从事成都网站设计、做网站、企业网站建设、手机网站制作、网页设计、品牌网站制作、网页制作、建网站。创新互联拥有实力强劲的技术研发团队及高素养的视觉设计专才。
package sort;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * Custom map-output-key sort example: emits a composite {@link NewKey}
 * from the map phase so the shuffle sorts records by the first column,
 * then by the second column.
 *
 * Input:          Output:
 *   3 3             1 1
 *   3 2             2 1
 *   3 1             2 2
 *   2 2             3 1
 *   2 1             3 2
 *   1 1             3 3
 *
 * @author Xr
 */
public class SortApp {
    public static final String INPUT_PATH = "hdfs://hadoop:9000/data";
    public static final String OUTPUT_PATH = "hdfs://hadoop:9000/datas";

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        deleteOutputIfExists(conf);
        // Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
        Job job = Job.getInstance(conf, SortApp.class.getName());
        // Without setJarByClass, the job jar cannot be located when run on a cluster.
        job.setJarByClass(SortApp.class);
        FileInputFormat.setInputPaths(job, INPUT_PATH);
        job.setMapperClass(MyMapper.class);
        // Composite key — this is what drives the custom shuffle-phase sort order.
        job.setMapOutputKeyClass(NewKey.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(LongWritable.class);
        FileOutputFormat.setOutputPath(job, new Path(OUTPUT_PATH));
        // Propagate job success/failure to the process exit code instead of discarding it.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * Deletes the output directory if it already exists, so the job can be rerun
     * without FileOutputFormat failing on a pre-existing path.
     *
     * @param conf Hadoop configuration used to obtain the target FileSystem
     * @throws IOException        on filesystem access failure
     * @throws URISyntaxException if OUTPUT_PATH is not a valid URI
     */
    private static void deleteOutputIfExists(Configuration conf)
            throws IOException, URISyntaxException {
        FileSystem fs = FileSystem.get(new URI(OUTPUT_PATH), conf);
        Path out = new Path(OUTPUT_PATH);
        if (fs.exists(out)) {
            fs.delete(out, true); // recursive delete
        }
    }
}
/**
 * Maps each tab-separated input line "first\tsecond" to a composite
 * {@link NewKey}(first, second) paired with the second field as the value.
 * The original declared a raw {@code Mapper}; the type parameters are
 * required for the typed {@code map} signature below to actually override.
 */
class MyMapper extends Mapper<LongWritable, Text, NewKey, LongWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Expected line format: "<first>\t<second>" (both parseable as long).
        String[] fields = value.toString().split("\t");
        NewKey outKey = new NewKey();
        outKey.set(Long.parseLong(fields[0]), Long.parseLong(fields[1]));
        context.write(outKey, new LongWritable(Long.parseLong(fields[1])));
    }
}
/**
 * For each composite key, emits (key.first, max of the values).
 * The original declared a raw {@code Reducer}, which makes the enhanced-for
 * over {@code values} a compile error (raw Iterable yields Object, not
 * LongWritable); the type parameters fix that.
 */
class MyReducer extends Reducer<NewKey, LongWritable, LongWritable, LongWritable> {
    @Override
    protected void reduce(NewKey key2, Iterable<LongWritable> values, Context context)
            throws IOException, InterruptedException {
        long max = Long.MIN_VALUE;
        for (LongWritable v : values) {
            if (v.get() > max) {
                max = v.get();
            }
        }
        // Only the first column of the composite key is written out.
        context.write(new LongWritable(key2.first), new LongWritable(max));
    }
}
/**
 * Composite map-output key holding both columns of an input line.
 * Sort order: {@code first} ascending, then {@code second} ascending.
 *
 * The original used the raw {@code WritableComparable} type and omitted
 * {@code equals}/{@code hashCode}; Hadoop key types need both so hash
 * partitioning and grouping stay consistent with {@code compareTo}.
 */
class NewKey implements WritableComparable<NewKey> {
    long first;
    long second;

    /** Sets both components of the key. */
    public void set(long first, long second) {
        this.first = first;
        this.second = second;
    }

    /** Serializes the key as two longs, in (first, second) order. */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(first);
        out.writeLong(second);
    }

    /** Deserializes in the same order as {@link #write(DataOutput)}. */
    @Override
    public void readFields(DataInput in) throws IOException {
        first = in.readLong();
        second = in.readLong();
    }

    /** Compares by first, then second — both ascending. */
    @Override
    public int compareTo(NewKey o) {
        int cmp = Long.compare(first, o.first);
        return cmp != 0 ? cmp : Long.compare(second, o.second);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof NewKey)) {
            return false;
        }
        NewKey other = (NewKey) obj;
        return first == other.first && second == other.second;
    }

    @Override
    public int hashCode() {
        int result = (int) (first ^ (first >>> 32));
        return 31 * result + (int) (second ^ (second >>> 32));
    }
}
Name: Xr
Date: 2014-03-04 21:31 以上就是“Hadoop中如何自定义Map端输出Key”这篇文章的全部内容,感谢各位的阅读!相信大家都有了一定的了解,希望分享的内容对大家有所帮助。如果还想学习更多知识,欢迎关注创新互联行业资讯频道!