課程
/云計算&大數據
/大數據
/認識Hadoop--基礎篇
hadoop jar number_sort_class/numbersort.jar Sort sort_input sort_output
這里運行后出問題,不知道Partition這個類該如何嵌入代碼運行
2015-12-08
源自:認識Hadoop--基礎篇 5-3
正在回答
import java.io.IOException;import java.util.StringTokenizer;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.IntWritable;import org.apache.hadoop.io.LongWritable;import org.apache.hadoop.io.Text;import org.apache.hadoop.mapreduce.Job;import org.apache.hadoop.mapreduce.Mapper;import org.apache.hadoop.mapreduce.Reducer;import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;public class WordCount {?? ?public static class WordCountMap extends?? ??? ??? ?Mapper<LongWritable, Text, Text, IntWritable> {?? ??? ?private final IntWritable one = new IntWritable(1);?? ??? ?private Text word = new Text();?? ??? ?public void map(LongWritable key, Text value, Context context)?? ??? ??? ??? ?throws IOException, InterruptedException {?? ??? ??? ?String line = value.toString();?? ??? ??? ?StringTokenizer token = new StringTokenizer(line);?? ??? ??? ?while (token.hasMoreTokens()) {?? ??? ??? ??? ?word.set(token.nextToken());?? ??? ??? ??? ?context.write(word, one);?? ??? ??? ?}?? ??? ?}?? ?}?? ?public static class WordCountReduce extends?? ??? ??? ?Reducer<Text, IntWritable, Text, IntWritable> {?? ??? ?public void reduce(Text key, Iterable<IntWritable> values,?? ??? ??? ??? ?Context context) throws IOException, InterruptedException {?? ??? ??? ?int sum = 0;?? ??? ??? ?for (IntWritable val : values) {?? ??? ??? ??? ?sum += val.get();?? ??? ??? ?}?? ??? ??? ?context.write(key, new IntWritable(sum));?? ??? ?}?? ?}?? ?public static void main(String[] args) throws Exception {?? ??? ?Configuration conf = new Configuration();?? ??? ?Job job = new Job(conf);?? ??? ?job.setJarByClass(WordCount.class);?? ??? ?job.setJobName("wordcount");?? ??? ?job.setOutputKeyClass(Text.class);?? ??? ?job.setOutputValueClass(IntWritable.class);?? ??? 
?job.setMapperClass(WordCountMap.class);?? ??? ?job.setReducerClass(WordCountReduce.class);?? ??? ?job.setInputFormatClass(TextInputFormat.class);?? ??? ?job.setOutputFormatClass(TextOutputFormat.class);?? ??? ?FileInputFormat.addInputPath(job, new Path(args[0]));?? ??? ?FileOutputFormat.setOutputPath(job, new Path(args[1]));?? ??? ?job.waitForCompletion(true);?? ?}}
舉報
Hadoop入門視頻教程,大數據人才的入門首選
2 回答老師,你的Partition好像只分了numPartitions-1個區?
3 回答hadoop運行問題
1 回答運行Sort.java報錯
1 回答運行jar包出現錯誤
1 回答運行hadoop jar WordCount.jar WordCount input output報錯
Copyright © 2025 imooc.com All Rights Reserved | 京ICP備12003892號-11 京公網安備11010802030151號
購課補貼聯系客服咨詢優惠詳情
慕課網APP您的移動學習伙伴
掃描二維碼關注慕課網微信公眾號
2016-01-13
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class WordCount {
?? ?public static class WordCountMap extends
?? ??? ??? ?Mapper<LongWritable, Text, Text, IntWritable> {
?? ??? ?private final IntWritable one = new IntWritable(1);
?? ??? ?private Text word = new Text();
?? ??? ?public void map(LongWritable key, Text value, Context context)
?? ??? ??? ??? ?throws IOException, InterruptedException {
?? ??? ??? ?String line = value.toString();
?? ??? ??? ?StringTokenizer token = new StringTokenizer(line);
?? ??? ??? ?while (token.hasMoreTokens()) {
?? ??? ??? ??? ?word.set(token.nextToken());
?? ??? ??? ??? ?context.write(word, one);
?? ??? ??? ?}
?? ??? ?}
?? ?}
?? ?public static class WordCountReduce extends
?? ??? ??? ?Reducer<Text, IntWritable, Text, IntWritable> {
?? ??? ?public void reduce(Text key, Iterable<IntWritable> values,
?? ??? ??? ??? ?Context context) throws IOException, InterruptedException {
?? ??? ??? ?int sum = 0;
?? ??? ??? ?for (IntWritable val : values) {
?? ??? ??? ??? ?sum += val.get();
?? ??? ??? ?}
?? ??? ??? ?context.write(key, new IntWritable(sum));
?? ??? ?}
?? ?}
?? ?public static void main(String[] args) throws Exception {
?? ??? ?Configuration conf = new Configuration();
?? ??? ?Job job = new Job(conf);
?? ??? ?job.setJarByClass(WordCount.class);
?? ??? ?job.setJobName("wordcount");
?? ??? ?job.setOutputKeyClass(Text.class);
?? ??? ?job.setOutputValueClass(IntWritable.class);
?? ??? ?job.setMapperClass(WordCountMap.class);
?? ??? ?job.setReducerClass(WordCountReduce.class);
?? ??? ?job.setInputFormatClass(TextInputFormat.class);
?? ??? ?job.setOutputFormatClass(TextOutputFormat.class);
?? ??? ?FileInputFormat.addInputPath(job, new Path(args[0]));
?? ??? ?FileOutputFormat.setOutputPath(job, new Path(args[1]));
?? ??? ?job.waitForCompletion(true);
?? ?}
}