-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathHotTopicsApp.java
More file actions
185 lines (154 loc) · 5.08 KB
/
HotTopicsApp.java
File metadata and controls
185 lines (154 loc) · 5.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
package examples.cn.crxy.offline5.mr.pattern;
import java.io.IOException;
import java.util.Comparator;
import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * MapReduce driver that extracts the global top-k "hot topic" words ranked by
 * their TF-IDF score.
 *
 * Input lines are tab-separated with the word in column 1 and its TF-IDF score
 * in column 2 (0-based). Two generic options are honoured:
 *   -D topk=&lt;n&gt;   number of results to keep (default 1)
 *   -D type=min   keep the k smallest scores; any other value keeps the k largest
 */
public class HotTopicsApp extends Configured implements Tool {

    /**
     * Configures and submits the job.
     *
     * @param args positional arguments: [inputPath] [outputPath]
     * @return 0 on success, -1 on bad arguments or job failure
     */
    @Override
    public int run(String[] args) throws Exception {
        // NOTE: this check must live in run(), not main(): ToolRunner strips
        // the -D generic options before invoking run(), so only the positional
        // arguments remain here.
        if (args == null || args.length != 2) {
            System.err.println("参数必须有2个,分别是[inputPath] [outputPath]");
            // Return a failure status instead of System.exit(-1) so ToolRunner
            // and main() can propagate it normally (Tool contract).
            return -1;
        }
        String inputPath = args[0];
        Path outputDir = new Path(args[1]);
        Configuration conf = getConf();
        // Remove a stale output directory so a re-run does not fail.
        outputDir.getFileSystem(conf).delete(outputDir, true);

        String jobName = HotTopicsApp.class.getSimpleName();
        Job job = Job.getInstance(conf, jobName);
        job.setJarByClass(HotTopicsApp.class);

        FileInputFormat.setInputPaths(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputDir);

        job.setMapperClass(HotTopicsMapper.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(Text.class);

        job.setReducerClass(HotTopicsReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        // A single reducer is required for a correct *global* top-k merge.
        job.setNumReduceTasks(1);

        return job.waitForCompletion(true) ? 0 : -1;
    }

    public static void main(String[] args) throws Exception {
        // Propagate the job status as the process exit code; the original
        // discarded ToolRunner.run()'s return value and always exited 0.
        System.exit(ToolRunner.run(new HotTopicsApp(), args));
    }

    /**
     * Emits each map task's local top-k candidates in cleanup(), so the single
     * reducer only merges k records per mapper instead of every input line.
     * Expected value format: col0 \t word \t tfidf (tab separated).
     */
    public static class HotTopicsMapper extends Mapper<LongWritable, Text, NullWritable, Text> {

        TreeSet<TFIDFWord> topkSet = null;
        int k = 1;
        String type = null;

        @Override
        protected void setup(Mapper<LongWritable, Text, NullWritable, Text>.Context context)
                throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();
            this.k = conf.getInt("topk", 1);
            this.type = conf.get("type", "min");
            if ("min".equals(this.type)) {
                // Natural order: pollLast() evicts the largest, keeping the k smallest.
                topkSet = new TreeSet<>();
            } else {
                // Reversed order: pollLast() evicts the smallest, keeping the k largest.
                topkSet = new TreeSet<>(new Comparator<TFIDFWord>() {
                    @Override
                    public int compare(TFIDFWord o1, TFIDFWord o2) {
                        return -o1.compareTo(o2);
                    }
                });
            }
        }

        @Override
        protected void map(LongWritable key, Text value,
                Mapper<LongWritable, Text, NullWritable, Text>.Context context)
                throws IOException, InterruptedException {
            String[] fields = value.toString().split("\t");
            topkSet.add(new TFIDFWord(Double.parseDouble(fields[2]), fields[1]));
            if (topkSet.size() > k) {
                topkSet.pollLast();
            }
        }

        @Override
        protected void cleanup(Mapper<LongWritable, Text, NullWritable, Text>.Context context)
                throws IOException, InterruptedException {
            // Flush the surviving candidates once all input has been consumed.
            for (TFIDFWord v : topkSet) {
                context.write(NullWritable.get(), new Text(v.toString()));
            }
        }
    }

    /**
     * Value object pairing a word with its TF-IDF score. Ordered by score
     * ascending, then by word, so that distinct words sharing a score are
     * both retained in a TreeSet.
     */
    public static class TFIDFWord implements Comparable<TFIDFWord> {
        public double itfidf;
        public String word;

        /** Parses "score \t word", the format produced by {@link #toString()}. */
        public TFIDFWord(String line) {
            String[] fields = line.split("\t");
            this.itfidf = Double.parseDouble(fields[0]);
            this.word = fields[1];
        }

        public TFIDFWord(double itfidf, String word) {
            this.itfidf = itfidf;
            this.word = word;
        }

        @Override
        public int compareTo(TFIDFWord o) {
            // Double.compare avoids the subtraction idiom; the word tie-breaker
            // fixes a real bug: the original returned 0 for distinct words with
            // equal scores, so TreeSet silently dropped one of them.
            int byScore = Double.compare(itfidf, o.itfidf);
            return byScore != 0 ? byScore : word.compareTo(o.word);
        }

        @Override
        public String toString() {
            return itfidf + "\t" + word;
        }
    }

    /**
     * Merges all mappers' candidates into the global top-k. With NullWritable
     * as the only key, reduce() is invoked exactly once per reducer; the job
     * sets a single reducer so the result is globally correct.
     */
    public static class HotTopicsReducer extends Reducer<NullWritable, Text, Text, NullWritable> {

        TreeSet<TFIDFWord> topkSet = null;
        int k = 1;
        String type = null;
        // Reused output key to avoid per-record allocation.
        Text k3 = new Text();

        @Override
        protected void setup(Reducer<NullWritable, Text, Text, NullWritable>.Context context)
                throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();
            this.k = conf.getInt("topk", 1);
            this.type = conf.get("type", "min");
            if ("min".equals(this.type)) {
                topkSet = new TreeSet<>();
            } else {
                topkSet = new TreeSet<>(new Comparator<TFIDFWord>() {
                    @Override
                    public int compare(TFIDFWord o1, TFIDFWord o2) {
                        return -o1.compareTo(o2);
                    }
                });
            }
        }

        @Override
        protected void reduce(NullWritable k2, Iterable<Text> v2s,
                Reducer<NullWritable, Text, Text, NullWritable>.Context context)
                throws IOException, InterruptedException {
            for (Text v2 : v2s) {
                topkSet.add(new TFIDFWord(v2.toString()));
                if (topkSet.size() > k) {
                    topkSet.pollLast();
                }
            }
            for (TFIDFWord v : topkSet) {
                k3.set(v.toString());
                context.write(k3, NullWritable.get());
            }
        }
    }
}