Covid19_3.java
import java.io.*;
import java.util.*;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.fs.FileSystem;
public class Covid19_3 {

    // 4 types declared: type of input key, type of input value, type of output key, type of output value.
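    // For this job the concrete mapper types are: input key = Object (the byte offset supplied by the
    // default TextInputFormat), input value = Text (one CSV line of the case data), output key = Text
    // (the location name), output value = DoubleWritable (the new-case count parsed from that line).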
    public static class MyMapper extends Mapper<Object, Text, Text, DoubleWritable> {
        private DoubleWritable new_cases = new DoubleWritable(0);
        private Text loc = new Text();

        // The 4 types declared here should match the types declared at the top.
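        // Each input value is assumed to be one CSV record whose first three fields are, in order, a
        // date, a location name, and that day's new-case count. This layout is inferred from the three
        // tokens consumed below; it is not stated elsewhere in this file, so treat it as an assumption
        // about the input data.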
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            StringTokenizer tok = new StringTokenizer(value.toString(), ",");
            try {
                if (tok.hasMoreTokens()) {
                    tok.nextToken(); // skip the first field
                }
                if (tok.hasMoreTokens()) {
                    loc.set(tok.nextToken()); // second field: location
                }
                if (tok.hasMoreTokens()) {
                    new_cases.set(Long.parseLong(tok.nextToken())); // third field: new cases
                }
                context.write(loc, new_cases);
            } catch (Exception e) {
                System.out.println("exception occurred");
                e.printStackTrace();
            }
        }
    }
    // 4 types declared: type of input key, type of input value, type of output key, type of output value.
    // The input types of reduce should match the output types of map.
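    // The reducer aggregates the per-record new-case counts for each location and converts the total
    // into cases per million people, using the population figures loaded from the cached file in setup().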
    public static class MyReducer extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {
        private DoubleWritable total_cases_per_million = new DoubleWritable(0);
        private Hashtable<String, Long> loc_population = new Hashtable<String, Long>();
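        // The cached file is assumed to be a CSV in which the second field (words[1]) is the location
        // name and the fifth field (words[4]) is its population; this is inferred from the indices used
        // in setup() below, not from any schema declared in this file.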
        public void setup(Context context) throws IOException, InterruptedException {
            URI[] files_uri = context.getCacheFiles();
            FileSystem fs = FileSystem.get(context.getConfiguration());
            Path getFilePath = new Path(files_uri[0].toString());
            BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(getFilePath)));
            String line;
            while ((line = reader.readLine()) != null) {
                String[] words = line.split(",");
                try {
                    loc_population.put(words[1], Long.parseLong(words[4]));
                } catch (Exception e) {
                    System.out.println("Exception for words[1] " + words[1]);
                }
            }
            reader.close();
        }
        // Notice that the 2nd argument, the type of the input value, is an Iterable collection of
        // objects of the same type as the output value declared for map above.
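        // The output value is cases per million: (total new cases for the location * 1,000,000) / population.
        // Illustrative example (made-up numbers): 50,000 total cases in a population of 10,000,000
        // gives 50,000 * 1,000,000 / 10,000,000 = 5,000 cases per million.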
        public void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
            double sum = 0;
            for (DoubleWritable tmp : values) {
                sum += tmp.get();
            }
            try {
                long tot_pop = loc_population.get(key.toString());
                total_cases_per_million.set((sum * 1000000.0) / tot_pop);
                // This writes to the final output.
                context.write(key, total_cases_per_million);
            } catch (Exception e) {
                System.out.println("error in key " + key.toString());
            }
        }
    }
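    // Not part of the original job setup: a hypothetical combiner sketch that pre-sums the per-location
    // case counts on the map side. It only sums (the per-million division stays in MyReducer), so wiring
    // it in with myjob.setCombinerClass(MyCombiner.class) would be an optional assumption, not something
    // main() below actually does.
    public static class MyCombiner extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {
        private DoubleWritable partial_sum = new DoubleWritable(0);

        public void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
            double sum = 0;
            for (DoubleWritable tmp : values) {
                sum += tmp.get();
            }
            partial_sum.set(sum);
            context.write(key, partial_sum);
        }
    }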
    public static void main(String[] args) throws Exception {
        long st = new Date().getTime();
        Configuration conf = new Configuration();
        Job myjob = Job.getInstance(conf, "task 3");
        myjob.addCacheFile(new Path(args[1]).toUri());
        myjob.setJarByClass(Covid19_3.class);
        myjob.setMapperClass(MyMapper.class);
        myjob.setReducerClass(MyReducer.class);
        myjob.setOutputKeyClass(Text.class);
        myjob.setOutputValueClass(DoubleWritable.class);
        // Uncomment to set the number of reduce tasks.
        // myjob.setNumReduceTasks(2);
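        // Even with several reduce tasks, all values for a given location still reach the same reducer,
        // because the default HashPartitioner assigns records to reduce tasks by key.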
        FileInputFormat.addInputPath(myjob, new Path(args[0]));
        FileOutputFormat.setOutputPath(myjob, new Path(args[2]));
        boolean ret = myjob.waitForCompletion(true);
        long et = new Date().getTime();
        double est_t = (et - st) / 1000.0;
        System.out.println("Time taken for task 3: " + est_t + " sec");
        System.exit(ret ? 0 : 1);
    }
}
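// Example invocation (illustrative: the jar name and HDFS paths are placeholders; only the argument
// order is taken from main() above, i.e. args[0] = case-data input, args[1] = population file placed
// in the distributed cache, args[2] = output directory):
//   hadoop jar Covid19_3.jar Covid19_3 /data/covid_cases.csv /data/populations.csv /output/task3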