EMMA Coverage Report (generated Sat Aug 20 11:00:51 CDT 2011)

COVERAGE SUMMARY FOR SOURCE FILE [TestHadoop_0_20.java]

name                     class, %    method, %    block, %      line, %
TestHadoop_0_20.java     0% (0/3)    0% (0/11)    0% (0/497)    0% (0/104)

COVERAGE BREAKDOWN BY CLASS AND METHOD

name                                              class, %    method, %    block, %      line, %

class TestHadoop_0_20                             0% (0/1)    0% (0/5)     0% (0/285)    0% (0/63)
    TestHadoop_0_20 (String, String): void                    0% (0/1)     0% (0/13)     0% (0/6)
    executeHadoopProgram (String []): void                    0% (0/1)     0% (0/33)     0% (0/5)
    main (String []): void                                    0% (0/1)     0% (0/22)     0% (0/7)
    post (String []): void                                    0% (0/1)     0% (0/50)     0% (0/9)
    run (String []): int                                      0% (0/1)     0% (0/167)    0% (0/36)

class TestHadoop_0_20$Map                         0% (0/1)    0% (0/3)     0% (0/96)     0% (0/18)
    TestHadoop_0_20$Map (): void                              0% (0/1)     0% (0/8)      0% (0/2)
    map (LongWritable, Text, Mapper$Context): void            0% (0/1)     0% (0/57)     0% (0/9)
    setup (Mapper$Context): void                              0% (0/1)     0% (0/31)     0% (0/7)

class TestHadoop_0_20$Reduce                      0% (0/1)    0% (0/3)     0% (0/116)    0% (0/23)
    TestHadoop_0_20$Reduce (): void                           0% (0/1)     0% (0/3)      0% (0/1)
    reduce (Text, Iterable, Reducer$Context): void            0% (0/1)     0% (0/88)     0% (0/16)
    setup (Reducer$Context): void                             0% (0/1)     0% (0/25)     0% (0/6)

package felix.io;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import org.python.core.PyList;
import org.python.core.PyObject;
import org.python.core.PyString;

import felix.thirdpart.XmlInputFormat;
import felix.util.FelixConfig;

/**
 * Prototype driver that runs user-supplied Python map and reduce scripts
 * on Hadoop 0.20 through Jython.
 *
 * @deprecated
 * @author czhang
 */
public class TestHadoop_0_20 extends Configured implements Tool {

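    /**
     * Mapper that delegates each input record to a user-supplied Python map
     * script executed through a PythonExecutor. The script and the name of
     * the Python variable holding the input value come from the job
     * configuration (see run()).
     */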
    public static class Map extends Mapper<LongWritable, Text, Text, Text> {
        // Output value type is Text: map() writes Text values, not IntWritable.

        public static PythonExecutor pyMap;
        public static String _inputvalue;

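        // Builds the executor from the configured map script, prepending
        // definitions of the two output lists the script appends to.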
        @Override
        protected void setup(Context context) {
            pyMap = new PythonExecutor(
                    "_felix_donotusemyname_outkey=[]\n" +
                    "_felix_donotusemyname_outvalue=[]\n" +
                    context.getConfiguration().get("pyMapScript"));
            _inputvalue = context.getConfiguration().get("mapinputvalue");

            if (context.getConfiguration().get("pyMapInitScript") != null) {
                pyMap.execSingle(context.getConfiguration().get("pyMapInitScript"));
            }
        }

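        // Binds the record value to the configured Python variable, re-runs
        // the script, and emits every (key, value) pair the script appended
        // to the output lists.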
        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {

            pyMap.set(_inputvalue, new PyString(value.toString()));
            pyMap.run();

            PyList outKey = (PyList) pyMap.get("_felix_donotusemyname_outkey");
            PyList outValue = (PyList) pyMap.get("_felix_donotusemyname_outvalue");

            for (int i = 0; i < outKey.__len__(); i++) {
                PyObject _key = outKey.__getitem__(i);
                PyObject _value = outValue.__getitem__(i);
                context.write(new Text("" + _key), new Text("" + _value));
            }
        }
    }

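    /**
     * Reducer counterpart of Map: setup() compiles the configured Python
     * reduce script, and reduce() feeds it each key with its grouped values.
     */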
    public static class Reduce extends Reducer<Text, Text, Text, Text> {

        public static PythonExecutor pyReducer;
        public static String _inputkey;
        public static String _inputvalues;

        @Override
        protected void setup(Context context) {
            pyReducer = new PythonExecutor(
                    "_felix_donotusemyname_outkey=[]\n" +
                    "_felix_donotusemyname_outvalue=[]\n" +
                    context.getConfiguration().get("pyReduceScript"));
            _inputkey = context.getConfiguration().get("reduceinputkey");
            _inputvalues = context.getConfiguration().get("reduceinputvalues");
        }

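        // Copies the grouped values into a Python list, binds the key and
        // the values under the configured variable names, runs the script,
        // and emits the pairs it produced.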
        @Override
        protected void reduce(Text _key, Iterable<Text> _values, Context context)
                throws IOException, InterruptedException {

            ArrayList<PyString> args = new ArrayList<PyString>();
            for (Text v : _values) {
                args.add(new PyString(v.toString()));
            }

            PyList values = new PyList(args);
            PyObject inputkey = new PyString(_key.toString());

            pyReducer.set(_inputvalues, values);
            pyReducer.set(_inputkey, inputkey);
            pyReducer.run();

            PyList outKey = (PyList) pyReducer.get("_felix_donotusemyname_outkey");
            PyList outValue = (PyList) pyReducer.get("_felix_donotusemyname_outvalue");

            for (int i = 0; i < outKey.__len__(); i++) {
                PyObject key = outKey.__getitem__(i);
                PyObject value = outValue.__getitem__(i);
                context.write(new Text("" + key), new Text("" + value));
            }
        }
    }

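    // Pre-seeds the static executors; the per-task instances are rebuilt in
    // setup() from the job configuration when the job actually runs.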
    public TestHadoop_0_20(String mapScript, String reduceScript) {
        Map.pyMap = new PythonExecutor(mapScript);
        Reduce.pyReducer = new PythonExecutor(reduceScript);
    }

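    // Submits the job via ToolRunner, writing to a temporary "<output>_dir"
    // directory; post() later merges that directory into the final file.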
    public static void executeHadoopProgram(String[] toPass) throws Exception {
        String[] dirtoPass = toPass.clone();
        dirtoPass[2] += "_dir";
        ToolRunner.run(new Configuration(), new TestHadoop_0_20(dirtoPass[3], dirtoPass[4]),
                dirtoPass);
    }

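    // Merges the part files from "<output>_dir" into the single file named
    // by toPass[2] and deletes the temporary directory.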
    public static void post(String[] toPass) throws Exception {
        Configuration fsconf = new Configuration();
        fsconf.set("fs.default.name", "hdfs://d-02.cs.wisc.edu:9000");
        FileSystem fileSystem = FileSystem.get(fsconf);

        String[] dirtoPass = toPass.clone();
        dirtoPass[2] += "_dir";

        FileUtil.copyMerge(fileSystem, new Path(dirtoPass[2]), fileSystem, new Path(toPass[2]),
                true, fsconf, "");

        fileSystem.close();
    }

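    // Example invocation: a word-count job whose map and reduce logic are
    // plain Python snippets, laid out in the argument order run() expects.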
    public static void main(String[] args) throws Exception {

        String[] toPass = {
                "standard",                                          // arg0[0]: input format ("standard" or "xml")
                "hdfs://d-02.cs.wisc.edu:9000/felixNE/Entity.db",    // arg0[1]: input path
                //"hdfs://d-02.cs.wisc.edu:9000/firstMapReduceOut42/part-00000",
                "hdfs://d-02.cs.wisc.edu:9000/firstMapReduceOut100", // arg0[2]: output path

                // arg0[3]: map script; appends to the output lists defined
                // by the wrapper that Map.setup() prepends
                "for k in _input.split(' '):\n" +
                "\t_felix_donotusemyname_outkey.append(k)\n" +
                "\t_felix_donotusemyname_outvalue.append('1')\n",

                // arg0[4]: reduce script; emits one count per key
                "_felix_donotusemyname_outkey.append(_inputkey)\n" +
                "_felix_donotusemyname_outvalue.append(len(_inputvalues))\n",

                "_input",       // arg0[5]: variable bound to each map input value
                "_inputkey",    // arg0[6]: variable bound to each reduce key
                "_inputvalues", // arg0[7]: variable bound to the reduce value list
                ""              // arg0[8]: optional map init script
        };

        TestHadoop_0_20.executeHadoopProgram(toPass);
    }

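    // Configures and submits the Hadoop 0.20 job: the Python scripts and
    // variable names travel to the tasks through the Configuration.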
    @Override
    public int run(String[] arg0) throws Exception {

        Configuration conf = getConf();

        if (!FelixConfig.hadoopLocal) {
            conf.set("fs.defaultFS", "hdfs://d-02.cs.wisc.edu:9000");
            conf.set("mapred.job.tracker", "d-02.cs.wisc.edu:9001");
        }
        conf.set("pyMapScript", arg0[3]);
        conf.set("pyReduceScript", arg0[4]);
        conf.set("mapinputvalue", arg0[5]);
        conf.set("reduceinputkey", arg0[6]);
        conf.set("reduceinputvalues", arg0[7]);
        conf.set("pyMapInitScript", arg0[8]);

        conf.set("mapred.map.tasks", "30");
        conf.set("mapred.min.split.size", "1"); // bytes; "1B" is not a parseable long

        if (arg0[0].equals("xml")) {
            conf.set("xmlinput.start", arg0[9]);
            conf.set("xmlinput.end", arg0[10]);
        }

        Job job = new Job(conf, "Felix_Run_On" + new Date().toString());

        job.setNumReduceTasks(30);
        job.setJarByClass(TestHadoop_0_20.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path(arg0[1]));
        FileOutputFormat.setOutputPath(job, new Path(arg0[2]));

        if (arg0[0].equals("xml")) {
            job.setInputFormatClass(XmlInputFormat.class);
        } else if (arg0[0].equals("standard")) {
            job.setInputFormatClass(TextInputFormat.class);
        }
        job.setOutputFormatClass(TextOutputFormat.class);

        job.waitForCompletion(true);
        return 0;
    }
}
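
The listing depends on a PythonExecutor helper (presumably elsewhere in felix.io) that is not part of this report. For context, here is a minimal sketch of the interface the calls above (set, get, run, execSingle) appear to assume, built on Jython's PythonInterpreter; the class shape is an assumption, not the actual Felix implementation:

package felix.io;

import org.python.core.PyObject;
import org.python.util.PythonInterpreter;

// Hypothetical sketch: wraps a Jython interpreter around one reusable script.
public class PythonExecutor {

    private final PythonInterpreter interp = new PythonInterpreter();
    private final String script; // main script, re-executed on every run()

    public PythonExecutor(String script) {
        this.script = script;
    }

    // One-off execution, e.g. for pyMapInitScript.
    public void execSingle(String code) {
        interp.exec(code);
    }

    // Bind a Python variable before a run.
    public void set(String name, PyObject value) {
        interp.set(name, value);
    }

    // Re-execute the main script against the currently bound variables.
    public void run() {
        interp.exec(script);
    }

    // Read a variable the script produced.
    public PyObject get(String name) {
        return interp.get(name);
    }
}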

EMMA 2.0.5312 EclEmma Fix 2 (C) Vladimir Roubtsov