EMMA Coverage Report (generated Sat Aug 20 11:00:51 CDT 2011)

COVERAGE SUMMARY FOR SOURCE FILE [Felix.java]

name        class, %    method, %    block, %           line, %
Felix.java  100% (1/1)  92% (11/12)  82% (1651/2014)    78% (275.9/352)

COVERAGE BREAKDOWN BY CLASS AND METHOD

name                                              class, %     method, %    block, %           line, %
class Felix                                       100% (1/1)   92% (11/12)  82% (1651/2014)    78% (275.9/352)
giveMeTableNameIfExist (String, String): String                0% (0/1)     0% (0/37)          0% (0/10)
blahblahExecutor (): void                                      100% (1/1)   62% (330/533)      56% (56.5/101)
parseFelixQuery (): FelixQuery                                 100% (1/1)   66% (90/136)       69% (20/29)
loadEvidence (): void                                          100% (1/1)   92% (816/891)      91% (127.5/140)
cleanUp (): void                                               100% (1/1)   98% (117/119)      100% (14.9/15)
Felix (): void                                                 100% (1/1)   100% (6/6)         100% (2/2)
generateFESignature (FelixPredicate): String                   100% (1/1)   100% (154/154)     100% (20/20)
getAllTextArray (int): ArrayList                               100% (1/1)   100% (17/17)       100% (4/4)
getFelixQuery (): FelixQuery                                   100% (1/1)   100% (3/3)         100% (1/1)
normalizeScript (String): String                               100% (1/1)   100% (19/19)       100% (5/5)
resetACoupleAuxDataStructures (): void                         100% (1/1)   100% (10/10)       100% (4/4)
run (FelixCommandOptions): void                                100% (1/1)   100% (89/89)       100% (21/21)

1package felix.main;
2 
3import java.io.File;
4import java.sql.ResultSet;
5import java.sql.SQLException;
6import java.util.ArrayList;
7import java.util.Date;
8import java.util.HashMap;
9import java.util.HashSet;
10import java.util.regex.Matcher;
11import java.util.regex.Pattern;
12 
13 
14 
15import tuffy.db.RDB;
16import tuffy.db.SQLMan;
17import tuffy.mln.Clause;
18import tuffy.mln.Predicate;
19import tuffy.mln.Type;
20import tuffy.ra.ConjunctiveQuery;
21import tuffy.util.Config;
22import tuffy.util.ExceptionMan;
23import tuffy.util.FileMan;
24import tuffy.util.StringMan;
25import tuffy.util.Timer;
26import tuffy.util.UIMan;
27import felix.compiler.StaticAnalyzer;
28import felix.dstruct.ExecutionPlan;
29import felix.dstruct.FelixPredicate;
30import felix.dstruct.FelixQuery;
31import felix.executor.DDExecutor;
32import felix.executor.Executor;
33import felix.io.HadoopPostgreSQLPopulator;
34import felix.io.IDNormalizor;
35import felix.io.TestHadoop;
36import felix.optimizer.Scheduler;
37import felix.parser.FelixCommandOptions;
38import felix.util.FelixConfig;
39import felix.util.FelixUIMan;
40 
41/**
42 * Felix, an operator-based statistical inference system.
43 * @author Ce Zhang
44 *
45 */
46public class Felix {
47 
48        /**
49         * Felix's query, which consists of the program, query, and evidence.
50         */
51        public FelixQuery fq;
52 
53        /**
54         * Static analyzer, which analyzes the properties of each predicate according to the rules.
55         */
56        protected StaticAnalyzer sa;
57 
58        /**
59         * Database connection.
60         */
61        public static RDB db;
62 
63        /**
64         * Command line options.
65         */
66        public FelixCommandOptions options;
67 
68        /**
69         * Whether this Felix run has already loaded its evidence.
70         */
71        public boolean hasLoadedEvidence = false;
72 
73        /**
74         * Returns Felix's query.
75         * @return the Felix query
76         */
77        public FelixQuery getFelixQuery(){
78                return fq;
79        }
80 
81        /**
82         * Clears static parameters and reinitializes auxiliary data structures.
83         */
84        public void resetACoupleAuxDataStructures(){
85 
86                ConjunctiveQuery.clearIndexHistory();
87 
88                Clause.mappingFromID2Const = new HashMap<Integer, String>();
89                Clause.mappingFromID2Desc = new HashMap<String, String>();
90 
91        }
92 
93        /**
94         * Loads the Felix query from the program and query files. If -useEvid
95         * is used, the evidence files are also loaded by this function.
96         * @throws InterruptedException 
97         */
98        public FelixQuery parseFelixQuery() throws InterruptedException{
99 
100                FelixUIMan.println(">>> Connecting to RDBMS at " + FelixConfig.db_url);
101 
102                if(FelixConfig.evidDBSchema != null){
103                        FelixConfig.db_schema = FelixConfig.evidDBSchema;
104                }
105 
106                db = RDB.getRDBbyConfig();
107                db.resetSchema(FelixConfig.db_schema);
108                db.schema = FelixConfig.db_schema;
109                
110                FelixQuery fq = new FelixQuery();
111 
112                String[] progFiles = options.fprog.split(",");
113                fq.loadPrograms(progFiles);
114 
115                if(options.fquery != null){
116                        String[] queryFiles = options.fquery.split(",");
117                        fq.loadQueries(queryFiles);
118                }
119 
120                if(options.queryAtoms != null){
121                        FelixUIMan.println(">>> Parsing query atoms in command line");
122                        fq.parseQueryCommaList(options.queryAtoms);
123                }
124 
125                if(options.cwaPreds != null){
126                        String[] preds = options.cwaPreds.split(",");
127                        for(String ps : preds){
128                                Predicate p = fq.getPredByName(ps);
129                                if(p == null){
130                                        fq.closeFiles();
131                                        ExceptionMan.die("COMMAND LINE: Unknown predicate name -- " + ps);
132                                }else{
133                                        p.setClosedWorld(true);
134                                }
135                        }
136                }
137 
138                for(Predicate p : fq.getAllPred()){
139                        p.prepareDB(db);
140                }
141 
142                // Whether to use evidence for the static analyzer.
143                // If we do not load evidence here, it will be
144                // loaded in Scheduler.schedule().
145                if(options.useEvid == true){
146                        this.loadEvidence();
147                }
148                return fq;
149        }
150 
151        /**
152         * Normalizes a script string by stripping leading tabs/whitespace, the <~hdfs~ marker, and tabs that follow newlines.
153         * @param script
154         * @return normalized string
155         */
156        public String normalizeScript(String script){
157                String rs = "";
158                rs = script.replaceAll("^\t", "");
159                rs = rs.replaceAll("^(\\t|\\s)*<~hdfs~(\\t|\\s)*", "");
160                rs = rs.replaceAll("\n\t", "\n");
161                return rs;
162        }
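           // Example of the normalization above (the path is hypothetical): for a raw script string
           // "\t  <~hdfs~ /user/felix/map.py\n\tline2", the three replacements strip the leading tab,
           // the "<~hdfs~" marker with its surrounding whitespace, and the tab after the newline,
           // yielding "/user/felix/map.py\nline2".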
163        
164        /**
165         * Returns an ArrayList containing n copies of the string "TEXT".
166         * @param n size of the ArrayList
167         * @return the ArrayList of "TEXT" entries
168         */
169        public ArrayList<String> getAllTextArray(int n){
170                ArrayList<String> rs = new ArrayList<String>();
171                for(int i=0;i<n;i++){
172                        rs.add("TEXT");
173                }
174                return rs;
175        }
176        
177        /**
178         * Returns the table name recorded in metaTable for the given signature, or null if none exists.
179         * @param metaTable meta table that maps signatures to table names
180         * @param signature feature-extraction signature to look up
181         * @return the cached table name, or null
182         */
183        public String giveMeTableNameIfExist(String metaTable, String signature){
184                String ret = null;
185                
186                RDB db = RDB.getRDBbyConfig(FelixConfig.auxSchema);
187                
188                ResultSet rs = db.query("SELECT * FROM " + metaTable + " WHERE signature='" + signature + "'");
189                
190                try {
191                        while(rs.next()){
192                                ret = rs.getString(2);
193                                break; // stop after the first match so the connection below is closed
194                        }
195                } catch (SQLException e) {
196                        e.printStackTrace();
197                }
198                
199                db.close();
200                return ret;
201        }
202        
203        /**
204         * @deprecated Generates the feature-extraction signature string for the given predicate.
205         * @param p the predicate whose feature-extraction configuration is serialized
206         * @return the signature string
207         */
208        public String generateFESignature(FelixPredicate p){
209                
210                
211                System.err.println(p.mapinputvar);
212                System.err.println(p.reduceinputkeyvar);
213                System.err.println(p.reduceinputvaluesvar);
214                
215                System.err.println(p.dependencyFile);
216                System.err.println(p.mapScript);
217                System.err.println(p.reduceScript);
218                System.err.println(p.mapinitScript);
219                System.err.println(p.xmltag);
220                
221 
222                
223                String ret = "";
224                ret += p + "|||||" + StringMan.commaList(p.getArgs()) + "|||||";
225                ret += p.mapinputvar + "|||||";
226                ret += p.mapinitScript + "|||||";
227                ret += p.reduceinputkeyvar + "|||||";
228                ret += p.reduceinputvaluesvar + "|||||";
229                ret += p.dependencyFile + "|||||";
230                ret += p.mapScript + "|||||";
231                ret += p.reduceScript + "|||||";
232                ret += p.xmltag + "|||||";
233                
234                ret = ret.replaceAll("\\s|\\t|\n|\r|\\n|\\r|'", "\"");
235                
236                return ret;
237        }
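           // The signature built above is the predicate name, its argument list, and the map/reduce-related
           // fields (input variable names, init/map/reduce scripts, dependency file, xml tag) joined by the
           // "|||||" separator, with whitespace and single quotes replaced by double quotes, so the whole
           // configuration can be stored and compared as a single string.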
238        
239        /**
240         * Executor for the Blah Blah feature extraction language.
241         */
242        public void blahblahExecutor(){
243 
244                // TODO: the current assumption is that there is no cross-dependency, i.e.,
245                // one JDBC predicate relies on only one Hadoop predicate
246                ArrayList<FelixPredicate> trivialPlanner = new ArrayList<FelixPredicate>();
247                for(FelixPredicate p : fq.getAllPred()){
248                        if(p.needExtractFeatures){
249                                if(p.dependencyFile != null){
250                                        trivialPlanner.add(0, p);
251                                }else if(p.jdbcdep != null){
252                                        trivialPlanner.add(trivialPlanner.size(), p);
253                                }
254                        }
255                }
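                   // Predicates whose input is a file on HDFS (dependencyFile) are placed at the front of
                   // trivialPlanner, while predicates that read another predicate's output via JDBC (jdbcdep)
                   // are appended to the end, so that (per the TODO above) their single Hadoop input has been
                   // materialized by the time they run.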
256                
257                // first extract features
258                for(FelixPredicate p : trivialPlanner){
259                        
260                        if(p.needExtractFeatures){
261                                
262                                if(FelixConfig.auxSchema == null){
263                                        ExceptionMan.die("You must provide a schema to save result files from Hadoop using the -auxSchema option!");
264                                }
265                                
266                                String signature = this.generateFESignature(p);
267                                //String isInTable = this.giveMeTableNameIfExist(metaTable, signature);
268                                
269                                String rsTableName = "_fe_raw_" + p.getName();
270                                
271                                //disable re-use
272                                FelixConfig.forceExt = true;
273                                String isInTable = null;
274                                
275                                if(isInTable != null && !FelixConfig.forceExt){
276                                        UIMan.println(">>> The feature extraction code for " + p + 
277                                                        " is cached and does not need to be extracted again!");
278                                        rsTableName = isInTable;
279                                        
280                                }else{
281                                        
282                                        RDB db = RDB.getRDBbyConfig();
283                                        db.dropTable(FelixConfig.auxSchema + "." + rsTableName);
284                                        db.close();
285                                        
286                                        //db.execute("SET search_path = " + FelixConfig.auxSchema);
287                                        //db.execute("DELETE FROM " + metaTable + " WHERE tableName='" + rsTableName + "'");
288                                        //db.commit();
289                                        //db.close();
290                                        
291                                        UIMan.println(">>> Extracting Features for predicate " + p + " using " + p.dependencyFile);
292                                
293                                        String tmpFileName = "rsFileOnHadoop_" + 
294                                                                        (new Date()).getTime() + 
295                                                                        "_pred_" + p.getName();
296                                        
297                                        if(p.jdbcdep != null){
298                                        
299                                                String relyOn = normalizeScript(p.jdbcdep).replace("jdbc://", "");
300                                                Pattern pp = Pattern.compile("(.*?)\\((.*?)\\)");
301                                                Matcher m = pp.matcher(relyOn);
302                                                m.find();
303                                                String pred = m.group(1);
304                                                String[] variableName = m.group(2).split(",");
305                                                        
306                                                String tableName = "_fe_raw_" + pred;
307                                                String fileName = FelixConfig.hdfsServer + tmpFileName + "_ori_" + pred + "_for_" + p.getName();
308                                                
309                                                UIMan.println(">>> Dumping database table " + tableName + " to HDFS...");
310                                                HadoopPostgreSQLPopulator.dumpTableToHDFS(FelixConfig.auxSchema, 
311                                                                tableName, 
312                                                                variableName.length, 
313                                                                fileName);
314                                                
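                                                   // The array below holds the positional arguments passed to
                                                   // TestHadoop.executeHadoopProgram: the job mode ("standard"),
                                                   // the HDFS input and output paths, the map and reduce scripts,
                                                   // the map/reduce input variable names, and the map/reduce init
                                                   // scripts. (The "xml" mode further down additionally passes the
                                                   // opening and closing XML tags.)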
315                                                String[] toPass = {
316                                                                
317                                                                "standard",
318                                                                
319                                                                normalizeScript(fileName),
320                                                                
321                                                                FelixConfig.hdfsServer + tmpFileName,
322                                                                
323                                                                normalizeScript(p.mapScript),
324                                                                
325                                                                normalizeScript(p.reduceScript),
326                                                                
327                                                                p.mapinputvar,
328                                                                
329                                                                p.reduceinputkeyvar,
330                                                                
331                                                                p.reduceinputvaluesvar,
332                                                                
333                                                                normalizeScript(p.mapinitScript),
334                                                                
335                                                                normalizeScript(p.reduceinitScript)
336                                                                
337                                                };
338                                                
339                                                try {
340                                                        TestHadoop.executeHadoopProgram(toPass);
341                                                        //TestHadoop.post(toPass);
342                                                } catch (Exception e) {
343                                                        e.printStackTrace();
344                                                        ExceptionMan.die("Hadoop Error!");
345                                                }
346                                                
347                                        }else if(p.xmltag != null){
348                                                
349                                                String[] toPass = {
350                                                                
351                                                                "xml",
352                                                                
353                                                                normalizeScript(p.dependencyFile),
354                                                                
355                                                                FelixConfig.hdfsServer + tmpFileName,
356                                                                
357                                                                normalizeScript(p.mapScript),
358                                                                
359                                                                normalizeScript(p.reduceScript),
360                                                                
361                                                                p.mapinputvar,
362                                                                
363                                                                p.reduceinputkeyvar,
364                                                                
365                                                                p.reduceinputvaluesvar,
366                                                                
367                                                                normalizeScript(p.mapinitScript),
368                                                                
369                                                                normalizeScript(p.reduceinitScript),
370                                                                
371                                                                "<" + p.xmltag + ">",
372                                                                
373                                                                "</" + p.xmltag + ">"
374                                                                
375                                                };
376                                                
377                                                try {
378                                                        TestHadoop.executeHadoopProgram(toPass);
379                                                        //TestHadoop.post(toPass);
380                                                } catch (Exception e) {
381                                                        e.printStackTrace();
382                                                        ExceptionMan.die("Hadoop Error!");
383                                                }
384                                                
385                                        }else{
386                                                
387                                                String[] toPass = {
388                                                                
389                                                                "standard",
390                                                                
391                                                                normalizeScript(p.dependencyFile),
392                                                                
393                                                                FelixConfig.hdfsServer + tmpFileName,
394                                                                
395                                                                normalizeScript(p.mapScript),
396                                                                
397                                                                normalizeScript(p.reduceScript),
398                                                                
399                                                                p.mapinputvar,
400                                                                
401                                                                p.reduceinputkeyvar,
402                                                                
403                                                                p.reduceinputvaluesvar,
404                                                                
405                                                                normalizeScript(p.mapinitScript),
406 
407                                                                normalizeScript(p.reduceinitScript)
408                                                                
409                                                };
410                                                
411                                                try {
412                                                        TestHadoop.executeHadoopProgram(toPass);
413                                                        //TestHadoop.post(toPass);
414                                                } catch (Exception e) {
415                                                        e.printStackTrace();
416                                                        ExceptionMan.die("Hadoop Error!");
417                                                }
418                                                
419                                        }
420                                        
421                                        try {
422                                                
423                                                ArrayList<String> tableArgs = (ArrayList<String>) p.getArgs().clone();
424                                                ArrayList<String> tableTypes = getAllTextArray(p.arity());
425                                                
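                                                   // Unary predicates get an extra dummy column padded onto the
                                                   // table schema here, apparently because the loader below expects
                                                   // at least two columns per row.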
426                                                if(p.getArgs().size() == 1){
427                                                        tableArgs.add("I_am_just_a_sad_meaningless_value");
428                                                        tableTypes.add("TEXT");
429                                                }
430                                                
431                                                UIMan.verbose(0, ">>> Dump to database table for relation " + p.toString());
432                                                HadoopPostgreSQLPopulator.createAndPopulateTableFromDir
433                                                        (FelixConfig.auxSchema, rsTableName, 
434                                                                        tableArgs, tableTypes, 
435                                                                        FelixConfig.hdfsServer + tmpFileName + "_dir", FelixConfig.nReduce);
436                                        } catch (Exception e) {
437                                                e.printStackTrace();
438                                        }
439                                }
440                                
441                        }
442                }
443                
444        }
445        
446        /**
447         * Loads evidence from evidence files and/or database tables.
448         * @throws InterruptedException 
449         */
450        public void loadEvidence() throws InterruptedException{
451                
452                FelixConfig.mixturedLoading = false;
453                
454                boolean isOnlyFromFile = true;
455                
456                for(FelixPredicate fp : fq.getAllPred()){
457                        if(fp.needExtractFeatures || fp.loadFromDatabase){
458                                isOnlyFromFile = false;
459                                FelixConfig.mixturedLoading = true;
460                        }        
461                }
462                
463                if(options.fevid != null && isOnlyFromFile){
464                        String[] evidFiles = options.fevid.split(",");
465                        fq.loadEvidences(evidFiles);
466                        fq.materializeTables(db);
467                }else{
468                        
469                        // first, generate pure-string form tables
470                        FelixConfig.mixturedLoading = true;
471                        
472                        HashMap<FelixPredicate, String> strTableName = 
473                                        new HashMap<FelixPredicate, String>();
474                        
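                           // strTableName maps each predicate to the table holding its raw, string-valued
                           // evidence; the three steps below fill it from evidence files, from existing
                           // database tables, and from feature-extraction (blahblah) output.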
475                                // first, load from evid file
476                        if(options.fevid != null){
477                                String[] evidFiles = options.fevid.split(",");
478                                fq.loadEvidences(evidFiles);
479                        }
480                        for(FelixPredicate fp : fq.getAllPred()){
481                                if(!fp.needExtractFeatures && !fp.loadFromDatabase){
482                                        fp.flushStrEvidence("_tmp_str_loading_" + fp.getName());
483                                        strTableName.put(fp, "_tmp_str_loading_" + fp.getName());
484                                }        
485                        }
486                        
487                                // second, from db table
488                        for(FelixPredicate fp : fq.getAllPred()){
489                                if(fp.loadFromDatabase){
490                                        String rName = fp.loadingSchema + "." + fp.loadingTable;
491                                        strTableName.put(fp, rName);
492                                }        
493                         }
494                        
495                                // third, blahblah program
496                        this.blahblahExecutor();
497                        for(FelixPredicate fp : fq.getAllPred()){
498                                if(fp.needExtractFeatures){
499                                        strTableName.put(fp, FelixConfig.auxSchema + "." + "_fe_raw_" + fp.getName());
500                                }
501                        }
502                        
503                        RDB db = RDB.getRDBbyConfig(Config.db_schema);
504                        db.dropSequence("_tmp_seq_id");
505                        db.execute("CREATE SEQUENCE _tmp_seq_id START " + (fq.mapIDConstant.size() + 1) + ";");
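                           // The sequence starts after the constants already assigned by the parser
                           // (fq.mapIDConstant), so IDs generated for new constants during bulk loading
                           // do not collide with existing constant IDs.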
506                        //second, build constant table
507                        HashMap<String, ArrayList<String>> typeDomain = new HashMap<String, ArrayList<String>>();
508                                // first, flush constants in program
509                        for(FelixPredicate fp : fq.getAllPred()){
510                                for(int i=0;i<fp.arity();i++){
511                                        Type type = fp.getTypeAt(i);
512                                        String tName = type.name;
513                                        if(type.isNonSymbolicType()){
514                                                continue;
515                                        }
516                                        if(!typeDomain.containsKey(tName)){
517                                                typeDomain.put(tName, new ArrayList<String>());
518                                                
519                                                String sql = "CREATE TABLE type_" + tName + " (constantid bigint, constantvalue TEXT); ";
520                                                db.execute(sql);
521                                                db.commit();
522                                                
523                                                for(Integer constant : type.getDomain()){
524                                                        
525                                                        sql = "INSERT INTO type_" + tName + " VALUES (" 
526                                                                        + constant + ", " + SQLMan.escapeString(fq.mapIDConstant.get(constant)) 
527                                                                        + ")"; 
528                                                        db.execute(sql);                
529                                                        
530                                                }
531                                                
532                                                
533                                        }
534                                }
535                        }
536                        
537                        
538                        for(FelixPredicate fp : fq.getAllPred()){
539                                
540                                if(!strTableName.containsKey(fp)){
541                                        continue;
542                                }
543                                
544                                for(int i=0;i<fp.arity();i++){
545                                        Type type = fp.getTypeAt(i);
546                                        if(type.isNonSymbolicType()){
547                                                continue;
548                                        }
549                                        String tName = type.name;
550                                        if(!typeDomain.containsKey(tName)){
551                                                typeDomain.put(tName, new ArrayList<String>());
552                                        }
553                                        typeDomain.get(tName).add("SELECT " + fp.getArgs().get(i) 
554                                                        + " constantVALUE FROM " + strTableName.get(fp));
555                                }
556                         }
557                        
558 
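                           // The loop below appends constants that occur in the string-form evidence but are
                           // not yet in the per-type tables, drawing fresh IDs from _tmp_seq_id, and then
                           // indexes each type table on constantID.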
559                        ArrayList<String> allTypeSQLs = new ArrayList<String>();
560                        for(String tName : typeDomain.keySet()){
561                                if(typeDomain.get(tName).size() == 0){
562                                        continue;
563                                }
564                                String sql = "INSERT INTO type_" + tName + " " + 
565                                                "SELECT nextval('_tmp_seq_id') constantID, constantVALUE FROM " + 
566                                                "(SELECT DISTINCT constantVALUE FROM " + 
567                                                "(" + StringMan.join(" UNION ALL ", typeDomain.get(tName)) + " ) nt " +
568                                                " WHERE constantVALUE NOT IN (SELECT constantvalue FROM type_" + tName + ")" +
569                                                ") wt";        
570                                allTypeSQLs.add("SELECT * FROM type_" + tName);
571                                UIMan.print(">>> Create table for type " + tName + "...");
572                                db.execute(sql);
573                                
574                                UIMan.print("*");
575                                sql = "CREATE INDEX _idx_type_id" + tName + " on type_" + tName + " (constantID) ";
576                                db.dropIndex("_idx_type_id" + tName);
577                                db.execute(sql);
578                                db.analyze("type_" + tName);
579                                UIMan.println("");
580                        }
581                        
582                        String crel = Config.relConstants;
583                        db.dropView(crel);
584                        db.dropTable(crel);
585                        String sql = "CREATE VIEW " + crel +
586                                        " AS SELECT constantID::bigint id, constantVALUE string FROM (" + 
587                                        StringMan.join(" UNION ALL ", allTypeSQLs) + " ) nt";
588                        db.execute(sql);
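                           // Config.relConstants is now a view that unions all per-type tables into a single
                           // (id, string) constant mapping.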
589                        
590                        //third, flush to Predicate table.
591                        for(FelixPredicate fp : fq.getAllPred()){
592                                String rawTable = strTableName.get(fp);
593                                String currentTable = strTableName.get(fp);
594                                
595                                UIMan.print(">>> Create ID table for " + fp);
596                                
597                                // first join the string table with constant map
598                                for(int i=0 ; i<fp.arity(); i++){
599                                        Type type = fp.getTypeAt(i);
600                                        
601                                        if(type.isNonSymbolicType()){
602                                                continue;
603                                        }
604                                        
605                                        String typeTable = "type_" + type.name;
606                                        
607                                        String column = fp.getArgs().get(i);
608                                        
609                                        String newTable = rawTable + "_ser_" + column;
610                                        
611                                        ArrayList<String> selList = new ArrayList<String>();
612                                        selList.add("t0.truth");
613                                        selList.add("t0.prior");
614                                        for(int j=0 ; j<fp.arity(); j++){
615                                                if(j==i){
616                                                        selList.add("t1.constantid::bigint " + fp.getArgs().get(j));
617                                                }else{
618                                                        selList.add("t0." + fp.getArgs().get(j));
619                                                }
620                                        }
621                                        
622                                        db.dropTable(newTable);
623                                        db.dropView(newTable);
624                                        sql = "CREATE TABLE " + newTable + " AS SELECT " + StringMan.commaList(selList) + " FROM " + 
625                                                        currentTable + " t0, " + typeTable + " t1" + " WHERE " +
626                                                        "t0." + column + "=" + "t1.constantvalue";
627                                        
628                                        UIMan.print(".");
629                                        db.execute(sql);
630                                        currentTable = newTable;
631                                }
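                                   // At this point currentTable holds the predicate's evidence with every
                                   // symbolic argument replaced by its bigint constant ID; non-symbolic
                                   // arguments are left unchanged.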
632                                
633                                
634                                
635                                // second, copy to pred_* table
636                                ArrayList<String> insertList = new ArrayList<String>();
637                                ArrayList<String> selList = new ArrayList<String>();
638                                
639                                //insertList.add("truth");
640                                //insertList.add("prior");
641                                //insertList.add("club");
642                                
643                                db.dropSequence("_tmp_seq_id_" + fp.getName());
644                                db.execute("CREATE SEQUENCE _tmp_seq_id_" + fp.getName() + " START 1;");
645                                
646                                //selList.add("nextval('_tmp_seq_id_" + fp.getName()+"')");
647                                //selList.add("NULL::bigint");
648                                selList.add("truth::Bool");
649                                selList.add("prior::Float");
650                                selList.add("2::INT");
651                                selList.add("NULL::INT");
652                                for(int i=0 ; i<fp.arity(); i++){
653                                        
654                                        Type type = fp.getTypeAt(i);
655                                        if(type.isNonSymbolicType()){
656                                                insertList.add(fp.getArgs().get(i) + "");
657                                                selList.add(fp.getArgs().get(i) + "::" + type.getNonSymbolicTypeInSQL());
658                                        }else{
659                                                insertList.add(fp.getArgs().get(i) + "");
660                                                selList.add(fp.getArgs().get(i) + "::bigint");
661                                        }
662                                }
663                                
664                                /*
665                                sql = "INSERT INTO " + fp.getRelName() + 
666                                                " ( " + StringMan.commaList(insertList) + " ) " + 
667                                                " SELECT " + StringMan.commaList(selList) + " FROM " + currentTable; 
668                                */
669                                
670                                
671                                //sql += "id SERIAL PRIMARY KEY,\n";
672                                //sql += "truth BOOL,\n";
673                                //sql += "prior FLOAT,\n";
674                                //sql += "club INT DEFAULT 0,\n";
675                                //sql += "atomID INT DEFAULT NULL,\n";
676                                
677                                //db.dropTable(fp.getRelName());
678                                
679                                sql = "INSERT INTO " + fp.getRelName() + "(truth, " +
680                                        "prior, club, atomID, " +  
681                                        StringMan.commaList(insertList) + " ) " + 
682                                        " SELECT " + StringMan.commaList(selList) 
683                                        + " FROM " + currentTable;
684 
685                                
686                                UIMan.print("*");
687                                db.execute(sql);
688                                db.commit();
689                                
690                                UIMan.println("");
691                        }
692                        
693                        db.close();
694                        
695                }
696                
697                fq.executeAllDatalogRules(db);
698                
699                hasLoadedEvidence = true;
700 
701        }
702 
703        /**
704         * Clean up temporary data: the schema in PostgreSQL and the working directory.
705         */
706        protected void cleanUp(){                
707                Config.exiting_mode = true;
708                UIMan.println(">>> Cleaning up temporary data");
709                if(!Config.keep_db_data){
710                        UIMan.print("    Removing database schema '" + Config.db_schema + "'...");
711                        UIMan.println(db.dropSchema(Config.db_schema)?"OK" : "FAILED");
712                }else{
713                        UIMan.println("    Data remains in schema '" + Config.db_schema + "'.");
714                }
715                db.close();
716 
717                UIMan.print("    Removing temporary dir '" + Config.getWorkingDir() + "'...");
718                UIMan.println(FileMan.removeDirectory(new File(Config.getWorkingDir()))?"OK" : "FAILED");
719 
720                UIMan.println("*** Felix exited at " + Timer.getDateTime() + " after running for " + Timer.elapsed());
721                UIMan.closeDribbleFile();
722                
723                
724                for(RDB db : RDB.historyInstances){
725                        db.close();
726                }
727                
728        }
729        
730        /**
731         * Run Felix!
732         * @param opt Command line options.
733         * @throws InterruptedException 
734         */
735        public void run(FelixCommandOptions opt) throws InterruptedException{
736 
737                Timer.start("Felix-Timer");
738                this.options = opt;
739                resetACoupleAuxDataStructures();
740 
741                FelixConfig.allRuleAsMLN = this.options.allRuleAsMLN;
742                FelixConfig.allView = this.options.allView;
743                FelixConfig.allMat = this.options.allMat;
744                FelixConfig.hadoopLocal = this.options.local;
745                FelixConfig.auxSchema = this.options.auxSchema;
746                FelixConfig.forceExt = this.options.forceExt;
747                
748                fq = this.parseFelixQuery();
749 
750                sa = new StaticAnalyzer(this.fq, options);
751                sa.parse();
752 
753                Scheduler sc = new Scheduler(this, this.fq, options);
754                ExecutionPlan ep = sc.schedule();
755 
756                if(options.useDualDecomposition){
757                        DDExecutor ec = new DDExecutor(ep, fq, options);
758                        ec.run();
759                }else{
760                        Executor ec = new Executor(ep, fq, options);
761                        ec.run();
762                }
763                
764                cleanUp();
765        }
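           // A minimal, hypothetical usage sketch (the FelixCommandOptions constructor shown here is an
           // assumption, not taken from the parser):
           //
           //     FelixCommandOptions opt = new FelixCommandOptions(args);
           //     new Felix().run(opt);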
766 
767}
768 
769 
770 
771 
